/*
 * Copyright (c) 2000, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "jni.h"
#include "jvm.h"
#include "classfile/classFileStream.hpp"
#include "classfile/vmSymbols.hpp"
#include "jfr/jfrEvents.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/resourceArea.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "oops/access.inline.hpp"
#include "oops/fieldStreams.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/typeArrayOop.inline.hpp"
#include "oops/valueArrayKlass.hpp"
#include "oops/valueArrayOop.hpp"
#include "oops/valueArrayOop.inline.hpp"
#include "prims/unsafe.hpp"
#include "runtime/atomic.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
#include "runtime/globals.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/reflection.hpp"
#include "runtime/thread.hpp"
#include "runtime/threadSMR.hpp"
#include "runtime/vm_version.hpp"
#include "services/threadService.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"
#include "utilities/dtrace.hpp"
#include "utilities/macros.hpp"

/**
 * Implementation of the jdk.internal.misc.Unsafe class
 */


#define MAX_OBJECT_SIZE \
  ( arrayOopDesc::header_size(T_DOUBLE) * HeapWordSize \
    + ((julong)max_jint * sizeof(double)) )


#define UNSAFE_ENTRY(result_type, header) \
  JVM_ENTRY(static result_type, header)

#define UNSAFE_LEAF(result_type, header) \
  JVM_LEAF(static result_type, header)

#define UNSAFE_END JVM_END


static inline void* addr_from_java(jlong addr) {
  // This assert fails in a variety of ways on 32-bit systems.
  // It is impossible to predict whether native code that converts
  // pointers to longs will sign-extend or zero-extend the addresses.
  //assert(addr == (uintptr_t)addr, "must not be odd high bits");
  return (void*)(uintptr_t)addr;
}

static inline jlong addr_to_java(void* p) {
  assert(p == (void*)(uintptr_t)p, "must not be odd high bits");
  return (uintptr_t)p;
}


// Note: The VM's obj_field and related accessors use byte-scaled
// ("unscaled") offsets, just as the unsafe methods do.

// However, the method Unsafe.fieldOffset explicitly declines to
// guarantee this.  The field offset values manipulated by the Java user
// through the Unsafe API are opaque cookies that just happen to be byte
// offsets.  We represent this state of affairs by passing the cookies
// through conversion functions when going between the VM and the Unsafe API.
// The conversion functions just happen to be no-ops at present.

static inline jlong field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}

static inline jlong field_offset_from_byte_offset(jlong byte_offset) {
  return byte_offset;
}

static inline void assert_field_offset_sane(oop p, jlong field_offset) {
#ifdef ASSERT
  jlong byte_offset = field_offset_to_byte_offset(field_offset);

  if (p != NULL) {
    assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
    if (byte_offset == (jint)byte_offset) {
      void* ptr_plus_disp = (address)p + byte_offset;
      assert(p->field_addr_raw((jint)byte_offset) == ptr_plus_disp,
             "raw [ptr+disp] must be consistent with oop::field_addr_raw");
    }
    jlong p_size = HeapWordSize * (jlong)(p->size());
    assert(byte_offset < p_size, "Unsafe access: offset " INT64_FORMAT " > object's size " INT64_FORMAT, (int64_t)byte_offset, (int64_t)p_size);
  }
#endif
}

static inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
  assert_field_offset_sane(p, field_offset);
  jlong byte_offset = field_offset_to_byte_offset(field_offset);

  if (p != NULL) {
    p = Access<>::resolve(p);
  }

  if (sizeof(char*) == sizeof(jint)) {   // (this constant folds!)
    return (address)p + (jint) byte_offset;
  } else {
    return (address)p +        byte_offset;
  }
}

// Externally callable versions:
// (Use these in compiler intrinsics which emulate unsafe primitives.)
jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}
jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
  return byte_offset;
}
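
// A hedged sketch of how the cookie scheme above plays out end to end
// (the Java-side names are the jdk.internal.misc.Unsafe methods registered
// at the bottom of this file; the local variable names are illustrative only):
//
//   long off = U.objectFieldOffset(f);  // find_field_offset() wraps the byte offset
//                                       // via field_offset_from_byte_offset()
//   int  v   = U.getInt(o, off);        // MemoryAccess<jint> unwraps it again via
//                                       // field_offset_to_byte_offset()
//
// Because both conversions are currently no-ops, the cookie is numerically the
// byte offset, but callers must not rely on that.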

///// Data read/writes on the Java heap and in native (off-heap) memory

/**
 * Helper class for accessing memory.
 *
 * Normalizes values and wraps accesses in
 * JavaThread::doing_unsafe_access() if needed.
 */
template <typename T>
class MemoryAccess : StackObj {
  JavaThread* _thread;
  oop _obj;
  ptrdiff_t _offset;

  // Resolves and returns the address of the memory access.
  // This raw memory access may fault, so we make sure it happens within the
  // guarded scope by making the access volatile at least. Since the store
  // of Thread::set_doing_unsafe_access() is also volatile, these accesses
  // can not be reordered by the compiler. Therefore, if the access triggers
  // a fault, we will know that Thread::doing_unsafe_access() returns true.
  volatile T* addr() {
    void* addr = index_oop_from_field_offset_long(_obj, _offset);
    return static_cast<volatile T*>(addr);
  }

  template <typename U>
  U normalize_for_write(U x) {
    return x;
  }

  jboolean normalize_for_write(jboolean x) {
    return x & 1;
  }

  template <typename U>
  U normalize_for_read(U x) {
    return x;
  }

  jboolean normalize_for_read(jboolean x) {
    return x != 0;
  }

  /**
   * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
   */
  class GuardUnsafeAccess {
    JavaThread* _thread;

  public:
    GuardUnsafeAccess(JavaThread* thread) : _thread(thread) {
      // native/off-heap access which may raise SIGBUS if accessing
      // memory mapped file data in a region of the file which has
      // been truncated and is now invalid
      _thread->set_doing_unsafe_access(true);
    }

    ~GuardUnsafeAccess() {
      _thread->set_doing_unsafe_access(false);
    }
  };

public:
  MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
    : _thread(thread), _obj(JNIHandles::resolve(obj)), _offset((ptrdiff_t)offset) {
    assert_field_offset_sane(_obj, offset);
  }

  T get() {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      T ret = RawAccess<>::load(addr());
      return normalize_for_read(ret);
    } else {
      T ret = HeapAccess<>::load_at(_obj, _offset);
      return normalize_for_read(ret);
    }
  }

  void put(T x) {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      RawAccess<>::store(addr(), normalize_for_write(x));
    } else {
      assert(!_obj->is_value() || _obj->mark()->is_larval_state(), "must be an object instance or a larval value");
      HeapAccess<>::store_at(_obj, _offset, normalize_for_write(x));
    }
  }

  T get_volatile() {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      volatile T ret = RawAccess<MO_SEQ_CST>::load(addr());
      return normalize_for_read(ret);
    } else {
      T ret = HeapAccess<MO_SEQ_CST>::load_at(_obj, _offset);
      return normalize_for_read(ret);
    }
  }

  void put_volatile(T x) {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
    } else {
      HeapAccess<MO_SEQ_CST>::store_at(_obj, _offset, normalize_for_write(x));
    }
  }
};

#ifdef ASSERT
/*
 * Get the field descriptor of the field of the given object at the given offset.
 */
static bool get_field_descriptor(oop p, jlong offset, fieldDescriptor* fd) {
  bool found = false;
  Klass* k = p->klass();
  if (k->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(k);
    found = ik->find_field_from_offset((int)offset, false, fd);
    if (!found && ik->is_mirror_instance_klass()) {
      Klass* k2 = java_lang_Class::as_Klass(p);
      if (k2->is_instance_klass()) {
        ik = InstanceKlass::cast(k2);
        found = ik->find_field_from_offset((int)offset, true, fd);
      }
    }
  }
  return found;
}
#endif // ASSERT

static void assert_and_log_unsafe_value_access(oop p, jlong offset, ValueKlass* vk) {
  Klass* k = p->klass();
#ifdef ASSERT
  if (k->is_instance_klass()) {
    assert_field_offset_sane(p, offset);
    fieldDescriptor fd;
    bool found = get_field_descriptor(p, offset, &fd);
    if (found) {
      assert(found, "value field not found");
      assert(fd.is_flattened(), "field not flat");
    } else {
      if (log_is_enabled(Trace, valuetypes)) {
        log_trace(valuetypes)("not a field in %s at offset " SIZE_FORMAT_HEX,
                              p->klass()->external_name(), offset);
      }
    }
  } else if (k->is_valueArray_klass()) {
    ValueArrayKlass* vak = ValueArrayKlass::cast(k);
    int index = (offset - vak->array_header_in_bytes()) / vak->element_byte_size();
    address dest = (address)((valueArrayOop)p)->value_at_addr(index, vak->layout_helper());
    assert(dest == ((address)p) + offset, "invalid offset");
  } else {
    ShouldNotReachHere();
  }
#endif // ASSERT
  if (log_is_enabled(Trace, valuetypes)) {
    if (k->is_valueArray_klass()) {
      ValueArrayKlass* vak = ValueArrayKlass::cast(k);
      int index = (offset - vak->array_header_in_bytes()) / vak->element_byte_size();
      address dest = (address)((valueArrayOop)p)->value_at_addr(index, vak->layout_helper());
      log_trace(valuetypes)("%s array type %s index %d element size %d offset " SIZE_FORMAT_HEX " at " INTPTR_FORMAT,
                            p->klass()->external_name(), vak->external_name(),
                            index, vak->element_byte_size(), offset, p2i(dest));
    } else {
      log_trace(valuetypes)("%s field type %s at offset " SIZE_FORMAT_HEX,
                            p->klass()->external_name(), vk->external_name(), offset);
    }
  }
}

// These functions allow a null base pointer with an arbitrary address.
// But if the base pointer is non-null, the offset should make some sense.
// That is, it should be in the range [0, MAX_OBJECT_SIZE].
UNSAFE_ENTRY(jobject, Unsafe_GetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(env, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  assert(!p->is_value() || p->mark()->is_larval_state(), "must be an object instance or a larval value");
  HeapAccess<ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_ValueHeaderSize(JNIEnv *env, jobject unsafe, jclass c)) {
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(c));
  ValueKlass* vk = ValueKlass::cast(k);
  return vk->first_field_offset();
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_IsFlattenedArray(JNIEnv *env, jobject unsafe, jclass c)) {
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(c));
  return k->is_valueArray_klass();
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_UninitializedDefaultValue(JNIEnv *env, jobject unsafe, jclass vc)) {
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
  ValueKlass* vk = ValueKlass::cast(k);
  oop v = vk->default_value();
  return JNIHandles::make_local(env, v);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jclass vc)) {
  oop base = JNIHandles::resolve(obj);
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
  ValueKlass* vk = ValueKlass::cast(k);
  assert_and_log_unsafe_value_access(base, offset, vk);
  Handle base_h(THREAD, base);
  oop v = vk->allocate_instance(CHECK_NULL); // allocate instance
  vk->initialize(CHECK_NULL); // If field is a default value, value class might not be initialized yet
  vk->value_store(((address)(oopDesc*)base_h()) + offset,
                  vk->data_for_oop(v),
                  true, true);
  return JNIHandles::make_local(env, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jclass vc, jobject value)) {
  oop base = JNIHandles::resolve(obj);
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
  ValueKlass* vk = ValueKlass::cast(k);
  assert(!base->is_value() || base->mark()->is_larval_state(), "must be an object instance or a larval value");
  assert_and_log_unsafe_value_access(base, offset, vk);
  oop v = JNIHandles::resolve(value);
  vk->value_store(vk->data_for_oop(v),
                 ((address)(oopDesc*)base) + offset, true, true);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_MakePrivateBuffer(JNIEnv *env, jobject unsafe, jobject value)) {
  oop v = JNIHandles::resolve_non_null(value);
  assert(v->is_value(), "must be a value instance");
  Handle vh(THREAD, v);
  ValueKlass* vk = ValueKlass::cast(v->klass());
  instanceOop new_value = vk->allocate_instance(CHECK_NULL);
  vk->value_store(vk->data_for_oop(vh()), vk->data_for_oop(new_value), true, false);
  markOop mark = new_value->mark();
  new_value->set_mark(mark->enter_larval_state());
  return JNIHandles::make_local(env, new_value);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_FinishPrivateBuffer(JNIEnv *env, jobject unsafe, jobject value)) {
  oop v = JNIHandles::resolve(value);
  assert(v->mark()->is_larval_state(), "must be a larval value");
  markOop mark = v->mark();
  v->set_mark(mark->exit_larval_state());
  return JNIHandles::make_local(env, v);
} UNSAFE_END
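
// A hedged sketch of the buffered-update protocol implied by the larval-state
// asserts in this file (the Java-side call sequence and variable names are
// illustrative only, not a specification):
//
//   Object buf = U.makePrivateBuffer(v);     // larval copy of the value instance
//   U.putInt(buf, off, 42);                  // put/putValue permit writes while larval
//   Object v2  = U.finishPrivateBuffer(buf); // clears the larval state again
//
// Outside that window, the put paths assert that the target is either an
// ordinary object instance or a larval value (see MemoryAccess::put,
// Unsafe_PutReference and Unsafe_PutValue above).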

UNSAFE_ENTRY(jobject, Unsafe_GetReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(env, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(env, v);
} UNSAFE_END

UNSAFE_LEAF(jboolean, Unsafe_isBigEndian0(JNIEnv *env, jobject unsafe)) {
#ifdef VM_LITTLE_ENDIAN
  return false;
#else
  return true;
#endif
} UNSAFE_END

UNSAFE_LEAF(jint, Unsafe_unalignedAccess0(JNIEnv *env, jobject unsafe)) {
  return UseUnalignedAccesses;
} UNSAFE_END

#define DEFINE_GETSETOOP(java_type, Type) \
 \
UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP.
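
// For illustration (a sketch of the expansion, not generated code),
// DEFINE_GETSETOOP(jint, Int) produces roughly
//
//   UNSAFE_ENTRY(jint, Unsafe_GetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
//     return MemoryAccess<jint>(thread, obj, offset).get();
//   } UNSAFE_END
//
//   UNSAFE_ENTRY(void, Unsafe_PutInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint x)) {
//     MemoryAccess<jint>(thread, obj, offset).put(x);
//   } UNSAFE_END
//
// i.e. every primitive accessor is a thin wrapper around MemoryAccess<T>.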

DEFINE_GETSETOOP(jboolean, Boolean)
DEFINE_GETSETOOP(jbyte, Byte)
DEFINE_GETSETOOP(jshort, Short);
DEFINE_GETSETOOP(jchar, Char);
DEFINE_GETSETOOP(jint, Int);
DEFINE_GETSETOOP(jlong, Long);
DEFINE_GETSETOOP(jfloat, Float);
DEFINE_GETSETOOP(jdouble, Double);

#undef DEFINE_GETSETOOP

#define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
 \
UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP_VOLATILE.

DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
DEFINE_GETSETOOP_VOLATILE(jshort, Short);
DEFINE_GETSETOOP_VOLATILE(jchar, Char);
DEFINE_GETSETOOP_VOLATILE(jint, Int);
DEFINE_GETSETOOP_VOLATILE(jlong, Long);
DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);

#undef DEFINE_GETSETOOP_VOLATILE

UNSAFE_LEAF(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::acquire();
} UNSAFE_END

UNSAFE_LEAF(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::release();
} UNSAFE_END

UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::fence();
} UNSAFE_END

////// Allocation requests

UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
  ThreadToNativeFromVM ttnfv(thread);
  return env->AllocObject(cls);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
  size_t sz = (size_t)size;

  sz = align_up(sz, HeapWordSize);
  void* x = os::malloc(sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
  void* p = addr_from_java(addr);
  size_t sz = (size_t)size;
  sz = align_up(sz, HeapWordSize);

  void* x = os::realloc(p, sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
  void* p = addr_from_java(addr);

  os::free(p);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
  size_t sz = (size_t)size;

  oop base = JNIHandles::resolve(obj);
  void* p = index_oop_from_field_offset_long(base, offset);

  Copy::fill_to_memory_atomic(p, sz, value);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
  size_t sz = (size_t)size;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  void* src = index_oop_from_field_offset_long(srcp, srcOffset);
  void* dst = index_oop_from_field_offset_long(dstp, dstOffset);

  Copy::conjoint_memory_atomic(src, dst, sz);
} UNSAFE_END

// This function is a leaf because, if the source and destination are both in native memory,
// the copy may potentially be very large and we don't want to disable GC if we can avoid it.
// If either the source or the destination (or both) is on the heap, the function enters the VM
// using JVM_ENTRY_FROM_LEAF.
UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
  size_t sz = (size_t)size;
  size_t esz = (size_t)elemSize;

  if (srcObj == NULL && dstObj == NULL) {
    // Both src & dst are in native memory
    address src = (address)srcOffset;
    address dst = (address)dstOffset;

    Copy::conjoint_swap(src, dst, sz, esz);
  } else {
    // At least one of src/dst are on heap, transition to VM to access raw pointers

    JVM_ENTRY_FROM_LEAF(env, void, Unsafe_CopySwapMemory0) {
      oop srcp = JNIHandles::resolve(srcObj);
      oop dstp = JNIHandles::resolve(dstObj);

      address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
      address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);

      Copy::conjoint_swap(src, dst, sz, esz);
    } JVM_END
  }
} UNSAFE_END

////// Random queries

UNSAFE_LEAF(jint, Unsafe_AddressSize0(JNIEnv *env, jobject unsafe)) {
  return sizeof(void*);
} UNSAFE_END

UNSAFE_LEAF(jint, Unsafe_PageSize()) {
  return os::vm_page_size();
} UNSAFE_END

static jlong find_field_offset(jclass clazz, jstring name, TRAPS) {
  assert(clazz != NULL, "clazz must not be NULL");
  assert(name != NULL, "name must not be NULL");

  ResourceMark rm(THREAD);
  char *utf_name = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(name));

  InstanceKlass* k = InstanceKlass::cast(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(clazz)));

  jint offset = -1;
  for (JavaFieldStream fs(k); !fs.done(); fs.next()) {
    Symbol *name = fs.name();
    if (name->equals(utf_name)) {
      offset = fs.offset();
      break;
    }
  }
  if (offset < 0) {
    THROW_0(vmSymbols::java_lang_InternalError());
  }
  return field_offset_from_byte_offset(offset);
}

static jlong find_field_offset(jobject field, int must_be_static, TRAPS) {
  assert(field != NULL, "field must not be NULL");

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  Klass* k        = java_lang_Class::as_Klass(mirror);
  int slot        = java_lang_reflect_Field::slot(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  if (must_be_static >= 0) {
    int really_is_static = ((modifiers & JVM_ACC_STATIC) != 0);
    if (must_be_static != really_is_static) {
      THROW_0(vmSymbols::java_lang_IllegalArgumentException());
    }
  }

  int offset = InstanceKlass::cast(k)->field_offset(slot);
  return field_offset_from_byte_offset(offset);
}

UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 0, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset1(JNIEnv *env, jobject unsafe, jclass c, jstring name)) {
  return find_field_offset(c, name, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 1, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != NULL, "field must not be NULL");

  // Note:  In this VM implementation, a field address is always a short
  // offset from the base of a klass metaobject.  Thus, the full dynamic
  // range of the return type is never used.  However, some implementations
  // might put the static field inside an array shared by many classes,
  // or even at a fixed address, in which case the address could be quite
  // large.  In that last case, this function would return NULL, since
  // the address would operate alone, without any base pointer.

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  if ((modifiers & JVM_ACC_STATIC) == 0) {
    THROW_0(vmSymbols::java_lang_IllegalArgumentException());
  }

  return JNIHandles::make_local(env, mirror);
} UNSAFE_END
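
// A hedged sketch of how the base/offset pair returned here is meant to be
// combined by the caller (Java-side; variable names are illustrative only):
//
//   Object base = U.staticFieldBase(f);     // here: the declaring class's mirror
//   long   off  = U.staticFieldOffset(f);   // offset cookie for the static field
//   int    v    = U.getInt(base, off);      // base + off addresses the static field
//
// i.e. the returned "base" is only meaningful when paired with the matching
// static field offset, never as an address by itself.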

UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != NULL, "clazz must not be NULL");

  oop mirror = JNIHandles::resolve_non_null(clazz);

  Klass* klass = java_lang_Class::as_Klass(mirror);
  if (klass != NULL && klass->should_be_initialized()) {
    InstanceKlass* k = InstanceKlass::cast(klass);
    k->initialize(CHECK);
  }
}
UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != NULL, "clazz must not be NULL");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* klass = java_lang_Class::as_Klass(mirror);

  if (klass != NULL && klass->should_be_initialized()) {
    return true;
  }

  return false;
}
UNSAFE_END

static void getBaseAndScale(int& base, int& scale, jclass clazz, TRAPS) {
  assert(clazz != NULL, "clazz must not be NULL");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* k = java_lang_Class::as_Klass(mirror);

  if (k == NULL || !k->is_array_klass()) {
    THROW(vmSymbols::java_lang_InvalidClassException());
  } else if (k->is_objArray_klass()) {
    base  = arrayOopDesc::base_offset_in_bytes(T_OBJECT);
    scale = heapOopSize;
  } else if (k->is_typeArray_klass()) {
    TypeArrayKlass* tak = TypeArrayKlass::cast(k);
    base  = tak->array_header_in_bytes();
    assert(base == arrayOopDesc::base_offset_in_bytes(tak->element_type()), "array_header_size semantics ok");
    scale = (1 << tak->log2_element_size());
  } else if (k->is_valueArray_klass()) {
    ValueArrayKlass* vak = ValueArrayKlass::cast(k);
    ValueKlass* vklass = vak->element_klass();
    base = vak->array_header_in_bytes();
    scale = vak->element_byte_size();
  } else {
    ShouldNotReachHere();
  }
}

UNSAFE_ENTRY(jint, Unsafe_ArrayBaseOffset0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  return field_offset_from_byte_offset(base);
} UNSAFE_END


UNSAFE_ENTRY(jint, Unsafe_ArrayIndexScale0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  // This VM packs both fields and array elements down to the byte.
  // But watch out:  If this changes, so that array references for
  // a given primitive type (say, T_BOOLEAN) use different memory units
  // than fields, this method MUST return zero for such arrays.
  // For example, the VM used to store sub-word sized fields in full
  // words in the object layout, so that accessors like getByte(Object,int)
  // did not really do what one might expect for arrays.  Therefore,
  // this function used to report a zero scale factor, so that the user
  // would know not to attempt to access sub-word array elements.
  // // Code for unpacked fields:
  // if (scale < wordSize)  return 0;

  // The following allows for a pretty general fieldOffset cookie scheme,
  // but requires it to be linear in byte offset.
  return field_offset_from_byte_offset(scale) - field_offset_from_byte_offset(0);
} UNSAFE_END
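
// Taken together, the two queries above support element addressing of the form
//   offsetOf(element i) = arrayBaseOffset(clazz) + (long)i * arrayIndexScale(clazz)
// which is how jdk.internal.misc.Unsafe clients are expected to compute element
// offsets (a sketch that relies on the linear cookie scheme noted above).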


static inline void throw_new(JNIEnv *env, const char *ename) {
  jclass cls = env->FindClass(ename);
  if (env->ExceptionCheck()) {
    env->ExceptionClear();
    tty->print_cr("Unsafe: cannot throw %s because FindClass has failed", ename);
    return;
  }

  env->ThrowNew(cls, NULL);
}

static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd) {
  // Code lifted from JDK 1.3 ClassLoader.c

  jbyte *body;
  char *utfName = NULL;
  jclass result = 0;
  char buf[128];

  assert(data != NULL, "Class bytes must not be NULL");
  assert(length >= 0, "length must not be negative: %d", length);

  if (UsePerfData) {
    ClassLoader::unsafe_defineClassCallCounter()->inc();
  }

  body = NEW_C_HEAP_ARRAY(jbyte, length, mtInternal);
  if (body == NULL) {
    throw_new(env, "java/lang/OutOfMemoryError");
    return 0;
  }

  env->GetByteArrayRegion(data, offset, length, body);
  if (env->ExceptionOccurred()) {
    goto free_body;
  }

  if (name != NULL) {
    uint len = env->GetStringUTFLength(name);
    int unicode_len = env->GetStringLength(name);

    if (len >= sizeof(buf)) {
      utfName = NEW_C_HEAP_ARRAY(char, len + 1, mtInternal);
      if (utfName == NULL) {
        throw_new(env, "java/lang/OutOfMemoryError");
        goto free_body;
      }
    } else {
      utfName = buf;
    }

    env->GetStringUTFRegion(name, 0, unicode_len, utfName);

    for (uint i = 0; i < len; i++) {
      if (utfName[i] == '.')   utfName[i] = '/';
    }
  }

  result = JVM_DefineClass(env, utfName, loader, body, length, pd);

  if (utfName && utfName != buf) {
    FREE_C_HEAP_ARRAY(char, utfName);
  }

 free_body:
  FREE_C_HEAP_ARRAY(jbyte, body);
  return result;
}


UNSAFE_ENTRY(jclass, Unsafe_DefineClass0(JNIEnv *env, jobject unsafe, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd)) {
  ThreadToNativeFromVM ttnfv(thread);

  return Unsafe_DefineClass_impl(env, name, data, offset, length, loader, pd);
} UNSAFE_END


// define a class but do not make it known to the class loader or system dictionary
// - host_class:  supplies context for linkage, access control, protection domain, and class loader
//                if host_class is itself anonymous then it is replaced with its host class.
// - data:  bytes of a class file, a raw memory address (length gives the number of bytes)
// - cp_patches:  where non-null entries exist, they replace corresponding CP entries in data

// When you load an anonymous class U, it works as if you changed its name just before loading,
// to a name that you will never use again.  Since the name is lost, no other class can directly
// link to any member of U.  Just after U is loaded, the only way to use it is reflectively,
// through java.lang.Class methods like Class.newInstance.

// The package of an anonymous class must either match its host class's package or be in the
// unnamed package.  If it is in the unnamed package then it will be put in its host class's
// package.
//

// Access checks for linkage sites within U continue to follow the same rules as for named classes.
// An anonymous class also has special privileges to access any member of its host class.
// This is the main reason why this loading operation is unsafe.  The purpose of this is to
// allow language implementations to simulate "open classes"; a host class in effect gets
// new code when an anonymous class is loaded alongside it.  A less convenient but more
// standard way to do this is with reflection, which can also be set to ignore access
// restrictions.

// Access into an anonymous class is possible only through reflection.  Therefore, there
// are no special access rules for calling into an anonymous class.  The relaxed access
// rule for the host class is applied in the opposite direction:  A host class can reflectively
// access one of its anonymous classes.

// If you load the same bytecodes twice, you get two different classes.  You can reload
// the same bytecodes with or without varying CP patches.

// By using the CP patching array, you can have a new anonymous class U2 refer to an older one U1.
// The bytecodes for U2 should refer to U1 by a symbolic name (doesn't matter what the name is).
// The CONSTANT_Class entry for that name can be patched to refer directly to U1.

// This allows, for example, U2 to use U1 as a superclass or super-interface, or as
// an outer class (so that U2 is an anonymous inner class of anonymous U1).
// It is not possible for a named class, or an older anonymous class, to refer by
// name (via its CP) to a newer anonymous class.

// CP patching may also be used to modify (i.e., hack) the names of methods, classes,
// or type descriptors used in the loaded anonymous class.

// Finally, CP patching may be used to introduce "live" objects into the constant pool,
// instead of "dead" strings.  A compiled statement like println((Object)"hello") can
// be changed to println(greeting), where greeting is an arbitrary object created before
// the anonymous class is loaded.  This is useful in dynamic languages, in which
// various kinds of metaobjects must be introduced as constants into bytecode.
// Note the cast (Object), which tells the verifier to expect an arbitrary object,
// not just a literal string.  For such ldc instructions, the verifier uses the
// type Object instead of String, if the loaded constant is not in fact a String.
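
// A hedged example of the patching mechanics described above (the index value
// and names are illustrative only): if constant-pool entry #5 of the class
// bytes is a CONSTANT_Class for a placeholder name such as "pkg/U1Placeholder",
// then passing a cp_patches array whose element at index 5 is U1's
// java.lang.Class makes the parsed class refer to U1 directly; likewise,
// placing a live object at the index of a CONSTANT_String entry replaces the
// "dead" string with that object.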

static InstanceKlass*
Unsafe_DefineAnonymousClass_impl(JNIEnv *env,
                                 jclass host_class, jbyteArray data, jobjectArray cp_patches_jh,
                                 u1** temp_alloc,
                                 TRAPS) {
  assert(host_class != NULL, "host_class must not be NULL");
  assert(data != NULL, "data must not be NULL");

  if (UsePerfData) {
    ClassLoader::unsafe_defineClassCallCounter()->inc();
  }

  jint length = typeArrayOop(JNIHandles::resolve_non_null(data))->length();
  assert(length >= 0, "class_bytes_length must not be negative: %d", length);

  int class_bytes_length = (int) length;

  u1* class_bytes = NEW_C_HEAP_ARRAY(u1, length, mtInternal);
  if (class_bytes == NULL) {
    THROW_0(vmSymbols::java_lang_OutOfMemoryError());
  }

  // caller responsible to free it:
  *temp_alloc = class_bytes;

  ArrayAccess<>::arraycopy_to_native(arrayOop(JNIHandles::resolve_non_null(data)), typeArrayOopDesc::element_offset<jbyte>(0),
                                     reinterpret_cast<jbyte*>(class_bytes), length);

  objArrayHandle cp_patches_h;
  if (cp_patches_jh != NULL) {
    oop p = JNIHandles::resolve_non_null(cp_patches_jh);
    assert(p->is_objArray(), "cp_patches must be an object[]");
    cp_patches_h = objArrayHandle(THREAD, (objArrayOop)p);
  }

  const Klass* host_klass = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(host_class));

  // Make sure it's the real host class, not another anonymous class.
  while (host_klass != NULL && host_klass->is_instance_klass() &&
         InstanceKlass::cast(host_klass)->is_unsafe_anonymous()) {
    host_klass = InstanceKlass::cast(host_klass)->unsafe_anonymous_host();
  }

  // Primitive types have NULL Klass* fields in their java.lang.Class instances.
  if (host_klass == NULL) {
    THROW_MSG_0(vmSymbols::java_lang_IllegalArgumentException(), "Host class is null");
  }

  assert(host_klass->is_instance_klass(), "Host class must be an instance class");

  const char* host_source = host_klass->external_name();
  Handle      host_loader(THREAD, host_klass->class_loader());
  Handle      host_domain(THREAD, host_klass->protection_domain());

  GrowableArray<Handle>* cp_patches = NULL;

  if (cp_patches_h.not_null()) {
    int alen = cp_patches_h->length();

    for (int i = alen-1; i >= 0; i--) {
      oop p = cp_patches_h->obj_at(i);
      if (p != NULL) {
        Handle patch(THREAD, p);

        if (cp_patches == NULL) {
          cp_patches = new GrowableArray<Handle>(i+1, i+1, Handle());
        }

        cp_patches->at_put(i, patch);
      }
    }
  }

  ClassFileStream st(class_bytes, class_bytes_length, host_source, ClassFileStream::verify);

  Symbol* no_class_name = NULL;
  Klass* anonk = SystemDictionary::parse_stream(no_class_name,
                                                host_loader,
                                                host_domain,
                                                &st,
                                                InstanceKlass::cast(host_klass),
                                                cp_patches,
                                                CHECK_NULL);
  if (anonk == NULL) {
    return NULL;
  }

  return InstanceKlass::cast(anonk);
}

UNSAFE_ENTRY(jclass, Unsafe_DefineAnonymousClass0(JNIEnv *env, jobject unsafe, jclass host_class, jbyteArray data, jobjectArray cp_patches_jh)) {
  ResourceMark rm(THREAD);

  jobject res_jh = NULL;
  u1* temp_alloc = NULL;

  InstanceKlass* anon_klass = Unsafe_DefineAnonymousClass_impl(env, host_class, data, cp_patches_jh, &temp_alloc, THREAD);
  if (anon_klass != NULL) {
    res_jh = JNIHandles::make_local(env, anon_klass->java_mirror());
  }

  // try/finally clause:
  if (temp_alloc != NULL) {
    FREE_C_HEAP_ARRAY(u1, temp_alloc);
  }

  // The anonymous class loader data has been artificially kept alive to
  // this point.  The mirror and any instances of this class have to keep
  // it alive afterwards.
  if (anon_klass != NULL) {
    anon_klass->class_loader_data()->dec_keep_alive();
  }

  // let caller initialize it as needed...

  return (jclass) res_jh;
} UNSAFE_END



UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
  ThreadToNativeFromVM ttnfv(thread);
  env->Throw(thr);
} UNSAFE_END

// JSR166 ------------------------------------------------------------------

UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop res = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  return JNIHandles::make_local(env, res);
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e);
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  }
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e);
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  }
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop ret = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  return oopDesc::equals(ret, e);
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e) == e;
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e) == e;
  }
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e) == e;
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e) == e;
  }
} UNSAFE_END

static void post_thread_park_event(EventThreadPark* event, const oop obj, jlong timeout_nanos, jlong until_epoch_millis) {
  assert(event != NULL, "invariant");
  assert(event->should_commit(), "invariant");
  event->set_parkedClass((obj != NULL) ? obj->klass() : NULL);
  event->set_timeout(timeout_nanos);
  event->set_until(until_epoch_millis);
  event->set_address((obj != NULL) ? (u8)cast_from_oop<uintptr_t>(obj) : 0);
  event->commit();
}

UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time)) {
  HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
  EventThreadPark event;

  JavaThreadParkedState jtps(thread, time != 0);
  thread->parker()->park(isAbsolute != 0, time);
  if (event.should_commit()) {
    const oop obj = thread->current_park_blocker();
    if (time == 0) {
      post_thread_park_event(&event, obj, min_jlong, min_jlong);
    } else {
      if (isAbsolute != 0) {
        post_thread_park_event(&event, obj, min_jlong, time);
      } else {
        post_thread_park_event(&event, obj, time, min_jlong);
      }
    }
  }
  HOTSPOT_THREAD_PARK_END((uintptr_t) thread->parker());
} UNSAFE_END
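
// For reference, the event-field mapping implemented above (min_jlong marks an
// unused field):
//   time == 0                -> timeout = min_jlong, until = min_jlong  (park without timeout)
//   time != 0 && isAbsolute  -> timeout = min_jlong, until = time       (deadline in epoch millis)
//   time != 0 && !isAbsolute -> timeout = time,      until = min_jlong  (relative timeout in nanos)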

UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
  Parker* p = NULL;

  if (jthread != NULL) {
    ThreadsListHandle tlh;
    JavaThread* thr = NULL;
    oop java_thread = NULL;
    (void) tlh.cv_internal_thread_to_JavaThread(jthread, &thr, &java_thread);
    if (java_thread != NULL) {
      // This is a valid oop.
      jlong lp = java_lang_Thread::park_event(java_thread);
      if (lp != 0) {
        // This cast is OK even though the jlong might have been read
        // non-atomically on 32-bit systems, since one word will always
        // be zero there anyway and the value set is always the same.
        p = (Parker*)addr_from_java(lp);
      } else {
        // Not cached in the java.lang.Thread oop yet (could be an
        // older version of library).
        if (thr != NULL) {
          // The JavaThread is alive.
          p = thr->parker();
          if (p != NULL) {
            // Cache the Parker in the java.lang.Thread oop for next time.
            java_lang_Thread::set_park_event(java_thread, addr_to_java(p));
          }
        }
      }
    }
  } // ThreadsListHandle is destroyed here.

  if (p != NULL) {
    HOTSPOT_THREAD_UNPARK((uintptr_t) p);
    p->unpark();
  }
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleArray loadavg, jint nelem)) {
  const int max_nelem = 3;
  double la[max_nelem];
  jint ret;

  typeArrayOop a = typeArrayOop(JNIHandles::resolve_non_null(loadavg));
  assert(a->is_typeArray(), "must be type array");

  ret = os::loadavg(la, nelem);
  if (ret == -1) {
    return -1;
  }

  // if successful, ret is the number of samples actually retrieved.
  assert(ret >= 0 && ret <= max_nelem, "Unexpected loadavg return value");
  switch(ret) {
    case 3: a->double_at_put(2, (jdouble)la[2]); // fall through
    case 2: a->double_at_put(1, (jdouble)la[1]); // fall through
    case 1: a->double_at_put(0, (jdouble)la[0]); break;
  }

  return ret;
} UNSAFE_END


/// JVM_RegisterUnsafeMethods

#define ADR "J"

#define LANG "Ljava/lang/"

#define OBJ LANG "Object;"
#define CLS LANG "Class;"
#define FLD LANG "reflect/Field;"
#define THR LANG "Throwable;"

#define DC_Args  LANG "String;[BII" LANG "ClassLoader;" "Ljava/security/ProtectionDomain;"
#define DAC_Args CLS "[B[" OBJ

#define CC (char*)  /*cast a literal from (const char*)*/
#define FN_PTR(f) CAST_FROM_FN_PTR(void*, &f)

#define DECLARE_GETPUTOOP(Type, Desc) \
    {CC "get"  #Type,      CC "(" OBJ "J)" #Desc,                 FN_PTR(Unsafe_Get##Type)}, \
    {CC "put"  #Type,      CC "(" OBJ "J" #Desc ")V",             FN_PTR(Unsafe_Put##Type)}, \
    {CC "get"  #Type "Volatile",      CC "(" OBJ "J)" #Desc,      FN_PTR(Unsafe_Get##Type##Volatile)}, \
    {CC "put"  #Type "Volatile",      CC "(" OBJ "J" #Desc ")V",  FN_PTR(Unsafe_Put##Type##Volatile)}

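// For example (a sketch of the expansion, not generated code),
// DECLARE_GETPUTOOP(Int, I) contributes these four entries to the table below:
//   {"getInt",         "(Ljava/lang/Object;J)I",   Unsafe_GetInt},
//   {"putInt",         "(Ljava/lang/Object;JI)V",  Unsafe_PutInt},
//   {"getIntVolatile", "(Ljava/lang/Object;J)I",   Unsafe_GetIntVolatile},
//   {"putIntVolatile", "(Ljava/lang/Object;JI)V",  Unsafe_PutIntVolatile}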

static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
    {CC "getReference",         CC "(" OBJ "J)" OBJ "",   FN_PTR(Unsafe_GetReference)},
    {CC "putReference",         CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReference)},
    {CC "getReferenceVolatile", CC "(" OBJ "J)" OBJ,      FN_PTR(Unsafe_GetReferenceVolatile)},
    {CC "putReferenceVolatile", CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReferenceVolatile)},

    {CC "isFlattenedArray", CC "(" CLS ")Z",                     FN_PTR(Unsafe_IsFlattenedArray)},
    {CC "getValue",         CC "(" OBJ "J" CLS ")" OBJ,          FN_PTR(Unsafe_GetValue)},
    {CC "putValue",         CC "(" OBJ "J" CLS OBJ ")V",         FN_PTR(Unsafe_PutValue)},
    {CC "uninitializedDefaultValue", CC "(" CLS ")" OBJ,         FN_PTR(Unsafe_UninitializedDefaultValue)},
    {CC "makePrivateBuffer",     CC "(" OBJ ")" OBJ,             FN_PTR(Unsafe_MakePrivateBuffer)},
    {CC "finishPrivateBuffer",   CC "(" OBJ ")" OBJ,             FN_PTR(Unsafe_FinishPrivateBuffer)},
    {CC "valueHeaderSize",       CC "(" CLS ")J",                FN_PTR(Unsafe_ValueHeaderSize)},

    {CC "getUncompressedObject", CC "(" ADR ")" OBJ,  FN_PTR(Unsafe_GetUncompressedObject)},

    DECLARE_GETPUTOOP(Boolean, Z),
    DECLARE_GETPUTOOP(Byte, B),
    DECLARE_GETPUTOOP(Short, S),
    DECLARE_GETPUTOOP(Char, C),
    DECLARE_GETPUTOOP(Int, I),
    DECLARE_GETPUTOOP(Long, J),
    DECLARE_GETPUTOOP(Float, F),
    DECLARE_GETPUTOOP(Double, D),

    {CC "allocateMemory0",    CC "(J)" ADR,              FN_PTR(Unsafe_AllocateMemory0)},
    {CC "reallocateMemory0",  CC "(" ADR "J)" ADR,       FN_PTR(Unsafe_ReallocateMemory0)},
    {CC "freeMemory0",        CC "(" ADR ")V",           FN_PTR(Unsafe_FreeMemory0)},

    {CC "objectFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_ObjectFieldOffset0)},
    {CC "objectFieldOffset1", CC "(" CLS LANG "String;)J", FN_PTR(Unsafe_ObjectFieldOffset1)},
    {CC "staticFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_StaticFieldOffset0)},
    {CC "staticFieldBase0",   CC "(" FLD ")" OBJ,        FN_PTR(Unsafe_StaticFieldBase0)},
    {CC "ensureClassInitialized0", CC "(" CLS ")V",      FN_PTR(Unsafe_EnsureClassInitialized0)},
    {CC "arrayBaseOffset0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayBaseOffset0)},
    {CC "arrayIndexScale0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayIndexScale0)},
    {CC "addressSize0",       CC "()I",                  FN_PTR(Unsafe_AddressSize0)},
    {CC "pageSize",           CC "()I",                  FN_PTR(Unsafe_PageSize)},

    {CC "defineClass0",       CC "(" DC_Args ")" CLS,    FN_PTR(Unsafe_DefineClass0)},
    {CC "allocateInstance",   CC "(" CLS ")" OBJ,        FN_PTR(Unsafe_AllocateInstance)},
    {CC "throwException",     CC "(" THR ")V",           FN_PTR(Unsafe_ThrowException)},
    {CC "compareAndSetReference",CC "(" OBJ "J" OBJ "" OBJ ")Z", FN_PTR(Unsafe_CompareAndSetReference)},
    {CC "compareAndSetInt",   CC "(" OBJ "J""I""I"")Z",  FN_PTR(Unsafe_CompareAndSetInt)},
    {CC "compareAndSetLong",  CC "(" OBJ "J""J""J"")Z",  FN_PTR(Unsafe_CompareAndSetLong)},
    {CC "compareAndExchangeReference", CC "(" OBJ "J" OBJ "" OBJ ")" OBJ, FN_PTR(Unsafe_CompareAndExchangeReference)},
    {CC "compareAndExchangeInt",  CC "(" OBJ "J""I""I"")I", FN_PTR(Unsafe_CompareAndExchangeInt)},
    {CC "compareAndExchangeLong", CC "(" OBJ "J""J""J"")J", FN_PTR(Unsafe_CompareAndExchangeLong)},

    {CC "park",               CC "(ZJ)V",                FN_PTR(Unsafe_Park)},
    {CC "unpark",             CC "(" OBJ ")V",           FN_PTR(Unsafe_Unpark)},

    {CC "getLoadAverage0",    CC "([DI)I",               FN_PTR(Unsafe_GetLoadAverage0)},

    {CC "copyMemory0",        CC "(" OBJ "J" OBJ "JJ)V", FN_PTR(Unsafe_CopyMemory0)},
    {CC "copySwapMemory0",    CC "(" OBJ "J" OBJ "JJJ)V", FN_PTR(Unsafe_CopySwapMemory0)},
    {CC "setMemory0",         CC "(" OBJ "JJB)V",        FN_PTR(Unsafe_SetMemory0)},

    {CC "defineAnonymousClass0", CC "(" DAC_Args ")" CLS, FN_PTR(Unsafe_DefineAnonymousClass0)},

    {CC "shouldBeInitialized0", CC "(" CLS ")Z",         FN_PTR(Unsafe_ShouldBeInitialized0)},

    {CC "loadFence",          CC "()V",                  FN_PTR(Unsafe_LoadFence)},
    {CC "storeFence",         CC "()V",                  FN_PTR(Unsafe_StoreFence)},
    {CC "fullFence",          CC "()V",                  FN_PTR(Unsafe_FullFence)},

    {CC "isBigEndian0",       CC "()Z",                  FN_PTR(Unsafe_isBigEndian0)},
    {CC "unalignedAccess0",   CC "()Z",                  FN_PTR(Unsafe_unalignedAccess0)}
};

#undef CC
#undef FN_PTR

#undef ADR
#undef LANG
#undef OBJ
#undef CLS
#undef FLD
#undef THR
#undef DC_Args
#undef DAC_Args

#undef DECLARE_GETPUTOOP


// This function is exported, used by NativeLookup.
// The Unsafe_xxx functions above are called only from the interpreter.
// The optimizer looks at names and signatures to recognize
// individual functions.

JVM_ENTRY(void, JVM_RegisterJDKInternalMiscUnsafeMethods(JNIEnv *env, jclass unsafeclass)) {
  ThreadToNativeFromVM ttnfv(thread);

  int ok = env->RegisterNatives(unsafeclass, jdk_internal_misc_Unsafe_methods, sizeof(jdk_internal_misc_Unsafe_methods)/sizeof(JNINativeMethod));
  guarantee(ok == 0, "register jdk.internal.misc.Unsafe natives");
} JVM_END