/*
 * Copyright (c) 2000, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "jni.h"
#include "jvm.h"
#include "classfile/classFileStream.hpp"
#include "classfile/classLoader.hpp"
#include "classfile/vmSymbols.hpp"
#include "jfr/jfrEvents.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/resourceArea.hpp"
#include "oops/access.inline.hpp"
#include "oops/fieldStreams.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/typeArrayOop.inline.hpp"
#include "prims/unsafe.hpp"
#include "runtime/atomic.hpp"
#include "runtime/globals.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/reflection.hpp"
#include "runtime/thread.hpp"
#include "runtime/threadSMR.hpp"
#include "runtime/vm_version.hpp"
#include "services/threadService.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"
#include "utilities/dtrace.hpp"
#include "utilities/macros.hpp"

/**
 * Implementation of the jdk.internal.misc.Unsafe class
 */


#define MAX_OBJECT_SIZE \
  ( arrayOopDesc::header_size(T_DOUBLE) * HeapWordSize \
    + ((julong)max_jint * sizeof(double)) )


#define UNSAFE_ENTRY(result_type, header) \
  JVM_ENTRY(static result_type, header)

#define UNSAFE_LEAF(result_type, header) \
  JVM_LEAF(static result_type, header)

#define UNSAFE_END JVM_END


static inline void* addr_from_java(jlong addr) {
  // This assert fails in a variety of ways on 32-bit systems.
  // It is impossible to predict whether native code that converts
  // pointers to longs will sign-extend or zero-extend the addresses.
  //assert(addr == (uintptr_t)addr, "must not be odd high bits");
  return (void*)(uintptr_t)addr;
}

static inline jlong addr_to_java(void* p) {
  assert(p == (void*)(uintptr_t)p, "must not be odd high bits");
  return (uintptr_t)p;
}


// Note: The VM's obj_field and related accessors use byte-scaled
// ("unscaled") offsets, just as the unsafe methods do.

// However, the method Unsafe.fieldOffset explicitly declines to
// guarantee this.  The field offset values manipulated by the Java user
// through the Unsafe API are opaque cookies that just happen to be byte
// offsets.  We represent this state of affairs by passing the cookies
// through conversion functions when going between the VM and the Unsafe API.
// The conversion functions just happen to be no-ops at present.

static inline jlong field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}

static inline jlong field_offset_from_byte_offset(jlong byte_offset) {
  return byte_offset;
}
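
// Purely illustrative (not part of the VM): if the cookie scheme ever stopped being
// the identity, these two helpers would still need to be exact inverses on valid
// offsets, e.g. a hypothetical encoding such as
//
//   static inline jlong field_offset_from_byte_offset(jlong byte_offset) {
//     return byte_offset + 0x10000;   // encode byte offset as an opaque cookie
//   }
//   static inline jlong field_offset_to_byte_offset(jlong field_offset) {
//     return field_offset - 0x10000;  // decode the cookie back to a byte offset
//   }
//
// would still satisfy the Unsafe contract, because Java callers must treat the
// returned values as opaque cookies.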

static inline void assert_field_offset_sane(oop p, jlong field_offset) {
#ifdef ASSERT
  jlong byte_offset = field_offset_to_byte_offset(field_offset);

  if (p != NULL) {
    assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
    if (byte_offset == (jint)byte_offset) {
      void* ptr_plus_disp = (address)p + byte_offset;
      assert(p->field_addr_raw((jint)byte_offset) == ptr_plus_disp,
             "raw [ptr+disp] must be consistent with oop::field_addr_raw");
    }
    jlong p_size = HeapWordSize * (jlong)(p->size());
    assert(byte_offset < p_size, "Unsafe access: offset " INT64_FORMAT " >= object's size " INT64_FORMAT, (int64_t)byte_offset, (int64_t)p_size);
  }
#endif
}

static inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
  assert_field_offset_sane(p, field_offset);
  jlong byte_offset = field_offset_to_byte_offset(field_offset);

  if (p != NULL) {
    p = Access<>::resolve(p);
  }

  if (sizeof(char*) == sizeof(jint)) {   // (this constant folds!)
    return (address)p + (jint) byte_offset;
  } else {
    return (address)p +        byte_offset;
  }
}
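
// Worked example (illustrative only; obj and some_native_addr are placeholders,
// nothing here is executed by the VM):
//
//   oop   obj   = ...;                                      // some heap object
//   jlong field = Unsafe_field_offset_from_byte_offset(16);  // cookie for byte offset 16
//   void* a1    = index_oop_from_field_offset_long(obj, field);  // == (address)obj + 16
//   void* a2    = index_oop_from_field_offset_long(NULL, some_native_addr);
//                                                           // NULL base: the "offset" is
//                                                           // an absolute native address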

// Externally callable versions:
// (Use these in compiler intrinsics which emulate unsafe primitives.)
jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}
jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
  return byte_offset;
}


///// Data read/writes on the Java heap and in native (off-heap) memory

/**
 * Helper class for accessing memory.
 *
 * Normalizes values and wraps accesses in
 * JavaThread::doing_unsafe_access() if needed.
 */
template <typename T>
class MemoryAccess : StackObj {
  JavaThread* _thread;
  oop _obj;
  ptrdiff_t _offset;

  // Resolves and returns the address of the memory access.
  // This raw memory access may fault, so we make sure it happens within the
  // guarded scope by making the access volatile at least. Since the store
  // of Thread::set_doing_unsafe_access() is also volatile, these accesses
  // cannot be reordered by the compiler. Therefore, if the access triggers
  // a fault, we will know that Thread::doing_unsafe_access() returns true.
  volatile T* addr() {
    void* addr = index_oop_from_field_offset_long(_obj, _offset);
    return static_cast<volatile T*>(addr);
  }

  template <typename U>
  U normalize_for_write(U x) {
    return x;
  }

  jboolean normalize_for_write(jboolean x) {
    return x & 1;
  }

  template <typename U>
  U normalize_for_read(U x) {
    return x;
  }

  jboolean normalize_for_read(jboolean x) {
    return x != 0;
  }
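
  // Illustrative note on the jboolean overloads above: if a caller managed to hand
  // the VM the byte value 0x02 as a jboolean, normalize_for_write stores 0x02 & 1 == 0,
  // and any non-zero byte seen by normalize_for_read comes back as 1 (true), so Java
  // code only ever observes the canonical jboolean values 0 and 1.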

  /**
   * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
   */
  class GuardUnsafeAccess {
    JavaThread* _thread;

  public:
    GuardUnsafeAccess(JavaThread* thread) : _thread(thread) {
      // native/off-heap access which may raise SIGBUS if accessing
      // memory mapped file data in a region of the file which has
      // been truncated and is now invalid
      _thread->set_doing_unsafe_access(true);
    }

    ~GuardUnsafeAccess() {
      _thread->set_doing_unsafe_access(false);
    }
  };

public:
  MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
    : _thread(thread), _obj(JNIHandles::resolve(obj)), _offset((ptrdiff_t)offset) {
    assert_field_offset_sane(_obj, offset);
  }

  T get() {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      T ret = RawAccess<>::load(addr());
      return normalize_for_read(ret);
    } else {
      T ret = HeapAccess<>::load_at(_obj, _offset);
      return normalize_for_read(ret);
    }
  }

  void put(T x) {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      RawAccess<>::store(addr(), normalize_for_write(x));
    } else {
      HeapAccess<>::store_at(_obj, _offset, normalize_for_write(x));
    }
  }


  T get_volatile() {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      volatile T ret = RawAccess<MO_SEQ_CST>::load(addr());
      return normalize_for_read(ret);
    } else {
      T ret = HeapAccess<MO_SEQ_CST>::load_at(_obj, _offset);
      return normalize_for_read(ret);
    }
  }

  void put_volatile(T x) {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
    } else {
      HeapAccess<MO_SEQ_CST>::store_at(_obj, _offset, normalize_for_write(x));
    }
  }
};

// These functions allow a null base pointer with an arbitrary address.
// But if the base pointer is non-null, the offset should make some sense.
// That is, it should be in the range [0, MAX_OBJECT_SIZE].
UNSAFE_ENTRY(jobject, Unsafe_GetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(env, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  HeapAccess<ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(env, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(env, v);
} UNSAFE_END

UNSAFE_LEAF(jboolean, Unsafe_isBigEndian0(JNIEnv *env, jobject unsafe)) {
#ifdef VM_LITTLE_ENDIAN
  return false;
#else
  return true;
#endif
} UNSAFE_END

UNSAFE_LEAF(jint, Unsafe_unalignedAccess0(JNIEnv *env, jobject unsafe)) {
  return UseUnalignedAccesses;
} UNSAFE_END

#define DEFINE_GETSETOOP(java_type, Type) \
 \
UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP.

DEFINE_GETSETOOP(jboolean, Boolean)
DEFINE_GETSETOOP(jbyte, Byte)
DEFINE_GETSETOOP(jshort, Short);
DEFINE_GETSETOOP(jchar, Char);
DEFINE_GETSETOOP(jint, Int);
DEFINE_GETSETOOP(jlong, Long);
DEFINE_GETSETOOP(jfloat, Float);
DEFINE_GETSETOOP(jdouble, Double);

#undef DEFINE_GETSETOOP
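
// For reference, DEFINE_GETSETOOP(jint, Int) above expands (modulo the UNSAFE_ENTRY /
// JVM_ENTRY plumbing) to a pair of natives equivalent to:
//
//   UNSAFE_ENTRY(jint, Unsafe_GetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
//     return MemoryAccess<jint>(thread, obj, offset).get();
//   } UNSAFE_END
//
//   UNSAFE_ENTRY(void, Unsafe_PutInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint x)) {
//     MemoryAccess<jint>(thread, obj, offset).put(x);
//   } UNSAFE_END
//
// which are registered further down as the implementations of
// jdk.internal.misc.Unsafe.getInt and putInt.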

#define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
 \
UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP_VOLATILE.

DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
DEFINE_GETSETOOP_VOLATILE(jshort, Short);
DEFINE_GETSETOOP_VOLATILE(jchar, Char);
DEFINE_GETSETOOP_VOLATILE(jint, Int);
DEFINE_GETSETOOP_VOLATILE(jlong, Long);
DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);

#undef DEFINE_GETSETOOP_VOLATILE

UNSAFE_LEAF(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::acquire();
} UNSAFE_END

UNSAFE_LEAF(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::release();
} UNSAFE_END

UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::fence();
} UNSAFE_END

////// Allocation requests

UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
  ThreadToNativeFromVM ttnfv(thread);
  return env->AllocObject(cls);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
  size_t sz = (size_t)size;

  sz = align_up(sz, HeapWordSize);
  void* x = os::malloc(sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
  void* p = addr_from_java(addr);
  size_t sz = (size_t)size;
  sz = align_up(sz, HeapWordSize);

  void* x = os::realloc(p, sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
  void* p = addr_from_java(addr);

  os::free(p);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
  size_t sz = (size_t)size;

  oop base = JNIHandles::resolve(obj);
  void* p = index_oop_from_field_offset_long(base, offset);

  Copy::fill_to_memory_atomic(p, sz, value);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
  size_t sz = (size_t)size;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  void* src = index_oop_from_field_offset_long(srcp, srcOffset);
  void* dst = index_oop_from_field_offset_long(dstp, dstOffset);

  Copy::conjoint_memory_atomic(src, dst, sz);
} UNSAFE_END

// This function is a leaf since, if the source and destination are both in native memory,
// the copy may potentially be very large and we don't want to disable GC if we can avoid it.
// If either the source or the destination (or both) is on the heap, the function enters the VM
// using JVM_ENTRY_FROM_LEAF.
UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
  size_t sz = (size_t)size;
  size_t esz = (size_t)elemSize;

  if (srcObj == NULL && dstObj == NULL) {
    // Both src & dst are in native memory
    address src = (address)srcOffset;
    address dst = (address)dstOffset;

    Copy::conjoint_swap(src, dst, sz, esz);
  } else {
    // At least one of src/dst is on the heap; transition to the VM to access raw pointers

    JVM_ENTRY_FROM_LEAF(env, void, Unsafe_CopySwapMemory0) {
      oop srcp = JNIHandles::resolve(srcObj);
      oop dstp = JNIHandles::resolve(dstObj);

      address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
      address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);

      Copy::conjoint_swap(src, dst, sz, esz);
    } JVM_END
  }
} UNSAFE_END

////// Random queries

UNSAFE_LEAF(jint, Unsafe_AddressSize0(JNIEnv *env, jobject unsafe)) {
  return sizeof(void*);
} UNSAFE_END

UNSAFE_LEAF(jint, Unsafe_PageSize()) {
  return os::vm_page_size();
} UNSAFE_END

static jlong find_field_offset(jclass clazz, jstring name, TRAPS) {
  assert(clazz != NULL, "clazz must not be NULL");
  assert(name != NULL, "name must not be NULL");

  ResourceMark rm(THREAD);
  char *utf_name = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(name));

  InstanceKlass* k = InstanceKlass::cast(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(clazz)));

  jint offset = -1;
  for (JavaFieldStream fs(k); !fs.done(); fs.next()) {
    Symbol *name = fs.name();
    if (name->equals(utf_name)) {
      offset = fs.offset();
      break;
    }
  }
  if (offset < 0) {
    THROW_0(vmSymbols::java_lang_InternalError());
  }
  return field_offset_from_byte_offset(offset);
}

static jlong find_field_offset(jobject field, int must_be_static, TRAPS) {
  assert(field != NULL, "field must not be NULL");

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  Klass* k        = java_lang_Class::as_Klass(mirror);
  int slot        = java_lang_reflect_Field::slot(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  if (must_be_static >= 0) {
    int really_is_static = ((modifiers & JVM_ACC_STATIC) != 0);
    if (must_be_static != really_is_static) {
      THROW_0(vmSymbols::java_lang_IllegalArgumentException());
    }
  }

  int offset = InstanceKlass::cast(k)->field_offset(slot);
  return field_offset_from_byte_offset(offset);
}

UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 0, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset1(JNIEnv *env, jobject unsafe, jclass c, jstring name)) {
  return find_field_offset(c, name, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 1, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != NULL, "field must not be NULL");

  // Note:  In this VM implementation, a field address is always a short
  // offset from the base of a klass metaobject.  Thus, the full dynamic
  // range of the return type is never used.  However, some implementations
  // might put the static field inside an array shared by many classes,
  // or even at a fixed address, in which case the address could be quite
  // large.  In that last case, this function would return NULL, since
  // the address would operate alone, without any base pointer.

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  if ((modifiers & JVM_ACC_STATIC) == 0) {
    THROW_0(vmSymbols::java_lang_IllegalArgumentException());
  }

  return JNIHandles::make_local(env, mirror);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != NULL, "clazz must not be NULL");

  oop mirror = JNIHandles::resolve_non_null(clazz);

  Klass* klass = java_lang_Class::as_Klass(mirror);
  if (klass != NULL && klass->should_be_initialized()) {
    InstanceKlass* k = InstanceKlass::cast(klass);
    k->initialize(CHECK);
  }
}
UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != NULL, "clazz must not be NULL");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* klass = java_lang_Class::as_Klass(mirror);

  if (klass != NULL && klass->should_be_initialized()) {
    return true;
  }

  return false;
}
UNSAFE_END

static void getBaseAndScale(int& base, int& scale, jclass clazz, TRAPS) {
  assert(clazz != NULL, "clazz must not be NULL");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* k = java_lang_Class::as_Klass(mirror);

  if (k == NULL || !k->is_array_klass()) {
    THROW(vmSymbols::java_lang_InvalidClassException());
  } else if (k->is_objArray_klass()) {
    base  = arrayOopDesc::base_offset_in_bytes(T_OBJECT);
    scale = heapOopSize;
  } else if (k->is_typeArray_klass()) {
    TypeArrayKlass* tak = TypeArrayKlass::cast(k);
    base  = tak->array_header_in_bytes();
    assert(base == arrayOopDesc::base_offset_in_bytes(tak->element_type()), "array_header_size semantics ok");
    scale = (1 << tak->log2_element_size());
  } else {
    ShouldNotReachHere();
  }
}

UNSAFE_ENTRY(jint, Unsafe_ArrayBaseOffset0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  return field_offset_from_byte_offset(base);
} UNSAFE_END


UNSAFE_ENTRY(jint, Unsafe_ArrayIndexScale0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  // This VM packs both fields and array elements down to the byte.
  // But watch out:  If this changes, so that array references for
  // a given primitive type (say, T_BOOLEAN) use different memory units
  // than fields, this method MUST return zero for such arrays.
  // For example, the VM used to store sub-word sized fields in full
  // words in the object layout, so that accessors like getByte(Object,int)
  // did not really do what one might expect for arrays.  Therefore,
  // this function used to report a zero scale factor, so that the user
  // would know not to attempt to access sub-word array elements.
  // // Code for unpacked fields:
  // if (scale < wordSize)  return 0;

  // The following allows for a pretty general fieldOffset cookie scheme,
  // but requires it to be linear in byte offset.
  return field_offset_from_byte_offset(scale) - field_offset_from_byte_offset(0);
} UNSAFE_END
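
// Typical Java-side use of the two queries above (illustrative; the concrete numbers
// depend on the platform, object header size and compressed-oops settings):
//
//   long base  = unsafe.arrayBaseOffset(long[].class);   // e.g. 16 on a common 64-bit layout
//   long scale = unsafe.arrayIndexScale(long[].class);   // 8 for jlong elements
//   long v     = unsafe.getLong(array, base + (long)i * scale);   // reads array[i]
//
// The addressing is linear in the index precisely because the fieldOffset cookie scheme
// is required to be linear in byte offset.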


static inline void throw_new(JNIEnv *env, const char *ename) {
  jclass cls = env->FindClass(ename);
  if (env->ExceptionCheck()) {
    env->ExceptionClear();
    tty->print_cr("Unsafe: cannot throw %s because FindClass has failed", ename);
    return;
  }

  env->ThrowNew(cls, NULL);
}

static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd) {
  // Code lifted from JDK 1.3 ClassLoader.c

  jbyte *body;
  char *utfName = NULL;
  jclass result = 0;
  char buf[128];

  assert(data != NULL, "Class bytes must not be NULL");
  assert(length >= 0, "length must not be negative: %d", length);

  if (UsePerfData) {
    ClassLoader::unsafe_defineClassCallCounter()->inc();
  }

  body = NEW_C_HEAP_ARRAY(jbyte, length, mtInternal);
  if (body == NULL) {
    throw_new(env, "java/lang/OutOfMemoryError");
    return 0;
  }

  env->GetByteArrayRegion(data, offset, length, body);
  if (env->ExceptionOccurred()) {
    goto free_body;
  }

  if (name != NULL) {
    uint len = env->GetStringUTFLength(name);
    int unicode_len = env->GetStringLength(name);

    if (len >= sizeof(buf)) {
      utfName = NEW_C_HEAP_ARRAY(char, len + 1, mtInternal);
      if (utfName == NULL) {
        throw_new(env, "java/lang/OutOfMemoryError");
        goto free_body;
      }
    } else {
      utfName = buf;
    }

    env->GetStringUTFRegion(name, 0, unicode_len, utfName);

    for (uint i = 0; i < len; i++) {
      if (utfName[i] == '.')   utfName[i] = '/';
    }
  }

  result = JVM_DefineClass(env, utfName, loader, body, length, pd);

  if (utfName && utfName != buf) {
    FREE_C_HEAP_ARRAY(char, utfName);
  }

 free_body:
  FREE_C_HEAP_ARRAY(jbyte, body);
  return result;
}


UNSAFE_ENTRY(jclass, Unsafe_DefineClass0(JNIEnv *env, jobject unsafe, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd)) {
  ThreadToNativeFromVM ttnfv(thread);

  return Unsafe_DefineClass_impl(env, name, data, offset, length, loader, pd);
} UNSAFE_END


// define a class but do not make it known to the class loader or system dictionary
// - host_class:  supplies context for linkage, access control, protection domain, and class loader
//                if host_class is itself anonymous then it is replaced with its host class.
// - data:  bytes of a class file, a raw memory address (length gives the number of bytes)
// - cp_patches:  where non-null entries exist, they replace corresponding CP entries in data

// When you load an anonymous class U, it works as if you changed its name just before loading,
// to a name that you will never use again.  Since the name is lost, no other class can directly
// link to any member of U.  Just after U is loaded, the only way to use it is reflectively,
// through java.lang.Class methods like Class.newInstance.

// The package of an anonymous class must either match its host class's package or be in the
// unnamed package.  If it is in the unnamed package then it will be put in its host class's
// package.
//

// Access checks for linkage sites within U continue to follow the same rules as for named classes.
// An anonymous class also has special privileges to access any member of its host class.
// This is the main reason why this loading operation is unsafe.  The purpose of this is to
// allow language implementations to simulate "open classes"; a host class in effect gets
// new code when an anonymous class is loaded alongside it.  A less convenient but more
// standard way to do this is with reflection, which can also be set to ignore access
// restrictions.

// Access into an anonymous class is possible only through reflection.  Therefore, there
// are no special access rules for calling into an anonymous class.  The relaxed access
// rule for the host class is applied in the opposite direction:  A host class can reflectively
// access one of its anonymous classes.

// If you load the same bytecodes twice, you get two different classes.  You can reload
// the same bytecodes with or without varying CP patches.

// By using the CP patching array, you can have a new anonymous class U2 refer to an older one U1.
// The bytecodes for U2 should refer to U1 by a symbolic name (doesn't matter what the name is).
// The CONSTANT_Class entry for that name can be patched to refer directly to U1.

// This allows, for example, U2 to use U1 as a superclass or super-interface, or as
// an outer class (so that U2 is an anonymous inner class of anonymous U1).
// It is not possible for a named class, or an older anonymous class, to refer by
// name (via its CP) to a newer anonymous class.

// CP patching may also be used to modify (i.e., hack) the names of methods, classes,
// or type descriptors used in the loaded anonymous class.

// Finally, CP patching may be used to introduce "live" objects into the constant pool,
// instead of "dead" strings.  A compiled statement like println((Object)"hello") can
// be changed to println(greeting), where greeting is an arbitrary object created before
// the anonymous class is loaded.  This is useful in dynamic languages, in which
// various kinds of metaobjects must be introduced as constants into bytecode.
// Note the cast (Object), which tells the verifier to expect an arbitrary object,
// not just a literal string.  For such ldc instructions, the verifier uses the
// type Object instead of String, if the loaded constant is not in fact a String.
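// Illustrative Java-side sketch of the CP patching described above (the names, the byte
// array and the constant-pool index i are hypothetical, not JDK code):
//
//   byte[]   bytes   = ...;                     // class file whose CP entry #i is a placeholder String
//   Object[] patches = new Object[cpCount];     // indexed by constant-pool index
//   patches[i] = liveObject;                    // replace the "dead" string with a live object
//   Class<?> anon = unsafe.defineAnonymousClass(hostClass, bytes, patches);
//
// An ldc of CP entry #i in the loaded class then pushes liveObject directly, which is how
// dynamic-language runtimes introduce metaobjects as bytecode constants.
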
static InstanceKlass*
Unsafe_DefineAnonymousClass_impl(JNIEnv *env,
                                 jclass host_class, jbyteArray data, jobjectArray cp_patches_jh,
                                 u1** temp_alloc,
                                 TRAPS) {
  assert(host_class != NULL, "host_class must not be NULL");
  assert(data != NULL, "data must not be NULL");

  if (UsePerfData) {
    ClassLoader::unsafe_defineClassCallCounter()->inc();
  }

  jint length = typeArrayOop(JNIHandles::resolve_non_null(data))->length();
  assert(length >= 0, "class_bytes_length must not be negative: %d", length);

  int class_bytes_length = (int) length;

  u1* class_bytes = NEW_C_HEAP_ARRAY(u1, length, mtInternal);
  if (class_bytes == NULL) {
    THROW_0(vmSymbols::java_lang_OutOfMemoryError());
  }

  // caller responsible to free it:
  *temp_alloc = class_bytes;

  ArrayAccess<>::arraycopy_to_native(arrayOop(JNIHandles::resolve_non_null(data)), typeArrayOopDesc::element_offset<jbyte>(0),
                                     reinterpret_cast<jbyte*>(class_bytes), length);

  objArrayHandle cp_patches_h;
  if (cp_patches_jh != NULL) {
    oop p = JNIHandles::resolve_non_null(cp_patches_jh);
    assert(p->is_objArray(), "cp_patches must be an object[]");
    cp_patches_h = objArrayHandle(THREAD, (objArrayOop)p);
  }

  const Klass* host_klass = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(host_class));

  // Make sure it's the real host class, not another anonymous class.
  while (host_klass != NULL && host_klass->is_instance_klass() &&
         InstanceKlass::cast(host_klass)->is_unsafe_anonymous()) {
    host_klass = InstanceKlass::cast(host_klass)->unsafe_anonymous_host();
  }

  // Primitive types have NULL Klass* fields in their java.lang.Class instances.
  if (host_klass == NULL) {
    THROW_MSG_0(vmSymbols::java_lang_IllegalArgumentException(), "Host class is null");
  }

  assert(host_klass->is_instance_klass(), "Host class must be an instance class");

  const char* host_source = host_klass->external_name();
  Handle      host_loader(THREAD, host_klass->class_loader());
  Handle      host_domain(THREAD, host_klass->protection_domain());

  GrowableArray<Handle>* cp_patches = NULL;

  if (cp_patches_h.not_null()) {
    int alen = cp_patches_h->length();

    for (int i = alen-1; i >= 0; i--) {
      oop p = cp_patches_h->obj_at(i);
      if (p != NULL) {
        Handle patch(THREAD, p);

        if (cp_patches == NULL) {
          cp_patches = new GrowableArray<Handle>(i+1, i+1, Handle());
        }

        cp_patches->at_put(i, patch);
      }
    }
  }

  ClassFileStream st(class_bytes, class_bytes_length, host_source, ClassFileStream::verify);

  Symbol* no_class_name = NULL;
  Klass* anonk = SystemDictionary::parse_stream(no_class_name,
                                                host_loader,
                                                host_domain,
                                                &st,
                                                InstanceKlass::cast(host_klass),
                                                cp_patches,
                                                CHECK_NULL);
  if (anonk == NULL) {
    return NULL;
  }

  return InstanceKlass::cast(anonk);
}

UNSAFE_ENTRY(jclass, Unsafe_DefineAnonymousClass0(JNIEnv *env, jobject unsafe, jclass host_class, jbyteArray data, jobjectArray cp_patches_jh)) {
  ResourceMark rm(THREAD);

  jobject res_jh = NULL;
  u1* temp_alloc = NULL;

  InstanceKlass* anon_klass = Unsafe_DefineAnonymousClass_impl(env, host_class, data, cp_patches_jh, &temp_alloc, THREAD);
  if (anon_klass != NULL) {
    res_jh = JNIHandles::make_local(env, anon_klass->java_mirror());
  }

  // try/finally clause:
  if (temp_alloc != NULL) {
    FREE_C_HEAP_ARRAY(u1, temp_alloc);
  }

  // The anonymous class loader data has artificially been kept alive to
  // this point.  The mirror and any instances of this class have to keep
  // it alive afterwards.
  if (anon_klass != NULL) {
    anon_klass->class_loader_data()->dec_keep_alive();
  }

  // let caller initialize it as needed...

  return (jclass) res_jh;
} UNSAFE_END



UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
  ThreadToNativeFromVM ttnfv(thread);
  env->Throw(thr);
} UNSAFE_END

// JSR166 ------------------------------------------------------------------

UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop res = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  return JNIHandles::make_local(env, res);
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e);
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  }
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e);
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  }
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop ret = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  return oopDesc::equals(ret, e);
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e) == e;
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e) == e;
  }
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e) == e;
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e) == e;
  }
} UNSAFE_END

static void post_thread_park_event(EventThreadPark* event, const oop obj, jlong timeout_nanos, jlong until_epoch_millis) {
  assert(event != NULL, "invariant");
  assert(event->should_commit(), "invariant");
  event->set_parkedClass((obj != NULL) ? obj->klass() : NULL);
  event->set_timeout(timeout_nanos);
  event->set_until(until_epoch_millis);
  event->set_address((obj != NULL) ? (u8)cast_from_oop<uintptr_t>(obj) : 0);
  event->commit();
}

UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time)) {
  HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
  EventThreadPark event;

  JavaThreadParkedState jtps(thread, time != 0);
  thread->parker()->park(isAbsolute != 0, time);
  if (event.should_commit()) {
    const oop obj = thread->current_park_blocker();
    if (time == 0) {
      post_thread_park_event(&event, obj, min_jlong, min_jlong);
    } else {
      if (isAbsolute != 0) {
        post_thread_park_event(&event, obj, min_jlong, time);
      } else {
        post_thread_park_event(&event, obj, time, min_jlong);
      }
    }
  }
  HOTSPOT_THREAD_PARK_END((uintptr_t) thread->parker());
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
  Parker* p = NULL;

  if (jthread != NULL) {
    ThreadsListHandle tlh;
    JavaThread* thr = NULL;
    oop java_thread = NULL;
    (void) tlh.cv_internal_thread_to_JavaThread(jthread, &thr, &java_thread);
    if (java_thread != NULL) {
      // This is a valid oop.
      jlong lp = java_lang_Thread::park_event(java_thread);
      if (lp != 0) {
        // This cast is OK even though the jlong might have been read
        // non-atomically on 32-bit systems, since there, one word will
        // always be zero anyway and the value set is always the same.
        p = (Parker*)addr_from_java(lp);
      } else {
        // Not cached in the java.lang.Thread oop yet (could be an
        // older version of library).
        if (thr != NULL) {
          // The JavaThread is alive.
          p = thr->parker();
          if (p != NULL) {
            // Cache the Parker in the java.lang.Thread oop for next time.
            java_lang_Thread::set_park_event(java_thread, addr_to_java(p));
          }
        }
      }
    }
  } // ThreadsListHandle is destroyed here.

  if (p != NULL) {
    HOTSPOT_THREAD_UNPARK((uintptr_t) p);
    p->unpark();
  }
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleArray loadavg, jint nelem)) {
  const int max_nelem = 3;
  double la[max_nelem];
  jint ret;

  typeArrayOop a = typeArrayOop(JNIHandles::resolve_non_null(loadavg));
  assert(a->is_typeArray(), "must be type array");

  ret = os::loadavg(la, nelem);
  if (ret == -1) {
    return -1;
  }

  // if successful, ret is the number of samples actually retrieved.
  assert(ret >= 0 && ret <= max_nelem, "Unexpected loadavg return value");
  switch(ret) {
    case 3: a->double_at_put(2, (jdouble)la[2]); // fall through
    case 2: a->double_at_put(1, (jdouble)la[1]); // fall through
    case 1: a->double_at_put(0, (jdouble)la[0]); break;
  }

  return ret;
} UNSAFE_END


/// JVM_RegisterUnsafeMethods

#define ADR "J"

#define LANG "Ljava/lang/"

#define OBJ LANG "Object;"
#define CLS LANG "Class;"
#define FLD LANG "reflect/Field;"
#define THR LANG "Throwable;"

#define DC_Args  LANG "String;[BII" LANG "ClassLoader;" "Ljava/security/ProtectionDomain;"
#define DAC_Args CLS "[B[" OBJ

#define CC (char*)  /*cast a literal from (const char*)*/
#define FN_PTR(f) CAST_FROM_FN_PTR(void*, &f)

#define DECLARE_GETPUTOOP(Type, Desc) \
    {CC "get" #Type,      CC "(" OBJ "J)" #Desc,       FN_PTR(Unsafe_Get##Type)}, \
    {CC "put" #Type,      CC "(" OBJ "J" #Desc ")V",   FN_PTR(Unsafe_Put##Type)}, \
    {CC "get" #Type "Volatile",      CC "(" OBJ "J)" #Desc,       FN_PTR(Unsafe_Get##Type##Volatile)}, \
    {CC "put" #Type "Volatile",      CC "(" OBJ "J" #Desc ")V",   FN_PTR(Unsafe_Put##Type##Volatile)}


static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
    {CC "getReference",         CC "(" OBJ "J)" OBJ "",   FN_PTR(Unsafe_GetReference)},
    {CC "putReference",         CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReference)},
    {CC "getReferenceVolatile", CC "(" OBJ "J)" OBJ,      FN_PTR(Unsafe_GetReferenceVolatile)},
    {CC "putReferenceVolatile", CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReferenceVolatile)},

    {CC "getUncompressedObject", CC "(" ADR ")" OBJ,  FN_PTR(Unsafe_GetUncompressedObject)},

    DECLARE_GETPUTOOP(Boolean, Z),
    DECLARE_GETPUTOOP(Byte, B),
    DECLARE_GETPUTOOP(Short, S),
    DECLARE_GETPUTOOP(Char, C),
    DECLARE_GETPUTOOP(Int, I),
    DECLARE_GETPUTOOP(Long, J),
    DECLARE_GETPUTOOP(Float, F),
    DECLARE_GETPUTOOP(Double, D),

    {CC "allocateMemory0",    CC "(J)" ADR,              FN_PTR(Unsafe_AllocateMemory0)},
    {CC "reallocateMemory0",  CC "(" ADR "J)" ADR,       FN_PTR(Unsafe_ReallocateMemory0)},
    {CC "freeMemory0",        CC "(" ADR ")V",           FN_PTR(Unsafe_FreeMemory0)},

    {CC "objectFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_ObjectFieldOffset0)},
    {CC "objectFieldOffset1", CC "(" CLS LANG "String;)J", FN_PTR(Unsafe_ObjectFieldOffset1)},
    {CC "staticFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_StaticFieldOffset0)},
    {CC "staticFieldBase0",   CC "(" FLD ")" OBJ,        FN_PTR(Unsafe_StaticFieldBase0)},
    {CC "ensureClassInitialized0", CC "(" CLS ")V",      FN_PTR(Unsafe_EnsureClassInitialized0)},
    {CC "arrayBaseOffset0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayBaseOffset0)},
    {CC "arrayIndexScale0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayIndexScale0)},
    {CC "addressSize0",       CC "()I",                  FN_PTR(Unsafe_AddressSize0)},
    {CC "pageSize",           CC "()I",                  FN_PTR(Unsafe_PageSize)},

    {CC "defineClass0",       CC "(" DC_Args ")" CLS,    FN_PTR(Unsafe_DefineClass0)},
    {CC "allocateInstance",   CC "(" CLS ")" OBJ,        FN_PTR(Unsafe_AllocateInstance)},
    {CC "throwException",     CC "(" THR ")V",           FN_PTR(Unsafe_ThrowException)},
    {CC "compareAndSetReference",CC "(" OBJ "J" OBJ "" OBJ ")Z", FN_PTR(Unsafe_CompareAndSetReference)},
    {CC "compareAndSetInt",   CC "(" OBJ "J""I""I"")Z",  FN_PTR(Unsafe_CompareAndSetInt)},
    {CC "compareAndSetLong",  CC "(" OBJ "J""J""J"")Z",  FN_PTR(Unsafe_CompareAndSetLong)},
    {CC "compareAndExchangeReference", CC "(" OBJ "J" OBJ "" OBJ ")" OBJ, FN_PTR(Unsafe_CompareAndExchangeReference)},
    {CC "compareAndExchangeInt",  CC "(" OBJ "J""I""I"")I", FN_PTR(Unsafe_CompareAndExchangeInt)},
    {CC "compareAndExchangeLong", CC "(" OBJ "J""J""J"")J", FN_PTR(Unsafe_CompareAndExchangeLong)},

    {CC "park",               CC "(ZJ)V",                FN_PTR(Unsafe_Park)},
    {CC "unpark",             CC "(" OBJ ")V",           FN_PTR(Unsafe_Unpark)},

    {CC "getLoadAverage0",    CC "([DI)I",               FN_PTR(Unsafe_GetLoadAverage0)},

    {CC "copyMemory0",        CC "(" OBJ "J" OBJ "JJ)V", FN_PTR(Unsafe_CopyMemory0)},
    {CC "copySwapMemory0",    CC "(" OBJ "J" OBJ "JJJ)V", FN_PTR(Unsafe_CopySwapMemory0)},
    {CC "setMemory0",         CC "(" OBJ "JJB)V",        FN_PTR(Unsafe_SetMemory0)},

    {CC "defineAnonymousClass0", CC "(" DAC_Args ")" CLS, FN_PTR(Unsafe_DefineAnonymousClass0)},

    {CC "shouldBeInitialized0", CC "(" CLS ")Z",         FN_PTR(Unsafe_ShouldBeInitialized0)},

    {CC "loadFence",          CC "()V",                  FN_PTR(Unsafe_LoadFence)},
    {CC "storeFence",         CC "()V",                  FN_PTR(Unsafe_StoreFence)},
    {CC "fullFence",          CC "()V",                  FN_PTR(Unsafe_FullFence)},

    {CC "isBigEndian0",       CC "()Z",                  FN_PTR(Unsafe_isBigEndian0)},
    {CC "unalignedAccess0",   CC "()Z",                  FN_PTR(Unsafe_unalignedAccess0)}
};
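
// Reading one row of the table above (illustrative): DECLARE_GETPUTOOP(Int, I) contributes
//
//   {CC "getInt", CC "(" OBJ "J)I", FN_PTR(Unsafe_GetInt)},
//
// which binds jdk.internal.misc.Unsafe.getInt(Object, long), JNI descriptor
// "(Ljava/lang/Object;J)I", to the native Unsafe_GetInt defined earlier in this file.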

#undef CC
#undef FN_PTR

#undef ADR
#undef LANG
#undef OBJ
#undef CLS
#undef FLD
#undef THR
#undef DC_Args
#undef DAC_Args

#undef DECLARE_GETPUTOOP


// This function is exported, used by NativeLookup.
// The Unsafe_xxx functions above are called only from the interpreter.
// The optimizer looks at names and signatures to recognize
// individual functions.

JVM_ENTRY(void, JVM_RegisterJDKInternalMiscUnsafeMethods(JNIEnv *env, jclass unsafeclass)) {
  ThreadToNativeFromVM ttnfv(thread);

  int ok = env->RegisterNatives(unsafeclass, jdk_internal_misc_Unsafe_methods, sizeof(jdk_internal_misc_Unsafe_methods)/sizeof(JNINativeMethod));
  guarantee(ok == 0, "register jdk.internal.misc.Unsafe natives");
} JVM_END