< prev index next >

src/hotspot/share/prims/unsafe.cpp

Print this page




 248     }
 249   }
 250 
  // Store x with volatile (MO_SEQ_CST) semantics.  With a NULL base object
  // this is a raw store to the absolute address addr(); GuardUnsafeAccess
  // presumably marks the thread so a fault in the raw access can be handled
  // rather than crashing the VM -- confirm against its definition.  With a
  // non-NULL base it is a GC-aware store into _obj at _offset.
  void put_volatile(T x) {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
    } else {
      HeapAccess<MO_SEQ_CST>::store_at(_obj, _offset, normalize_for_write(x));
    }
  }
 259 };
 260 
 261 // These functions allow a null base pointer with an arbitrary address.
 262 // But if the base pointer is non-null, the offset should make some sense.
 263 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
 264 UNSAFE_ENTRY(jobject, Unsafe_GetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 265   oop p = JNIHandles::resolve(obj);
 266   assert_field_offset_sane(p, offset);
 267   oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
 268   return JNIHandles::make_local(env, v);
 269 } UNSAFE_END
 270 
 271 UNSAFE_ENTRY(void, Unsafe_PutReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 272   oop x = JNIHandles::resolve(x_h);
 273   oop p = JNIHandles::resolve(obj);
 274   assert_field_offset_sane(p, offset);
 275   HeapAccess<ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
 276 } UNSAFE_END
 277 
 278 UNSAFE_ENTRY(jobject, Unsafe_GetReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 279   oop p = JNIHandles::resolve(obj);
 280   assert_field_offset_sane(p, offset);
 281   oop v = HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
 282   return JNIHandles::make_local(env, v);
 283 } UNSAFE_END
 284 
 285 UNSAFE_ENTRY(void, Unsafe_PutReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 286   oop x = JNIHandles::resolve(x_h);
 287   oop p = JNIHandles::resolve(obj);
 288   assert_field_offset_sane(p, offset);
 289   HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
 290 } UNSAFE_END
 291 
// Read a full (uncompressed) oop directly from a raw native address.
// No guard or offset sanity check here: addr is trusted to point at a
// valid oop slot -- callers are responsible for that invariant.
UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(env, v);
} UNSAFE_END
 296 
// Generates the paired native accessors Unsafe_Get<Type>/Unsafe_Put<Type>
// for one primitive Java type, delegating to MemoryAccess<T>, which
// handles both on-heap (obj != NULL) and off-heap (obj == NULL) addressing.
// Note: the trailing backslashes splice the "END" comment line into the
// macro definition itself, so the definition ends only after that line.
#define DEFINE_GETSETOOP(java_type, Type) \
 \
UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP.
 308 
 309 DEFINE_GETSETOOP(jboolean, Boolean)
 310 DEFINE_GETSETOOP(jbyte, Byte)
 311 DEFINE_GETSETOOP(jshort, Short);
 312 DEFINE_GETSETOOP(jchar, Char);
 313 DEFINE_GETSETOOP(jint, Int);
 314 DEFINE_GETSETOOP(jlong, Long);


// Last volatile-accessor instantiation (the other primitive types are
// generated above this excerpt); the helper macro is dropped afterwards.
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);

#undef DEFINE_GETSETOOP_VOLATILE
 342 
// Memory fences backing Unsafe.loadFence/storeFence/fullFence.  These are
// LEAF entries: no state transition or safepoint check, just the barrier.

// loadFence -> acquire barrier (see OrderAccess::acquire).
UNSAFE_LEAF(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::acquire();
} UNSAFE_END

// storeFence -> release barrier (see OrderAccess::release).
UNSAFE_LEAF(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::release();
} UNSAFE_END

// fullFence -> full two-way barrier (see OrderAccess::fence).
UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::fence();
} UNSAFE_END
 354 
 355 ////// Allocation requests
 356 
// Allocate an uninitialized instance of cls without running any
// constructor.  CHECK_NULL returns NULL here with the exception pending if
// allocate_instance throws (e.g. on allocation failure).
UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
  instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(cls), CHECK_NULL);
  return JNIHandles::make_local(env, i);
} UNSAFE_END
 361 
// Raw off-heap allocation backing Unsafe.allocateMemory0.  The size is
// presumably range-checked by the Java-level wrapper (the "0" suffix marks
// a private native) -- here we only assert the alignment invariant.
UNSAFE_ENTRY(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::malloc(sz, mtOther);

  // On allocation failure x is NULL and 0 is returned; the Java caller is
  // expected to turn that into an OutOfMemoryError -- confirm against
  // jdk.internal.misc.Unsafe.allocateMemory.
  return addr_to_java(x);
} UNSAFE_END
 371 
 372 UNSAFE_ENTRY(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
 373   void* p = addr_from_java(addr);
 374   size_t sz = (size_t)size;
 375 
 376   assert(is_aligned(sz, HeapWordSize), "sz not aligned");
 377 
 378   void* x = os::realloc(p, sz, mtOther);
 379 


 548 
// Return the "base object" to pair with staticFieldOffset0 when accessing
// the given static field: in this VM, the field's holder class mirror.
// Throws IllegalArgumentException if the reflected field is not static.
UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != NULL, "field must not be NULL");

  // Note:  In this VM implementation, a field address is always a short
  // offset from the base of a klass metaobject.  Thus, the full dynamic
  // range of the return type is never used.  However, some implementations
  // might put the static field inside an array shared by many classes,
  // or even at a fixed address, in which case the address could be quite
  // large.  In that last case, this function would return NULL, since
  // the address would operate alone, without any base pointer.

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  // Only static fields have a meaningful static-field base.
  if ((modifiers & JVM_ACC_STATIC) == 0) {
    THROW_0(vmSymbols::java_lang_IllegalArgumentException());
  }

  return JNIHandles::make_local(env, mirror);
} UNSAFE_END
 570 
// Force initialization of the class denoted by the given mirror, if it has
// not been initialized yet.  Primitive-type mirrors yield a NULL Klass and
// are skipped.  initialize() may throw (e.g. ExceptionInInitializerError);
// CHECK propagates that to the caller.
UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != NULL, "clazz must not be NULL");

  oop mirror = JNIHandles::resolve_non_null(clazz);

  Klass* klass = java_lang_Class::as_Klass(mirror);
  if (klass != NULL && klass->should_be_initialized()) {
    InstanceKlass* k = InstanceKlass::cast(klass);
    k->initialize(CHECK);
  }
}
UNSAFE_END
 583 
 584 UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
 585   assert(clazz != NULL, "clazz must not be NULL");
 586 
 587   oop mirror = JNIHandles::resolve_non_null(clazz);
 588   Klass* klass = java_lang_Class::as_Klass(mirror);


 864   Klass* anonk = SystemDictionary::parse_stream(no_class_name,
 865                                                 host_loader,
 866                                                 &st,
 867                                                 cl_info,
 868                                                 CHECK_NULL);
 869   if (anonk == NULL) {
 870     return NULL;
 871   }
 872 
 873   return InstanceKlass::cast(anonk);
 874 }
 875 
// Define a VM-anonymous class from the given bytes, patched against the
// host class's constant pool.  Returns the new class's mirror, or NULL on
// failure (possibly with a pending exception from the impl helper).
UNSAFE_ENTRY(jclass, Unsafe_DefineAnonymousClass0(JNIEnv *env, jobject unsafe, jclass host_class, jbyteArray data, jobjectArray cp_patches_jh)) {
  ResourceMark rm(THREAD);

  jobject res_jh = NULL;
  u1* temp_alloc = NULL;

  // The impl helper hands back a C-heap buffer through temp_alloc that must
  // be freed on every path, including failure.
  InstanceKlass* anon_klass = Unsafe_DefineAnonymousClass_impl(env, host_class, data, cp_patches_jh, &temp_alloc, THREAD);
  if (anon_klass != NULL) {
    res_jh = JNIHandles::make_local(env, anon_klass->java_mirror());
  }

  // try/finally clause:
  FREE_C_HEAP_ARRAY(u1, temp_alloc);

  // The anonymous class loader data has been artificially kept alive to
  // this point.   The mirror and any instances of this class have to keep
  // it alive afterwards.
  if (anon_klass != NULL) {
    anon_klass->class_loader_data()->dec_keep_alive();
  }

  // let caller initialize it as needed...

  return (jclass) res_jh;
} UNSAFE_END
 901 
 902 
 903 
// Post the given throwable as the thread's pending exception.
// env->Throw is a JNI call, so we must first leave the in-VM state;
// ThreadToNativeFromVM performs (and on destruction undoes) that transition.
UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
  ThreadToNativeFromVM ttnfv(thread);
  env->Throw(thr);
} UNSAFE_END
 908 
 909 // JSR166 ------------------------------------------------------------------
 910 
// Atomically replace the reference at obj+offset with x if it currently
// equals e; returns the value observed at the location (== e on success).
// Unlike the primitive CAS entries below, a NULL base is not supported here.
UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop res = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
  return JNIHandles::make_local(env, res);
} UNSAFE_END
 919 
// Atomic compare-and-exchange of a jint; returns the value observed at the
// location (== e on success).
UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    // Off-heap: with a NULL base, offset denotes an absolute address.
    volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(addr, e, x);
  } else {
    // On-heap: CAS a field/array element within p.
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
  }
} UNSAFE_END
 930 
 931 UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
 932   oop p = JNIHandles::resolve(obj);
 933   if (p == NULL) {
 934     volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
 935     return RawAccess<>::atomic_cmpxchg(addr, e, x);
 936   } else {
 937     assert_field_offset_sane(p, offset);




 248     }
 249   }
 250 
  // Store x with volatile (MO_SEQ_CST) semantics.  With a NULL base object
  // this is a raw store to the absolute address addr(); GuardUnsafeAccess
  // presumably marks the thread so a fault in the raw access can be handled
  // rather than crashing the VM -- confirm against its definition.  With a
  // non-NULL base it is a GC-aware store into _obj at _offset.
  void put_volatile(T x) {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
    } else {
      HeapAccess<MO_SEQ_CST>::store_at(_obj, _offset, normalize_for_write(x));
    }
  }
 259 };
 260 
 261 // These functions allow a null base pointer with an arbitrary address.
 262 // But if the base pointer is non-null, the offset should make some sense.
 263 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
 264 UNSAFE_ENTRY(jobject, Unsafe_GetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 265   oop p = JNIHandles::resolve(obj);
 266   assert_field_offset_sane(p, offset);
 267   oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
 268   return JNIHandles::make_local(THREAD, v);
 269 } UNSAFE_END
 270 
 271 UNSAFE_ENTRY(void, Unsafe_PutReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 272   oop x = JNIHandles::resolve(x_h);
 273   oop p = JNIHandles::resolve(obj);
 274   assert_field_offset_sane(p, offset);
 275   HeapAccess<ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
 276 } UNSAFE_END
 277 
 278 UNSAFE_ENTRY(jobject, Unsafe_GetReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 279   oop p = JNIHandles::resolve(obj);
 280   assert_field_offset_sane(p, offset);
 281   oop v = HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
 282   return JNIHandles::make_local(THREAD, v);
 283 } UNSAFE_END
 284 
 285 UNSAFE_ENTRY(void, Unsafe_PutReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 286   oop x = JNIHandles::resolve(x_h);
 287   oop p = JNIHandles::resolve(obj);
 288   assert_field_offset_sane(p, offset);
 289   HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
 290 } UNSAFE_END
 291 
// Read a full (uncompressed) oop directly from a raw native address.
// No guard or offset sanity check here: addr is trusted to point at a
// valid oop slot -- callers are responsible for that invariant.
UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END
 296 
// Generates the paired native accessors Unsafe_Get<Type>/Unsafe_Put<Type>
// for one primitive Java type, delegating to MemoryAccess<T>, which
// handles both on-heap (obj != NULL) and off-heap (obj == NULL) addressing.
// Note: the trailing backslashes splice the "END" comment line into the
// macro definition itself, so the definition ends only after that line.
#define DEFINE_GETSETOOP(java_type, Type) \
 \
UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP.
 308 
 309 DEFINE_GETSETOOP(jboolean, Boolean)
 310 DEFINE_GETSETOOP(jbyte, Byte)
 311 DEFINE_GETSETOOP(jshort, Short);
 312 DEFINE_GETSETOOP(jchar, Char);
 313 DEFINE_GETSETOOP(jint, Int);
 314 DEFINE_GETSETOOP(jlong, Long);


// Last volatile-accessor instantiation (the other primitive types are
// generated above this excerpt); the helper macro is dropped afterwards.
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);

#undef DEFINE_GETSETOOP_VOLATILE
 342 
// Memory fences backing Unsafe.loadFence/storeFence/fullFence.  These are
// LEAF entries: no state transition or safepoint check, just the barrier.

// loadFence -> acquire barrier (see OrderAccess::acquire).
UNSAFE_LEAF(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::acquire();
} UNSAFE_END

// storeFence -> release barrier (see OrderAccess::release).
UNSAFE_LEAF(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::release();
} UNSAFE_END

// fullFence -> full two-way barrier (see OrderAccess::fence).
UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::fence();
} UNSAFE_END
 354 
 355 ////// Allocation requests
 356 
// Allocate an uninitialized instance of cls without running any
// constructor.  CHECK_NULL returns NULL here with the exception pending if
// allocate_instance throws (e.g. on allocation failure).
UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
  instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(cls), CHECK_NULL);
  return JNIHandles::make_local(THREAD, i);
} UNSAFE_END
 361 
// Raw off-heap allocation backing Unsafe.allocateMemory0.  The size is
// presumably range-checked by the Java-level wrapper (the "0" suffix marks
// a private native) -- here we only assert the alignment invariant.
UNSAFE_ENTRY(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::malloc(sz, mtOther);

  // On allocation failure x is NULL and 0 is returned; the Java caller is
  // expected to turn that into an OutOfMemoryError -- confirm against
  // jdk.internal.misc.Unsafe.allocateMemory.
  return addr_to_java(x);
} UNSAFE_END
 371 
 372 UNSAFE_ENTRY(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
 373   void* p = addr_from_java(addr);
 374   size_t sz = (size_t)size;
 375 
 376   assert(is_aligned(sz, HeapWordSize), "sz not aligned");
 377 
 378   void* x = os::realloc(p, sz, mtOther);
 379 


 548 
// Return the "base object" to pair with staticFieldOffset0 when accessing
// the given static field: in this VM, the field's holder class mirror.
// Throws IllegalArgumentException if the reflected field is not static.
UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != NULL, "field must not be NULL");

  // Note:  In this VM implementation, a field address is always a short
  // offset from the base of a klass metaobject.  Thus, the full dynamic
  // range of the return type is never used.  However, some implementations
  // might put the static field inside an array shared by many classes,
  // or even at a fixed address, in which case the address could be quite
  // large.  In that last case, this function would return NULL, since
  // the address would operate alone, without any base pointer.

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  // Only static fields have a meaningful static-field base.
  if ((modifiers & JVM_ACC_STATIC) == 0) {
    THROW_0(vmSymbols::java_lang_IllegalArgumentException());
  }

  return JNIHandles::make_local(THREAD, mirror);
} UNSAFE_END
 570 
// Force initialization of the class denoted by the given mirror, if it has
// not been initialized yet.  Primitive-type mirrors yield a NULL Klass and
// are skipped.  initialize() may throw (e.g. ExceptionInInitializerError);
// CHECK propagates that to the caller.
UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != NULL, "clazz must not be NULL");

  oop mirror = JNIHandles::resolve_non_null(clazz);

  Klass* klass = java_lang_Class::as_Klass(mirror);
  if (klass != NULL && klass->should_be_initialized()) {
    InstanceKlass* k = InstanceKlass::cast(klass);
    k->initialize(CHECK);
  }
}
UNSAFE_END
 583 
 584 UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
 585   assert(clazz != NULL, "clazz must not be NULL");
 586 
 587   oop mirror = JNIHandles::resolve_non_null(clazz);
 588   Klass* klass = java_lang_Class::as_Klass(mirror);


 864   Klass* anonk = SystemDictionary::parse_stream(no_class_name,
 865                                                 host_loader,
 866                                                 &st,
 867                                                 cl_info,
 868                                                 CHECK_NULL);
 869   if (anonk == NULL) {
 870     return NULL;
 871   }
 872 
 873   return InstanceKlass::cast(anonk);
 874 }
 875 
// Define a VM-anonymous class from the given bytes, patched against the
// host class's constant pool.  Returns the new class's mirror, or NULL on
// failure (possibly with a pending exception from the impl helper).
UNSAFE_ENTRY(jclass, Unsafe_DefineAnonymousClass0(JNIEnv *env, jobject unsafe, jclass host_class, jbyteArray data, jobjectArray cp_patches_jh)) {
  ResourceMark rm(THREAD);

  jobject res_jh = NULL;
  u1* temp_alloc = NULL;

  // The impl helper hands back a C-heap buffer through temp_alloc that must
  // be freed on every path, including failure.
  InstanceKlass* anon_klass = Unsafe_DefineAnonymousClass_impl(env, host_class, data, cp_patches_jh, &temp_alloc, THREAD);
  if (anon_klass != NULL) {
    res_jh = JNIHandles::make_local(THREAD, anon_klass->java_mirror());
  }

  // try/finally clause:
  FREE_C_HEAP_ARRAY(u1, temp_alloc);

  // The anonymous class loader data has been artificially kept alive to
  // this point.   The mirror and any instances of this class have to keep
  // it alive afterwards.
  if (anon_klass != NULL) {
    anon_klass->class_loader_data()->dec_keep_alive();
  }

  // let caller initialize it as needed...

  return (jclass) res_jh;
} UNSAFE_END
 901 
 902 
 903 
// Post the given throwable as the thread's pending exception.
// env->Throw is a JNI call, so we must first leave the in-VM state;
// ThreadToNativeFromVM performs (and on destruction undoes) that transition.
UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
  ThreadToNativeFromVM ttnfv(thread);
  env->Throw(thr);
} UNSAFE_END
 908 
 909 // JSR166 ------------------------------------------------------------------
 910 
// Atomically replace the reference at obj+offset with x if it currently
// equals e; returns the value observed at the location (== e on success).
// Unlike the primitive CAS entries below, a NULL base is not supported here.
UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop res = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
  return JNIHandles::make_local(THREAD, res);
} UNSAFE_END
 919 
// Atomic compare-and-exchange of a jint; returns the value observed at the
// location (== e on success).
UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    // Off-heap: with a NULL base, offset denotes an absolute address.
    volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(addr, e, x);
  } else {
    // On-heap: CAS a field/array element within p.
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
  }
} UNSAFE_END
 930 
 931 UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
 932   oop p = JNIHandles::resolve(obj);
 933   if (p == NULL) {
 934     volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
 935     return RawAccess<>::atomic_cmpxchg(addr, e, x);
 936   } else {
 937     assert_field_offset_sane(p, offset);


< prev index next >