
src/share/vm/prims/unsafe.cpp

rev 11938 : 8165489: Missing G1 barrier in Unsafe_GetObjectVolatile
Reviewed-by:
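Background, as a minimal illustrative model (hypothetical names, not HotSpot APIs): G1's concurrent marking relies on a snapshot-at-the-beginning (SATB) invariant, so when a mutator reads java.lang.ref.Reference.referent, the loaded referent must be reported to the marker; otherwise an object reachable only through that read could be missed and reclaimed while still in use. The sketch below models that obligation; SatbQueue, read_referent and marking_active are stand-ins. The old Unsafe_GetObjectVolatile in the first listing below performs only the raw load and skips this step, which is the bug being fixed.

#include <cstdio>
#include <vector>

struct Object { const char* name; };

// Stand-in for a per-thread SATB buffer; real G1 code enqueues through the
// barrier set, this model just records what would be enqueued.
struct SatbQueue {
  std::vector<Object*> buf;
  void enqueue(Object* o) { if (o != nullptr) buf.push_back(o); }
};

static bool marking_active = true;    // stand-in for "concurrent marking in progress"

// What any Reference.referent read path (Unsafe_GetObject as well as
// Unsafe_GetObjectVolatile) is expected to do: load the value, then tell
// the marker about it so the referent stays live under SATB.
Object* read_referent(Object* stored_referent, SatbQueue& q) {
  Object* v = stored_referent;        // the raw load
  if (marking_active && v != nullptr) {
    q.enqueue(v);                     // SATB pre-barrier for the referent
  }
  return v;
}

int main() {
  SatbQueue q;
  Object weakly_held = { "weakly held object" };
  Object* v = read_referent(&weakly_held, q);
  std::printf("returned %s; SATB queue holds %zu entry\n", v->name, q.buf.size());
  return 0;
}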


 246 
 247     jlong x = Atomic::load(p);
 248 
 249     return x;
 250   }
 251 
 252   void put_jlong_locked(jlong x) {
 253     GuardUnsafeAccess guard(_thread, _obj);
 254 
 255     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 256 
 257     jlong* p = (jlong*)addr();
 258 
 259     Atomic::store(normalize(x),  p);
 260   }
 261 #endif
 262 };
 263 
 264 // Get/PutObject must be special-cased, since it works with handles.
 265 
 266 // These functions allow a null base pointer with an arbitrary address.
 267 // But if the base pointer is non-null, the offset should make some sense.
 268 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
 269 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 270   oop p = JNIHandles::resolve(obj);
 271   oop v;
 272 
 273   if (UseCompressedOops) {
 274     narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
 275     v = oopDesc::decode_heap_oop(n);
 276   } else {
 277     v = *(oop*)index_oop_from_field_offset_long(p, offset);
 278   }
 279 
 280   jobject ret = JNIHandles::make_local(env, v);
 281 
 282 #if INCLUDE_ALL_GCS
 283   // We could be accessing the referent field in a reference
 284   // object. If G1 is enabled then we need to register non-null
 285   // referent with the SATB barrier.
 286   if (UseG1GC) {
 287     bool needs_barrier = false;
 288 
 289     if (ret != NULL) {
 290       if (offset == java_lang_ref_Reference::referent_offset && obj != NULL) {
 291         oop o = JNIHandles::resolve(obj);
 292         Klass* k = o->klass();
 293         if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
 294           assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
 295           needs_barrier = true;
 296         }
 297       }
 298     }
 299 
 300     if (needs_barrier) {
 301       oop referent = JNIHandles::resolve(ret);
 302       G1SATBCardTableModRefBS::enqueue(referent);
 303     }
 304   }
 305 #endif // INCLUDE_ALL_GCS
 306 
 307   return ret;
 308 } UNSAFE_END
 309 
 310 UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 311   oop x = JNIHandles::resolve(x_h);
 312   oop p = JNIHandles::resolve(obj);
 313 
 314   if (UseCompressedOops) {
 315     oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
 316   } else {
 317     oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
 318   }
 319 } UNSAFE_END
 320 
 321 UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 322   oop p = JNIHandles::resolve(obj);
 323   void* addr = index_oop_from_field_offset_long(p, offset);
 324 
 325   volatile oop v;
 326 
 327   if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
 328     OrderAccess::fence();
 329   }
 330 
 331   if (UseCompressedOops) {
 332     volatile narrowOop n = *(volatile narrowOop*) addr;
 333     (void)const_cast<oop&>(v = oopDesc::decode_heap_oop(n));
 334   } else {
 335     (void)const_cast<oop&>(v = *(volatile oop*) addr);
 336   }
 337 
 338   OrderAccess::acquire();
 339   return JNIHandles::make_local(env, v);
 340 } UNSAFE_END
 341 
 342 UNSAFE_ENTRY(void, Unsafe_PutObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 343   oop x = JNIHandles::resolve(x_h);
 344   oop p = JNIHandles::resolve(obj);
 345   void* addr = index_oop_from_field_offset_long(p, offset);
 346   OrderAccess::release();
 347 
 348   if (UseCompressedOops) {
 349     oop_store((narrowOop*)addr, x);
 350   } else {
 351     oop_store((oop*)addr, x);
 352   }
 353 
 354   OrderAccess::fence();
 355 } UNSAFE_END
 356 
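In the old version above, only Unsafe_GetObject registers a referent with the SATB barrier; Unsafe_GetObjectVolatile does the raw load and returns without it. The new version that follows factors the check into a shared helper, G1SATB_registerReference, and calls it from both accessors. A rough standalone model of the condition that helper applies (hypothetical types and constants, mirroring the logic in the patch):

#include <cstdint>
#include <cstdio>

enum ReferenceType { REF_NONE, REF_SOFT, REF_WEAK, REF_PHANTOM };

struct ModelKlass { ReferenceType ref_type; };
struct ModelOop   { ModelKlass* klass; };

static const int64_t kReferentOffset = 16;   // stand-in for java_lang_ref_Reference::referent_offset
static bool use_g1 = true;                   // stand-in for UseG1GC

// True when the loaded value must be enqueued for SATB: G1 in use, the field
// read is Reference.referent, the base object really is a Reference subclass,
// and the loaded referent is non-null -- the same tests the helper makes.
bool needs_satb_registration(ModelOop* base, int64_t offset, ModelOop* value) {
  return use_g1
      && offset == kReferentOffset
      && base  != nullptr
      && value != nullptr
      && base->klass->ref_type != REF_NONE;
}

int main() {
  ModelKlass weak_ref_klass = { REF_WEAK };
  ModelKlass plain_klass    = { REF_NONE };
  ModelOop reference = { &weak_ref_klass };
  ModelOop plain     = { &plain_klass };
  ModelOop referent  = { &plain_klass };

  std::printf("Reference.referent read:         %d\n",
              needs_satb_registration(&reference, kReferentOffset, &referent));  // 1
  std::printf("same offset, non-Reference base: %d\n",
              needs_satb_registration(&plain, kReferentOffset, &referent));      // 0
  return 0;
}

In the listing below, both Unsafe_GetObject and Unsafe_GetObjectVolatile invoke the real helper under #if INCLUDE_ALL_GCS, after the load and before the local handle is created.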




 246 
 247     jlong x = Atomic::load(p);
 248 
 249     return x;
 250   }
 251 
 252   void put_jlong_locked(jlong x) {
 253     GuardUnsafeAccess guard(_thread, _obj);
 254 
 255     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 256 
 257     jlong* p = (jlong*)addr();
 258 
 259     Atomic::store(normalize(x),  p);
 260   }
 261 #endif
 262 };
 263 
 264 // Get/PutObject must be special-cased, since it works with handles.
 265 
 266 #if INCLUDE_ALL_GCS
 267 // We could be accessing the referent field in a reference
 268 // object. If G1 is enabled then we need to register non-null
 269 // referent with the SATB barrier.
 270 inline void G1SATB_registerReference(oop o, jlong offset, oop v) {
 271   if (UseG1GC && offset == java_lang_ref_Reference::referent_offset && o != NULL && v != NULL) {
 272     Klass* k = o->klass();
 273     if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
 274       assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
 275       G1SATBCardTableModRefBS::enqueue(v);
 276     }
 277   }
 278 }
 279 #endif
 280 
 281 // These functions allow a null base pointer with an arbitrary address.
 282 // But if the base pointer is non-null, the offset should make some sense.
 283 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
 284 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 285   oop p = JNIHandles::resolve(obj);
 286   oop v;
 287 
 288   if (UseCompressedOops) {
 289     narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
 290     v = oopDesc::decode_heap_oop(n);
 291   } else {
 292     v = *(oop*)index_oop_from_field_offset_long(p, offset);
 293   }
 294 
 295 #if INCLUDE_ALL_GCS
 296   G1SATB_registerReference(p, offset, v);
 297 #endif
 298 
 299   return JNIHandles::make_local(env, v);
 300 } UNSAFE_END
 301 
 302 UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 303   oop x = JNIHandles::resolve(x_h);
 304   oop p = JNIHandles::resolve(obj);
 305 
 306   if (UseCompressedOops) {
 307     oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
 308   } else {
 309     oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
 310   }
 311 } UNSAFE_END
 312 
 313 UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 314   oop p = JNIHandles::resolve(obj);
 315   void* addr = index_oop_from_field_offset_long(p, offset);
 316 
 317   volatile oop v;
 318 
 319   if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
 320     OrderAccess::fence();
 321   }
 322 
 323   if (UseCompressedOops) {
 324     volatile narrowOop n = *(volatile narrowOop*) addr;
 325     (void)const_cast<oop&>(v = oopDesc::decode_heap_oop(n));
 326   } else {
 327     (void)const_cast<oop&>(v = *(volatile oop*) addr);
 328   }
 329 
 330 #if INCLUDE_ALL_GCS
 331   G1SATB_registerReference(p, offset, v);
 332 #endif
 333 
 334   OrderAccess::acquire();
 335   return JNIHandles::make_local(env, v);
 336 } UNSAFE_END
 337 
 338 UNSAFE_ENTRY(void, Unsafe_PutObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 339   oop x = JNIHandles::resolve(x_h);
 340   oop p = JNIHandles::resolve(obj);
 341   void* addr = index_oop_from_field_offset_long(p, offset);
 342   OrderAccess::release();
 343 
 344   if (UseCompressedOops) {
 345     oop_store((narrowOop*)addr, x);
 346   } else {
 347     oop_store((oop*)addr, x);
 348   }
 349 
 350   OrderAccess::fence();
 351 } UNSAFE_END
 352 
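For the memory ordering in the two volatile accessors above, a rough C++11 analogue using standard fences (an approximation of HotSpot's OrderAccess for illustration, not an equivalent; the seq_cst fence on the read side corresponds to the support_IRIW_for_not_multiple_copy_atomic_cpu case):

#include <atomic>
#include <cstdio>

static std::atomic<int*> g_slot(nullptr);    // stands in for the oop slot at addr
static const bool kNeedsIriwFence = false;   // stands in for support_IRIW_for_not_multiple_copy_atomic_cpu

int* model_get_volatile() {
  if (kNeedsIriwFence) {
    std::atomic_thread_fence(std::memory_order_seq_cst);  // ~ OrderAccess::fence()
  }
  int* v = g_slot.load(std::memory_order_relaxed);         // the raw load
  std::atomic_thread_fence(std::memory_order_acquire);     // ~ OrderAccess::acquire()
  return v;
}

void model_put_volatile(int* x) {
  std::atomic_thread_fence(std::memory_order_release);     // ~ OrderAccess::release()
  g_slot.store(x, std::memory_order_relaxed);              // the raw store
  std::atomic_thread_fence(std::memory_order_seq_cst);     // ~ OrderAccess::fence()
}

int main() {
  static int payload = 42;
  model_put_volatile(&payload);
  std::printf("read back %d\n", *model_get_volatile());
  return 0;
}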