< prev index next >

src/share/vm/prims/unsafe.cpp

Print this page
rev 11983 : 8165489: Missing G1 barrier in Unsafe_GetObjectVolatile
Reviewed-by: kbarrett, mgerdin


 255 
 256     jlong x = Atomic::load(p);
 257 
 258     return x;
 259   }
 260 
      // Store a 64-bit value while holding UnsafeJlong_lock, making the
      // write appear atomic with respect to other lock-protected jlong
      // accesses (presumably the fallback path for platforms without
      // native 64-bit atomic stores — TODO confirm against the #ifdef
      // condition above this view).
 261   void put_jlong_locked(jlong x) {
      // Guard the raw memory access (scoped; released on return).
 262     GuardUnsafeAccess guard(_thread, _obj);
 263 
      // _no_safepoint_check_flag: taken without a safepoint check, so this
      // critical section must not block on a safepoint.
 264     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 265 
 266     jlong* p = (jlong*)addr();
 267 
      // normalize_for_write() presumably canonicalizes the bit pattern
      // before the store — verify in the enclosing class.
 268     Atomic::store(normalize_for_write(x),  p);
 269   }
 270 #endif
 271 };
 272 
 273 // Get/PutObject must be special-cased, since it works with handles.
 274 

























 275 // These functions allow a null base pointer with an arbitrary address.
 276 // But if the base pointer is non-null, the offset should make some sense.
 277 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
     // Plain (non-volatile) oop read at (obj, offset); returns a new JNI
     // local handle for the value read. Registers the value with the G1
     // SATB barrier when the read is of Reference.referent, so concurrent
     // marking cannot miss an object handed out through Unsafe.
 278 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 279   oop p = JNIHandles::resolve(obj);
 280   oop v;
 281 
     // Two load shapes: compressed oops must be decoded after the load.
 282   if (UseCompressedOops) {
 283     narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
 284     v = oopDesc::decode_heap_oop(n);
 285   } else {
 286     v = *(oop*)index_oop_from_field_offset_long(p, offset);
 287   }
 288 
     // Wrap the raw oop in a handle before any code that could interact
     // with the GC below.
 289   jobject ret = JNIHandles::make_local(env, v);
 290 
 291 #if INCLUDE_ALL_GCS
 292   // We could be accessing the referent field in a reference
 293   // object. If G1 is enabled then we need to register non-null
 294   // referent with the SATB barrier.
 295   if (UseG1GC) {
 296     bool needs_barrier = false;
 297 
     // Barrier is only needed for a non-null value read from the
     // referent field of an actual Reference subclass instance.
 298     if (ret != NULL) {
 299       if (offset == java_lang_ref_Reference::referent_offset && obj != NULL) {
 300         oop o = JNIHandles::resolve(obj);
 301         Klass* k = o->klass();
 302         if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
 303           assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
 304           needs_barrier = true;
 305         }
 306       }
 307     }
 308 
     // Re-resolve from the handle (raw oops may have moved) and enqueue
     // the referent on the SATB queue so marking treats it as live.
 309     if (needs_barrier) {
 310       oop referent = JNIHandles::resolve(ret);
 311       G1SATBCardTableModRefBS::enqueue(referent);
 312     }
 313   }
 314 #endif // INCLUDE_ALL_GCS
 315 
 316   return ret;
 317 } UNSAFE_END
 318 
     // Plain (non-volatile) oop store of x_h at (obj, offset). Uses
     // oop_store() rather than a raw pointer store — presumably so the
     // GC write barriers are applied; verify in oop.inline.hpp.
 319 UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 320   oop x = JNIHandles::resolve(x_h);
 321   oop p = JNIHandles::resolve(obj);
 322 
     // Compressed-oop heaps store through a narrowOop* slot.
 323   if (UseCompressedOops) {
 324     oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
 325   } else {
 326     oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
 327   }
 328 } UNSAFE_END
 329 
 330 UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 331   oop p = JNIHandles::resolve(obj);
 332   void* addr = index_oop_from_field_offset_long(p, offset);
 333 
 334   volatile oop v;
 335 
 336   if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
 337     OrderAccess::fence();
 338   }
 339 
 340   if (UseCompressedOops) {
 341     volatile narrowOop n = *(volatile narrowOop*) addr;
 342     (void)const_cast<oop&>(v = oopDesc::decode_heap_oop(n));
 343   } else {
 344     (void)const_cast<oop&>(v = *(volatile oop*) addr);
 345   }


 346 
 347   OrderAccess::acquire();
 348   return JNIHandles::make_local(env, v);
 349 } UNSAFE_END
 350 
     // Volatile oop store of x_h at (obj, offset): release before the
     // store plus a full fence after it give the store volatile
     // (sequentially consistent) semantics.
 351 UNSAFE_ENTRY(void, Unsafe_PutObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 352   oop x = JNIHandles::resolve(x_h);
 353   oop p = JNIHandles::resolve(obj);
 354   void* addr = index_oop_from_field_offset_long(p, offset);
     // Order all prior memory accesses before the store below.
 355   OrderAccess::release();
 356 
     // oop_store() rather than a raw store — presumably applies the GC
     // write barriers; verify in oop.inline.hpp.
 357   if (UseCompressedOops) {
 358     oop_store((narrowOop*)addr, x);
 359   } else {
 360     oop_store((oop*)addr, x);
 361   }
 362 
     // Full fence so the volatile store is globally visible before any
     // subsequent access.
 363   OrderAccess::fence();
 364 } UNSAFE_END
 365 




 255 
 256     jlong x = Atomic::load(p);
 257 
 258     return x;
 259   }
 260 
      // Store a 64-bit value while holding UnsafeJlong_lock, making the
      // write appear atomic with respect to other lock-protected jlong
      // accesses (presumably the fallback path for platforms without
      // native 64-bit atomic stores — TODO confirm against the #ifdef
      // condition above this view).
 261   void put_jlong_locked(jlong x) {
      // Guard the raw memory access (scoped; released on return).
 262     GuardUnsafeAccess guard(_thread, _obj);
 263 
      // _no_safepoint_check_flag: taken without a safepoint check, so this
      // critical section must not block on a safepoint.
 264     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 265 
 266     jlong* p = (jlong*)addr();
 267 
      // normalize_for_write() presumably canonicalizes the bit pattern
      // before the store — verify in the enclosing class.
 268     Atomic::store(normalize_for_write(x),  p);
 269   }
 270 #endif
 271 };
 272 
 273 // Get/PutObject must be special-cased, since it works with handles.
 274 
 275 // We could be accessing the referent field in a reference
 276 // object. If G1 is enabled then we need to register non-null
 277 // referent with the SATB barrier.
 278 
 279 #if INCLUDE_ALL_GCS
     // Returns true iff (o, offset) denotes an access to the referent
     // field of a java.lang.ref.Reference subclass instance — i.e. an
     // access that needs G1 SATB referent registration.
 280 static bool is_java_lang_ref_Reference_access(oop o, jlong offset) {
 281   if (offset == java_lang_ref_Reference::referent_offset && o != NULL) {
 282     Klass* k = o->klass();
     // REF_NONE means the class is not a Reference type even though the
     // offset happened to match.
 283     if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
 284       assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
 285       return true;
 286     }
 287   }
 288   return false;
 289 }
 290 #endif
 291 
     // If v was read from the referent field of a Reference object o and
     // G1 is in use, enqueue v on the SATB queue so concurrent marking
     // treats it as live. No-op for other collectors and non-referent
     // accesses. Shared by Unsafe_GetObject and Unsafe_GetObjectVolatile
     // (JDK-8165489).
 292 static void ensure_satb_referent_alive(oop o, jlong offset, oop v) {
 293 #if INCLUDE_ALL_GCS
 294   if (UseG1GC && v != NULL && is_java_lang_ref_Reference_access(o, offset)) {
 295     G1SATBCardTableModRefBS::enqueue(v);
 296   }
 297 #endif
 298 }
 299 
 300 // These functions allow a null base pointer with an arbitrary address.
 301 // But if the base pointer is non-null, the offset should make some sense.
 302 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
     // Plain (non-volatile) oop read at (obj, offset); returns a new JNI
     // local handle for the value read. The SATB referent registration is
     // factored into ensure_satb_referent_alive().
 303 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 304   oop p = JNIHandles::resolve(obj);
 305   oop v;
 306 
     // Two load shapes: compressed oops must be decoded after the load.
 307   if (UseCompressedOops) {
 308     narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
 309     v = oopDesc::decode_heap_oop(n);
 310   } else {
 311     v = *(oop*)index_oop_from_field_offset_long(p, offset);
 312   }
 313 
     // Register v with the G1 SATB barrier when this is a read of
     // Reference.referent, before handing the value out.
 314   ensure_satb_referent_alive(p, offset, v);
 315 
 316   return JNIHandles::make_local(env, v);
























 317 } UNSAFE_END
 318 
     // Plain (non-volatile) oop store of x_h at (obj, offset). Uses
     // oop_store() rather than a raw pointer store — presumably so the
     // GC write barriers are applied; verify in oop.inline.hpp.
 319 UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 320   oop x = JNIHandles::resolve(x_h);
 321   oop p = JNIHandles::resolve(obj);
 322 
     // Compressed-oop heaps store through a narrowOop* slot.
 323   if (UseCompressedOops) {
 324     oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
 325   } else {
 326     oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
 327   }
 328 } UNSAFE_END
 329 
     // Volatile oop read at (obj, offset); returns a new JNI local
     // handle. The fence/acquire pair gives the read volatile
     // (sequentially consistent) semantics. The SATB referent call is
     // the JDK-8165489 fix: previously only Unsafe_GetObject had it.
 330 UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 331   oop p = JNIHandles::resolve(obj);
 332   void* addr = index_oop_from_field_offset_long(p, offset);
 333 
 334   volatile oop v;
 335 
     // On non-multiple-copy-atomic CPUs a full fence is required before
     // a volatile read (IRIW ordering).
 336   if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
 337     OrderAccess::fence();
 338   }
 339 
     // The (void)const_cast<oop&>(...) wrapper presumably suppresses a
     // compiler diagnostic about the volatile assignment's unused result
     // — TODO confirm against the toolchain notes.
 340   if (UseCompressedOops) {
 341     volatile narrowOop n = *(volatile narrowOop*) addr;
 342     (void)const_cast<oop&>(v = oopDesc::decode_heap_oop(n));
 343   } else {
 344     (void)const_cast<oop&>(v = *(volatile oop*) addr);
 345   }
 346 
     // Register v with the G1 SATB barrier when this is a read of
     // Reference.referent (the missing barrier this change adds).
 347   ensure_satb_referent_alive(p, offset, v);
 348 
 349   OrderAccess::acquire();
 350   return JNIHandles::make_local(env, v);
 351 } UNSAFE_END
 352 
     // Volatile oop store of x_h at (obj, offset): release before the
     // store plus a full fence after it give the store volatile
     // (sequentially consistent) semantics.
 353 UNSAFE_ENTRY(void, Unsafe_PutObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 354   oop x = JNIHandles::resolve(x_h);
 355   oop p = JNIHandles::resolve(obj);
 356   void* addr = index_oop_from_field_offset_long(p, offset);
     // Order all prior memory accesses before the store below.
 357   OrderAccess::release();
 358 
     // oop_store() rather than a raw store — presumably applies the GC
     // write barriers; verify in oop.inline.hpp.
 359   if (UseCompressedOops) {
 360     oop_store((narrowOop*)addr, x);
 361   } else {
 362     oop_store((oop*)addr, x);
 363   }
 364 
     // Full fence so the volatile store is globally visible before any
     // subsequent access.
 365   OrderAccess::fence();
 366 } UNSAFE_END
 367 


< prev index next >