< prev index next >

src/share/vm/opto/library_call.cpp

Print this page




// Decide whether this Unsafe access must be bracketed with memory barriers.
// 'kind' is the access kind (Relaxed/Opaque/Acquire/Release/Volatile).
2455   switch (kind) {
2456       case Relaxed:
           // A plain access only needs a barrier when the address type could
           // not be resolved to a concrete field (adr_type is BOTTOM), i.e.
           // the access may alias anything.
2457           need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
2458           break;
2459       case Opaque:
2460           // Opaque uses CPUOrder membars for protection against code movement.
           // fall through: Opaque/Acquire/Release/Volatile all need membars.
2461       case Acquire:
2462       case Release:
2463       case Volatile:
2464           need_mem_bar = true;
2465           break;
2466       default:
2467           ShouldNotReachHere();
2468   }
2469 
2470   // Some accesses require access atomicity for all types, notably longs and doubles.
2471   // When AlwaysAtomicAccesses is enabled, all accesses are atomic.
2472   bool requires_atomic_access = false;
2473   switch (kind) {
2474       case Relaxed:
2475       case Opaque:
           // NOTE(review): grouping Opaque with Relaxed means an Opaque
           // long/double access is atomic only under AlwaysAtomicAccesses;
           // the revised version of this code (below in this webrev) moves
           // Opaque into the always-atomic group instead.
2476           requires_atomic_access = AlwaysAtomicAccesses;
2477           break;


2478       case Acquire:
2479       case Release:
2480       case Volatile:
           // Ordered accesses are always performed atomically.
2481           requires_atomic_access = true;
2482           break;
2483       default:
2484           ShouldNotReachHere();
2485   }
2486 
2487   // Figure out the memory ordering.
2488   // Acquire/Release/Volatile accesses require marking the loads/stores with MemOrd
2489   MemNode::MemOrd mo = access_kind_to_memord_LS(kind, is_store);
2490 
2491   // If we are reading the value of the referent field of a Reference
2492   // object (either by using Unsafe directly or through reflection)
2493   // then, if G1 is enabled, we need to record the referent in an
2494   // SATB log buffer using the pre-barrier mechanism.
2495   // Also we need to add memory barrier to prevent commoning reads
2496   // from this field across safepoint since GC can change its value.
2497   bool need_read_barrier = !is_native_ptr && !is_store &&




// Decide whether this Unsafe access must be bracketed with memory barriers.
// 'kind' is the access kind (Relaxed/Opaque/Acquire/Release/Volatile).
2455   switch (kind) {
2456       case Relaxed:
           // A plain access only needs a barrier when the address type could
           // not be resolved to a concrete field (adr_type is BOTTOM), i.e.
           // the access may alias anything.
2457           need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
2458           break;
2459       case Opaque:
2460           // Opaque uses CPUOrder membars for protection against code movement.
           // fall through: Opaque/Acquire/Release/Volatile all need membars.
2461       case Acquire:
2462       case Release:
2463       case Volatile:
2464           need_mem_bar = true;
2465           break;
2466       default:
2467           ShouldNotReachHere();
2468   }
2469 
2470   // Some accesses require access atomicity for all types, notably longs and doubles.
2471   // When AlwaysAtomicAccesses is enabled, all accesses are atomic.
2472   bool requires_atomic_access = false;
2473   switch (kind) {
2474       case Relaxed:

           // Only Relaxed accesses remain conditional on AlwaysAtomicAccesses.
2475           requires_atomic_access = AlwaysAtomicAccesses;
2476           break;
2477       case Opaque:
2478           // Opaque accesses are atomic.
           // fall through: Opaque now joins the always-atomic group (this is
           // the change in this webrev — previously it was grouped with
           // Relaxed above).
2479       case Acquire:
2480       case Release:
2481       case Volatile:
2482           requires_atomic_access = true;
2483           break;
2484       default:
2485           ShouldNotReachHere();
2486   }
2487 
2488   // Figure out the memory ordering.
2489   // Acquire/Release/Volatile accesses require marking the loads/stores with MemOrd
2490   MemNode::MemOrd mo = access_kind_to_memord_LS(kind, is_store);
2491 
2492   // If we are reading the value of the referent field of a Reference
2493   // object (either by using Unsafe directly or through reflection)
2494   // then, if G1 is enabled, we need to record the referent in an
2495   // SATB log buffer using the pre-barrier mechanism.
2496   // Also we need to add memory barrier to prevent commoning reads
2497   // from this field across safepoint since GC can change its value.
2498   bool need_read_barrier = !is_native_ptr && !is_store &&


< prev index next >