
src/share/vm/opto/library_call.cpp

rev 12619 : 8174164: SafePointNode::_replaced_nodes breaks with irreducible loops
Reviewed-by:
rev 12620 : 8174164: SafePointNode::_replaced_nodes breaks with irreducible loops
Reviewed-by:
rev 12697 : 8176506: C2: loop unswitching and unsafe accesses cause crash
Reviewed-by:
rev 12698 : 8176513: Poor code quality for ByteBuffers
Reviewed-by:
rev 12699 : 8176513: Poor code quality for ByteBuffers
Summary: relax the conditions that cause MemBarCPUOrder membars to be added around unsafe accesses
Reviewed-by:


2358       return false;
2359     }
2360     mismatched = (bt != type);
2361   } else if (alias_type->adr_type()->isa_oopptr()) {
2362     mismatched = true; // conservatively mark all "wide" on-heap accesses as mismatched
2363   }
2364 
2365   assert(!mismatched || alias_type->adr_type()->is_oopptr(), "off-heap access can't be mismatched");
2366 
2367   // First guess at the value type.
2368   const Type *value_type = Type::get_const_basic_type(type);
2369 
2370   // We will need memory barriers unless we can determine a unique
2371   // alias category for this reference.  (Note:  If for some reason
2372   // the barriers get omitted and the unsafe reference begins to "pollute"
2373   // the alias analysis of the rest of the graph, either Compile::can_alias
2374   // or Compile::must_alias will throw a diagnostic assert.)
2375   bool need_mem_bar;
2376   switch (kind) {
2377       case Relaxed:
2378           need_mem_bar = mismatched || can_access_non_heap;
2379           break;
2380       case Opaque:
2381           // Opaque uses CPUOrder membars for protection against code movement.
2382       case Acquire:
2383       case Release:
2384       case Volatile:
2385           need_mem_bar = true;
2386           break;
2387       default:
2388           ShouldNotReachHere();
2389   }
2390 
2391   // Some accesses require access atomicity for all types, notably longs and doubles.
2392   // When AlwaysAtomicAccesses is enabled, all accesses are atomic.
2393   bool requires_atomic_access = false;
2394   switch (kind) {
2395       case Relaxed:
2396           requires_atomic_access = AlwaysAtomicAccesses;
2397           break;
2398       case Opaque:




2358       return false;
2359     }
2360     mismatched = (bt != type);
2361   } else if (alias_type->adr_type()->isa_oopptr()) {
2362     mismatched = true; // conservatively mark all "wide" on-heap accesses as mismatched
2363   }
2364 
2365   assert(!mismatched || alias_type->adr_type()->is_oopptr(), "off-heap access can't be mismatched");
2366 
2367   // First guess at the value type.
2368   const Type *value_type = Type::get_const_basic_type(type);
2369 
2370   // We will need memory barriers unless we can determine a unique
2371   // alias category for this reference.  (Note:  If for some reason
2372   // the barriers get omitted and the unsafe reference begins to "pollute"
2373   // the alias analysis of the rest of the graph, either Compile::can_alias
2374   // or Compile::must_alias will throw a diagnostic assert.)
2375   bool need_mem_bar;
2376   switch (kind) {
2377       case Relaxed:
2378           need_mem_bar = mismatched && !adr_type->isa_aryptr();
2379           break;
2380       case Opaque:
2381           // Opaque uses CPUOrder membars for protection against code movement.
2382       case Acquire:
2383       case Release:
2384       case Volatile:
2385           need_mem_bar = true;
2386           break;
2387       default:
2388           ShouldNotReachHere();
2389   }
2390 
2391   // Some accesses require access atomicity for all types, notably longs and doubles.
2392   // When AlwaysAtomicAccesses is enabled, all accesses are atomic.
2393   bool requires_atomic_access = false;
2394   switch (kind) {
2395       case Relaxed:
2396           requires_atomic_access = AlwaysAtomicAccesses;
2397           break;
2398       case Opaque:


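For reviewers skimming the hunk: the only functional difference above is the Relaxed case of the need_mem_bar switch. Below is a minimal standalone sketch (plain C++, not HotSpot code; the struct and function names are invented for illustration) that contrasts the old and new predicates as this webrev presents them.

// Illustrative sketch only: distilled inputs for a Relaxed-kind unsafe access.
#include <cstdio>

struct RelaxedAccess {
  bool mismatched;           // declared vs. accessed basic type disagree
  bool can_access_non_heap;  // base may be NULL, so the access may be off-heap
  bool is_array_address;     // address type is an array pointer (adr_type->isa_aryptr())
};

// Old predicate: barriers for any mismatched or possibly off-heap Relaxed access.
static bool need_mem_bar_old(const RelaxedAccess& a) {
  return a.mismatched || a.can_access_non_heap;
}

// New predicate in this webrev: barriers only for mismatched accesses that are
// not known to address an array element.
static bool need_mem_bar_new(const RelaxedAccess& a) {
  return a.mismatched && !a.is_array_address;
}

int main() {
  // A ByteBuffer-style access: mismatched (e.g. a long load from a byte[]) but
  // known to address an array, so the new predicate drops the barriers.
  RelaxedAccess byte_buffer_get = { /*mismatched=*/true,
                                    /*can_access_non_heap=*/false,
                                    /*is_array_address=*/true };
  std::printf("old: %d, new: %d\n",
              need_mem_bar_old(byte_buffer_get),
              need_mem_bar_new(byte_buffer_get));
  return 0;
}

Under the new predicate, a mismatched but array-addressed access, such as the wide accesses a heap ByteBuffer performs over a byte[], no longer forces MemBarCPUOrder barriers, which appears to be the relaxation the summary line for 8176513 describes.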