
src/hotspot/share/opto/library_call.cpp

rev 49175 : 8198950: AArch64: org.openjdk.jcstress.tests.varhandles.DekkerTest fails
Summary: Fix breakage to AArch64 code generation caused by JDK-8181211
Reviewed-by: duke
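For context, the failing jcstress test exercises the classic Dekker store/load idiom: each thread stores to its own flag and then loads the other thread's flag, and with volatile (sequentially consistent) accesses it must be impossible for both threads to observe 0. Below is a minimal stand-alone sketch of that idiom in C++ with std::atomic; it illustrates the ordering requirement only and is not the jcstress test or HotSpot code. The forbidden (0, 0) outcome can only appear if a load is allowed to float above the preceding store, which is the kind of reordering the change below rules out for volatile unsafe accesses.

#include <atomic>
#include <cassert>
#include <thread>

// Two flags, one per thread, plus the value each thread observed.
std::atomic<int> flag_a{0};
std::atomic<int> flag_b{0};
int observed_by_a = -1;
int observed_by_b = -1;

void thread_a() {
  flag_a.store(1, std::memory_order_seq_cst);              // publish own flag
  observed_by_a = flag_b.load(std::memory_order_seq_cst);  // then read the other
}

void thread_b() {
  flag_b.store(1, std::memory_order_seq_cst);
  observed_by_b = flag_a.load(std::memory_order_seq_cst);
}

int main() {
  std::thread ta(thread_a), tb(thread_b);
  ta.join();
  tb.join();
  // Under sequential consistency the (0, 0) outcome is forbidden; it only
  // becomes observable if a load is reordered above the preceding store.
  assert(!(observed_by_a == 0 && observed_by_b == 0));
  return 0;
}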


Old version (before the fix):

2561   // Memory barrier to prevent normal and 'unsafe' accesses from
2562   // bypassing each other.  Happens after null checks, so the
2563   // exception paths do not take memory state from the memory barrier,
2564   // so there's no problems making a strong assert about mixing users
2565   // of safe & unsafe memory.
2566   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2567 
2568   if (!is_store) {
2569     Node* p = NULL;
2570     // Try to constant fold a load from a constant field
2571     ciField* field = alias_type->field();
2572     if (heap_base_oop != top() && field != NULL && field->is_constant() && !mismatched) {
2573       // final or stable field
2574       p = make_constant_from_field(field, heap_base_oop);
2575     }
2576     if (p == NULL) {
2577       // To be valid, unsafe loads may depend on other conditions than
2578       // the one that guards them: pin the Load node
2579       LoadNode::ControlDependency dep = LoadNode::Pinned;
2580       Node* ctrl = control();
2581       if (adr_type->isa_instptr()) {

2582         assert(adr_type->meet(TypePtr::NULL_PTR) != adr_type->remove_speculative(), "should be not null");
2583         intptr_t offset = Type::OffsetBot;
2584         AddPNode::Ideal_base_and_offset(adr, &_gvn, offset);
2585         if (offset >= 0) {
2586           int s = Klass::layout_helper_size_in_bytes(adr_type->isa_instptr()->klass()->layout_helper());
2587           if (offset < s) {
2588             // Guaranteed to be a valid access, no need to pin it
2589             dep = LoadNode::DependsOnlyOnTest;
2590             ctrl = NULL;
2591           }
2592         }
2593       }
2594       p = make_load(ctrl, adr, value_type, type, adr_type, mo, dep, requires_atomic_access, unaligned, mismatched);
2595       // load value
2596       switch (type) {
2597       case T_BOOLEAN:
2598       {
2599         // Normalize the value returned by getBoolean in the following cases
2600         if (mismatched ||
2601             heap_base_oop == top() ||                            // - heap_base_oop is NULL or




New version (with the fix):

2561   // Memory barrier to prevent normal and 'unsafe' accesses from
2562   // bypassing each other.  Happens after null checks, so the
2563   // exception paths do not take memory state from the memory barrier,
2564   // so there's no problems making a strong assert about mixing users
2565   // of safe & unsafe memory.
2566   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2567 
2568   if (!is_store) {
2569     Node* p = NULL;
2570     // Try to constant fold a load from a constant field
2571     ciField* field = alias_type->field();
2572     if (heap_base_oop != top() && field != NULL && field->is_constant() && !mismatched) {
2573       // final or stable field
2574       p = make_constant_from_field(field, heap_base_oop);
2575     }
2576     if (p == NULL) {
2577       // To be valid, unsafe loads may depend on other conditions than
2578       // the one that guards them: pin the Load node
2579       LoadNode::ControlDependency dep = LoadNode::Pinned;
2580       Node* ctrl = control();
2581       // non volatile loads may be able to float
2582       if (!need_mem_bar && adr_type->isa_instptr()) {
2583         assert(adr_type->meet(TypePtr::NULL_PTR) != adr_type->remove_speculative(), "should be not null");
2584         intptr_t offset = Type::OffsetBot;
2585         AddPNode::Ideal_base_and_offset(adr, &_gvn, offset);
2586         if (offset >= 0) {
2587           int s = Klass::layout_helper_size_in_bytes(adr_type->isa_instptr()->klass()->layout_helper());
2588           if (offset < s) {
2589             // Guaranteed to be a valid access, no need to pin it
2590             dep = LoadNode::DependsOnlyOnTest;
2591             ctrl = NULL;
2592           }
2593         }
2594       }
2595       p = make_load(ctrl, adr, value_type, type, adr_type, mo, dep, requires_atomic_access, unaligned, mismatched);
2596       // load value
2597       switch (type) {
2598       case T_BOOLEAN:
2599       {
2600         // Normalize the value returned by getBoolean in the following cases
2601         if (mismatched ||
2602             heap_base_oop == top() ||                            // - heap_base_oop is NULL or
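The substance of the fix is the added !need_mem_bar guard on line 2582: an unsafe load that is bracketed by memory barriers (volatile and other ordered accesses, per the new comment about non volatile loads) must stay pinned so it cannot float above the leading MemBarCPUOrder, and only a non-volatile load whose constant offset provably lies inside the instance is relaxed to DependsOnlyOnTest. A simplified stand-alone sketch of that decision follows; the helper name and parameters are hypothetical and not the HotSpot API.

#include <cstdint>

enum class Dependency { Pinned, DependsOnlyOnTest };

// Decides how an unsafe load should be pinned. Only a load that needs no
// bracketing barriers and whose constant offset provably falls inside the
// instance is left free to float; everything else stays pinned.
Dependency choose_load_dependency(bool need_mem_bar,     // barriers bracket this access
                                  bool base_is_instance, // base has a known instance type
                                  int64_t offset,        // constant offset, or -1 if unknown
                                  int64_t instance_size) // size from the klass layout helper
{
  if (!need_mem_bar && base_is_instance &&
      offset >= 0 && offset < instance_size) {
    return Dependency::DependsOnlyOnTest; // provably in bounds, may float
  }
  return Dependency::Pinned;              // keep the load below its guards and barrier
}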

