< prev index next >

src/hotspot/share/gc/shenandoah/c2/shenandoahSupport.cpp

Print this page
rev 58543 : 8241605: Shenandoah: More aggressive reference discovery


 843 void ShenandoahBarrierC2Support::follow_barrier_uses(Node* n, Node* ctrl, Unique_Node_List& uses, PhaseIdealLoop* phase) {
 844   for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
 845     Node* u = n->fast_out(i);
 846     if (!u->is_CFG() && phase->get_ctrl(u) == ctrl && (!u->is_Phi() || !u->in(0)->is_Loop() || u->in(LoopNode::LoopBackControl) != n)) {
 847       uses.push(u);
 848     }
 849   }
 850 }
 851 
// Turn a strip-mined loop nest back into a plain loop: replace the
// OuterStripMinedLoop head with a generic LoopNode and its loop-end with a
// generic IfNode (same inputs/probability), then clear the inner counted
// loop's strip-mined flag so later passes treat the nest as ordinary loops.
 852 static void hide_strip_mined_loop(OuterStripMinedLoopNode* outer, CountedLoopNode* inner, PhaseIdealLoop* phase) {
 853   OuterStripMinedLoopEndNode* le = inner->outer_loop_end();
 854   Node* new_outer = new LoopNode(outer->in(LoopNode::EntryControl), outer->in(LoopNode::LoopBackControl));
 855   phase->register_control(new_outer, phase->get_loop(outer), outer->in(LoopNode::EntryControl));
 856   Node* new_le = new IfNode(le->in(0), le->in(1), le->_prob, le->_fcnt);
 857   phase->register_control(new_le, phase->get_loop(le), le->in(0));
 858   phase->lazy_replace(outer, new_outer);
 859   phase->lazy_replace(le, new_le);
 860   inner->clear_strip_mined();
 861 }
 862 
// Emit a runtime "is the heap stable?" test at 'ctrl': load the per-thread
// gc-state byte and branch on (gc_state & HAS_FORWARDED) != 0.
// On return, 'ctrl' is the true projection (forwarded objects may exist,
// barrier work needed) and 'heap_stable_ctrl' is the false projection
// (heap stable, fast path). 'raw_mem' is the raw memory state to load from.
 863 void ShenandoahBarrierC2Support::test_heap_stable(Node*& ctrl, Node* raw_mem, Node*& heap_stable_ctrl,
 864                                                   PhaseIdealLoop* phase) {
 865   IdealLoopTree* loop = phase->get_loop(ctrl);
 866   Node* thread = new ThreadLocalNode();
 867   phase->register_new_node(thread, ctrl);
 868   Node* offset = phase->igvn().MakeConX(in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
 869   phase->set_ctrl(offset, phase->C->root()); // constant: pin at root
 870   Node* gc_state_addr = new AddPNode(phase->C->top(), thread, offset);
 871   phase->register_new_node(gc_state_addr, ctrl);
 872   uint gc_state_idx = Compile::AliasIdxRaw;
 873   const TypePtr* gc_state_adr_type = NULL; // debug-mode-only argument
 874   debug_only(gc_state_adr_type = phase->C->get_adr_type(gc_state_idx));
 875 
 876   Node* gc_state = new LoadBNode(ctrl, raw_mem, gc_state_addr, gc_state_adr_type, TypeInt::BYTE, MemNode::unordered);
 877   phase->register_new_node(gc_state, ctrl);
 878   Node* heap_stable_and = new AndINode(gc_state, phase->igvn().intcon(ShenandoahHeap::HAS_FORWARDED));
 879   phase->register_new_node(heap_stable_and, ctrl);
 880   Node* heap_stable_cmp = new CmpINode(heap_stable_and, phase->igvn().zerocon(T_INT));
 881   phase->register_new_node(heap_stable_cmp, ctrl);
 882   Node* heap_stable_test = new BoolNode(heap_stable_cmp, BoolTest::ne);
 883   phase->register_new_node(heap_stable_test, ctrl);
 884   IfNode* heap_stable_iff = new IfNode(ctrl, heap_stable_test, PROB_UNLIKELY(0.999), COUNT_UNKNOWN);
 885   phase->register_control(heap_stable_iff, loop, ctrl);
 886 
 887   heap_stable_ctrl = new IfFalseNode(heap_stable_iff); // flag clear: heap stable
 888   phase->register_control(heap_stable_ctrl, loop, heap_stable_iff);
 889   ctrl = new IfTrueNode(heap_stable_iff); // flag set: barrier path
 890   phase->register_control(ctrl, loop, heap_stable_iff);
 891 
 892   assert(is_heap_stable_test(heap_stable_iff), "Should match the shape");
 893 }
 894 
 895 void ShenandoahBarrierC2Support::test_null(Node*& ctrl, Node* val, Node*& null_ctrl, PhaseIdealLoop* phase) {
 896   const Type* val_t = phase->igvn().type(val);
 897   if (val_t->meet(TypePtr::NULL_PTR) == val_t) {
 898     IdealLoopTree* loop = phase->get_loop(ctrl);
 899     Node* null_cmp = new CmpPNode(val, phase->igvn().zerocon(T_OBJECT));
 900     phase->register_new_node(null_cmp, ctrl);
 901     Node* null_test = new BoolNode(null_cmp, BoolTest::ne);
 902     phase->register_new_node(null_test, ctrl);
 903     IfNode* null_iff = new IfNode(ctrl, null_test, PROB_LIKELY(0.999), COUNT_UNKNOWN);
 904     phase->register_control(null_iff, loop, ctrl);
 905     ctrl = new IfTrueNode(null_iff);
 906     phase->register_control(ctrl, loop, null_iff);
 907     null_ctrl = new IfFalseNode(null_iff);
 908     phase->register_control(null_ctrl, loop, null_iff);
 909   }
 910 }
 911 
 912 Node* ShenandoahBarrierC2Support::clone_null_check(Node*& c, Node* val, Node* unc_ctrl, PhaseIdealLoop* phase) {


1420       }
1421     }
1422 
1423     Node* uncasted_val = val;
1424     if (unc != NULL) {
1425       uncasted_val = val->in(1);
1426     }
1427 
1428     Node* heap_stable_ctrl = NULL;
1429     Node* null_ctrl = NULL;
1430 
1431     assert(val->bottom_type()->make_oopptr(), "need oop");
1432     assert(val->bottom_type()->make_oopptr()->const_oop() == NULL, "expect non-constant");
1433 
1434     enum { _heap_stable = 1, _not_cset, _evac_path, _null_path, PATH_LIMIT };
1435     Node* region = new RegionNode(PATH_LIMIT);
1436     Node* val_phi = new PhiNode(region, uncasted_val->bottom_type()->is_oopptr());
1437     Node* raw_mem_phi = PhiNode::make(region, raw_mem, Type::MEMORY, TypeRawPtr::BOTTOM);
1438 
1439     // Stable path.
1440     test_heap_stable(ctrl, raw_mem, heap_stable_ctrl, phase);
1441     IfNode* heap_stable_iff = heap_stable_ctrl->in(0)->as_If();
1442 
1443     // Heap stable case
1444     region->init_req(_heap_stable, heap_stable_ctrl);
1445     val_phi->init_req(_heap_stable, uncasted_val);
1446     raw_mem_phi->init_req(_heap_stable, raw_mem);
1447 
1448     Node* reg2_ctrl = NULL;
1449     // Null case
1450     test_null(ctrl, val, null_ctrl, phase);
1451     if (null_ctrl != NULL) {
1452       reg2_ctrl = null_ctrl->in(0);
1453       region->init_req(_null_path, null_ctrl);
1454       val_phi->init_req(_null_path, uncasted_val);
1455       raw_mem_phi->init_req(_null_path, raw_mem);
1456     } else {
1457       region->del_req(_null_path);
1458       val_phi->del_req(_null_path);
1459       raw_mem_phi->del_req(_null_path);
1460     }


1553     for(uint next = 0; next < uses.size(); next++ ) {
1554       Node *n = uses.at(next);
1555       assert(phase->get_ctrl(n) == ctrl, "bad control");
1556       assert(n != init_raw_mem, "should leave input raw mem above the barrier");
1557       phase->set_ctrl(n, region);
1558       follow_barrier_uses(n, ctrl, uses, phase);
1559     }
1560 
1561     // The slow path call produces memory: hook the raw memory phi
1562     // from the expanded load reference barrier with the rest of the graph
1563     // which may require adding memory phis at every post dominated
1564     // region and at enclosing loop heads. Use the memory state
1565     // collected in memory_nodes to fix the memory graph. Update that
1566     // memory state as we go.
1567     fixer.fix_mem(ctrl, region, init_raw_mem, raw_mem_for_ctrl, raw_mem_phi, uses);
1568   }
1569   // Done expanding load-reference-barriers.
1570   assert(ShenandoahBarrierSetC2::bsc2()->state()->load_reference_barriers_count() == 0, "all load reference barrier nodes should have been replaced");
1571 
1572   for (int i = state->enqueue_barriers_count() - 1; i >= 0; i--) {
1573     Node* barrier = state->enqueue_barrier(i);
1574     Node* pre_val = barrier->in(1);
1575 
1576     if (phase->igvn().type(pre_val)->higher_equal(TypePtr::NULL_PTR)) {
1577       ShouldNotReachHere();


1578       continue;
1579     }
1580 
1581     Node* ctrl = phase->get_ctrl(barrier);
1582 
1583     if (ctrl->is_Proj() && ctrl->in(0)->is_CallJava()) {
1584       assert(is_dominator(phase->get_ctrl(pre_val), ctrl->in(0)->in(0), pre_val, ctrl->in(0), phase), "can't move");
1585       ctrl = ctrl->in(0)->in(0);
1586       phase->set_ctrl(barrier, ctrl);
1587     } else if (ctrl->is_CallRuntime()) {
1588       assert(is_dominator(phase->get_ctrl(pre_val), ctrl->in(0), pre_val, ctrl, phase), "can't move");
1589       ctrl = ctrl->in(0);
1590       phase->set_ctrl(barrier, ctrl);
1591     }
1592 
1593     Node* init_ctrl = ctrl;
1594     IdealLoopTree* loop = phase->get_loop(ctrl);
1595     Node* raw_mem = fixer.find_mem(ctrl, barrier);
1596     Node* init_raw_mem = raw_mem;
1597     Node* raw_mem_for_ctrl = fixer.find_mem(ctrl, NULL);
1598     Node* heap_stable_ctrl = NULL;
1599     Node* null_ctrl = NULL;
1600     uint last = phase->C->unique();
1601 
1602     enum { _heap_stable = 1, _heap_unstable, PATH_LIMIT };
1603     Node* region = new RegionNode(PATH_LIMIT);
1604     Node* phi = PhiNode::make(region, raw_mem, Type::MEMORY, TypeRawPtr::BOTTOM);
1605 
1606     enum { _fast_path = 1, _slow_path, _null_path, PATH_LIMIT2 };
1607     Node* region2 = new RegionNode(PATH_LIMIT2);
1608     Node* phi2 = PhiNode::make(region2, raw_mem, Type::MEMORY, TypeRawPtr::BOTTOM);
1609 
1610     // Stable path.
1611     test_heap_stable(ctrl, raw_mem, heap_stable_ctrl, phase);
1612     region->init_req(_heap_stable, heap_stable_ctrl);
1613     phi->init_req(_heap_stable, raw_mem);
1614 
1615     // Null path
1616     Node* reg2_ctrl = NULL;
1617     test_null(ctrl, pre_val, null_ctrl, phase);
1618     if (null_ctrl != NULL) {
1619       reg2_ctrl = null_ctrl->in(0);
1620       region2->init_req(_null_path, null_ctrl);
1621       phi2->init_req(_null_path, raw_mem);
1622     } else {
1623       region2->del_req(_null_path);
1624       phi2->del_req(_null_path);
1625     }
1626 
1627     const int index_offset = in_bytes(ShenandoahThreadLocalData::satb_mark_queue_index_offset());
1628     const int buffer_offset = in_bytes(ShenandoahThreadLocalData::satb_mark_queue_buffer_offset());
1629     Node* thread = new ThreadLocalNode();
1630     phase->register_new_node(thread, ctrl);
1631     Node* buffer_adr = new AddPNode(phase->C->top(), thread, phase->igvn().MakeConX(buffer_offset));


2154     if (n == NULL) {
2155       return n;
2156     } else if (n->bottom_type() == TypePtr::NULL_PTR) {
2157       return n;
2158     } else if (n->bottom_type()->make_oopptr() != NULL && n->bottom_type()->make_oopptr()->const_oop() != NULL) {
2159       return n;
2160     } else if (n->is_ConstraintCast() ||
2161                n->Opcode() == Op_DecodeN ||
2162                n->Opcode() == Op_EncodeP) {
2163       n = n->in(1);
2164     } else if (n->is_Proj()) {
2165       n = n->in(0);
2166     } else {
2167       return n;
2168     }
2169   }
2170   ShouldNotReachHere();
2171   return NULL;
2172 }
2173 

















































































2174 Node* ShenandoahEnqueueBarrierNode::Identity(PhaseGVN* phase) {
2175   PhaseIterGVN* igvn = phase->is_IterGVN();
2176 
2177   Node* n = next(in(1));
2178 
2179   int cont = needed(n);
2180 
2181   if (cont == NotNeeded) {
2182     return in(1);
2183   } else if (cont == MaybeNeeded) {
2184     if (igvn == NULL) {
2185       phase->record_for_igvn(this);
2186       return this;
2187     } else {
2188       ResourceMark rm;
2189       Unique_Node_List wq;
2190       uint wq_i = 0;
2191 
2192       for (;;) {
2193         if (n->is_Phi()) {


3228       case Op_WeakCompareAndSwapB:
3229       case Op_WeakCompareAndSwapS:
3230       case Op_WeakCompareAndSwapN:
3231       case Op_WeakCompareAndSwapP:
3232       case Op_ShenandoahCompareAndSwapN:
3233       case Op_ShenandoahCompareAndSwapP:
3234       case Op_ShenandoahWeakCompareAndSwapN:
3235       case Op_ShenandoahWeakCompareAndSwapP:
3236       case Op_ShenandoahCompareAndExchangeN:
3237       case Op_ShenandoahCompareAndExchangeP:
3238       case Op_GetAndSetL:
3239       case Op_GetAndSetI:
3240       case Op_GetAndSetB:
3241       case Op_GetAndSetS:
3242       case Op_GetAndSetP:
3243       case Op_GetAndSetN:
3244       case Op_GetAndAddL:
3245       case Op_GetAndAddI:
3246       case Op_GetAndAddB:
3247       case Op_GetAndAddS:
3248       case Op_ShenandoahEnqueueBarrier:
3249       case Op_FastLock:
3250       case Op_FastUnlock:
3251       case Op_Rethrow:
3252       case Op_Return:
3253       case Op_StoreB:
3254       case Op_StoreC:
3255       case Op_StoreD:
3256       case Op_StoreF:
3257       case Op_StoreL:
3258       case Op_StoreLConditional:
3259       case Op_StoreI:
3260       case Op_StoreIConditional:
3261       case Op_StoreN:
3262       case Op_StoreP:
3263       case Op_StoreVector:
3264       case Op_StrInflatedCopy:
3265       case Op_StrCompressedCopy:
3266       case Op_EncodeP:
3267       case Op_CastP2X:
3268       case Op_SafePoint:


3305         if (ShenandoahOptimizeStaticFinals && is_static && is_final) {
3306           // Loading the constant does not require barriers: it should be handled
3307           // as part of GC roots already.
3308         } else {
3309           return false;
3310         }
3311         break;
3312       }
3313       case Op_Conv2B:
3314       case Op_LoadRange:
3315       case Op_LoadKlass:
3316       case Op_LoadNKlass:
3317         // Do not require barriers
3318         break;
3319       case Op_AddP:
3320       case Op_CheckCastPP:
3321       case Op_CastPP:
3322       case Op_CMoveP:
3323       case Op_Phi:
3324       case Op_ShenandoahLoadReferenceBarrier:

3325         // Whether or not these need the barriers depends on their users
3326         visit_users = true;
3327         break;
3328       default: {
3329 #ifdef ASSERT
3330         fatal("Unknown node in is_redundant: %s", NodeClassNames[n->Opcode()]);
3331 #else
3332         // Default to have excess barriers, rather than miss some.
3333         return false;
3334 #endif
3335       }
3336     }
3337 
3338     stack.pop();
3339     if (visit_users) {
3340       for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
3341         Node* user = n->fast_out(i);
3342         if (user != NULL) {
3343           stack.push(user, 0);
3344         }




// Push onto 'uses' every non-CFG use of n that is controlled by 'ctrl',
// except the back-edge input of a loop phi (that use is reached through the
// loop itself rather than followed here).
 843 void ShenandoahBarrierC2Support::follow_barrier_uses(Node* n, Node* ctrl, Unique_Node_List& uses, PhaseIdealLoop* phase) {
 844   for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
 845     Node* u = n->fast_out(i);
 846     if (!u->is_CFG() && phase->get_ctrl(u) == ctrl && (!u->is_Phi() || !u->in(0)->is_Loop() || u->in(LoopNode::LoopBackControl) != n)) {
 847       uses.push(u);
 848     }
 849   }
 850 }
 851 
// Turn a strip-mined loop nest back into a plain loop: replace the
// OuterStripMinedLoop head with a generic LoopNode and its loop-end with a
// generic IfNode (same inputs/probability), then clear the inner counted
// loop's strip-mined flag so later passes treat the nest as ordinary loops.
 852 static void hide_strip_mined_loop(OuterStripMinedLoopNode* outer, CountedLoopNode* inner, PhaseIdealLoop* phase) {
 853   OuterStripMinedLoopEndNode* le = inner->outer_loop_end();
 854   Node* new_outer = new LoopNode(outer->in(LoopNode::EntryControl), outer->in(LoopNode::LoopBackControl));
 855   phase->register_control(new_outer, phase->get_loop(outer), outer->in(LoopNode::EntryControl));
 856   Node* new_le = new IfNode(le->in(0), le->in(1), le->_prob, le->_fcnt);
 857   phase->register_control(new_le, phase->get_loop(le), le->in(0));
 858   phase->lazy_replace(outer, new_outer);
 859   phase->lazy_replace(le, new_le);
 860   inner->clear_strip_mined();
 861 }
 862 
// Emit a runtime gc-state test at 'ctrl': load the per-thread gc-state byte
// and branch on (gc_state & flags) != 0. Generalizes the old heap-stable
// test to any combination of gc-state bits.
// On return, 'ctrl' is the true projection (some requested bit is set,
// barrier work needed) and 'heap_stable_ctrl' is the false projection
// (no bit set, fast path). 'raw_mem' is the raw memory state to load from.
 863 void ShenandoahBarrierC2Support::test_heap_state(Node*& ctrl, Node* raw_mem, Node*& heap_stable_ctrl,
 864                                                  PhaseIdealLoop* phase, int flags) {
 865   IdealLoopTree* loop = phase->get_loop(ctrl);
 866   Node* thread = new ThreadLocalNode();
 867   phase->register_new_node(thread, ctrl);
 868   Node* offset = phase->igvn().MakeConX(in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
 869   phase->set_ctrl(offset, phase->C->root()); // constant: pin at root
 870   Node* gc_state_addr = new AddPNode(phase->C->top(), thread, offset);
 871   phase->register_new_node(gc_state_addr, ctrl);
 872   uint gc_state_idx = Compile::AliasIdxRaw;
 873   const TypePtr* gc_state_adr_type = NULL; // debug-mode-only argument
 874   debug_only(gc_state_adr_type = phase->C->get_adr_type(gc_state_idx));
 875 
 876   Node* gc_state = new LoadBNode(ctrl, raw_mem, gc_state_addr, gc_state_adr_type, TypeInt::BYTE, MemNode::unordered);
 877   phase->register_new_node(gc_state, ctrl);
 878   Node* heap_stable_and = new AndINode(gc_state, phase->igvn().intcon(flags));
 879   phase->register_new_node(heap_stable_and, ctrl);
 880   Node* heap_stable_cmp = new CmpINode(heap_stable_and, phase->igvn().zerocon(T_INT));
 881   phase->register_new_node(heap_stable_cmp, ctrl);
 882   Node* heap_stable_test = new BoolNode(heap_stable_cmp, BoolTest::ne);
 883   phase->register_new_node(heap_stable_test, ctrl);
 884   IfNode* heap_stable_iff = new IfNode(ctrl, heap_stable_test, PROB_UNLIKELY(0.999), COUNT_UNKNOWN);
 885   phase->register_control(heap_stable_iff, loop, ctrl);
 886 
 887   heap_stable_ctrl = new IfFalseNode(heap_stable_iff); // bits clear: fast path
 888   phase->register_control(heap_stable_ctrl, loop, heap_stable_iff);
 889   ctrl = new IfTrueNode(heap_stable_iff); // bits set: barrier path
 890   phase->register_control(ctrl, loop, heap_stable_iff);
 891 
 892   assert(is_heap_state_test(heap_stable_iff, flags), "Should match the shape");
 893 }
 894 
// If the type of 'val' admits NULL, emit an explicit NULL check at 'ctrl'.
// On return, 'ctrl' is the not-null (true) projection and 'null_ctrl' the
// null (false) projection; when no check is emitted, 'null_ctrl' keeps the
// value the caller initialized it to.
 895 void ShenandoahBarrierC2Support::test_null(Node*& ctrl, Node* val, Node*& null_ctrl, PhaseIdealLoop* phase) {
 896   const Type* val_t = phase->igvn().type(val);
 897   if (val_t->meet(TypePtr::NULL_PTR) == val_t) { // type admits NULL?
 898     IdealLoopTree* loop = phase->get_loop(ctrl);
 899     Node* null_cmp = new CmpPNode(val, phase->igvn().zerocon(T_OBJECT));
 900     phase->register_new_node(null_cmp, ctrl);
 901     Node* null_test = new BoolNode(null_cmp, BoolTest::ne);
 902     phase->register_new_node(null_test, ctrl);
 903     IfNode* null_iff = new IfNode(ctrl, null_test, PROB_LIKELY(0.999), COUNT_UNKNOWN);
 904     phase->register_control(null_iff, loop, ctrl);
 905     ctrl = new IfTrueNode(null_iff);
 906     phase->register_control(ctrl, loop, null_iff);
 907     null_ctrl = new IfFalseNode(null_iff);
 908     phase->register_control(null_ctrl, loop, null_iff);
 909   }
 910 }
 911 
 912 Node* ShenandoahBarrierC2Support::clone_null_check(Node*& c, Node* val, Node* unc_ctrl, PhaseIdealLoop* phase) {


1420       }
1421     }
1422 
1423     Node* uncasted_val = val;
1424     if (unc != NULL) {
1425       uncasted_val = val->in(1);
1426     }
1427 
1428     Node* heap_stable_ctrl = NULL;
1429     Node* null_ctrl = NULL;
1430 
1431     assert(val->bottom_type()->make_oopptr(), "need oop");
1432     assert(val->bottom_type()->make_oopptr()->const_oop() == NULL, "expect non-constant");
1433 
1434     enum { _heap_stable = 1, _not_cset, _evac_path, _null_path, PATH_LIMIT };
1435     Node* region = new RegionNode(PATH_LIMIT);
1436     Node* val_phi = new PhiNode(region, uncasted_val->bottom_type()->is_oopptr());
1437     Node* raw_mem_phi = PhiNode::make(region, raw_mem, Type::MEMORY, TypeRawPtr::BOTTOM);
1438 
1439     // Stable path.
1440     test_heap_state(ctrl, raw_mem, heap_stable_ctrl, phase, ShenandoahHeap::HAS_FORWARDED);
1441     IfNode* heap_stable_iff = heap_stable_ctrl->in(0)->as_If();
1442 
1443     // Heap stable case
1444     region->init_req(_heap_stable, heap_stable_ctrl);
1445     val_phi->init_req(_heap_stable, uncasted_val);
1446     raw_mem_phi->init_req(_heap_stable, raw_mem);
1447 
1448     Node* reg2_ctrl = NULL;
1449     // Null case
1450     test_null(ctrl, val, null_ctrl, phase);
1451     if (null_ctrl != NULL) {
1452       reg2_ctrl = null_ctrl->in(0);
1453       region->init_req(_null_path, null_ctrl);
1454       val_phi->init_req(_null_path, uncasted_val);
1455       raw_mem_phi->init_req(_null_path, raw_mem);
1456     } else {
1457       region->del_req(_null_path);
1458       val_phi->del_req(_null_path);
1459       raw_mem_phi->del_req(_null_path);
1460     }


1553     for(uint next = 0; next < uses.size(); next++ ) {
1554       Node *n = uses.at(next);
1555       assert(phase->get_ctrl(n) == ctrl, "bad control");
1556       assert(n != init_raw_mem, "should leave input raw mem above the barrier");
1557       phase->set_ctrl(n, region);
1558       follow_barrier_uses(n, ctrl, uses, phase);
1559     }
1560 
1561     // The slow path call produces memory: hook the raw memory phi
1562     // from the expanded load reference barrier with the rest of the graph
1563     // which may require adding memory phis at every post dominated
1564     // region and at enclosing loop heads. Use the memory state
1565     // collected in memory_nodes to fix the memory graph. Update that
1566     // memory state as we go.
1567     fixer.fix_mem(ctrl, region, init_raw_mem, raw_mem_for_ctrl, raw_mem_phi, uses);
1568   }
1569   // Done expanding load-reference-barriers.
1570   assert(ShenandoahBarrierSetC2::bsc2()->state()->load_reference_barriers_count() == 0, "all load reference barrier nodes should have been replaced");
1571 
1572   for (int i = state->enqueue_barriers_count() - 1; i >= 0; i--) {
1573     ShenandoahEnqueueBarrierNode* barrier = state->enqueue_barrier(i);
1574     Node* pre_val = barrier->in(1);
1575 
1576     assert(!phase->igvn().type(pre_val)->higher_equal(TypePtr::NULL_PTR), "no known-NULLs here");
1577 
1578     if (barrier->can_eliminate(phase)) {
1579       phase->igvn().replace_node(barrier, pre_val);
1580       continue;
1581     }
1582 
1583     Node* ctrl = phase->get_ctrl(barrier);
1584 
1585     if (ctrl->is_Proj() && ctrl->in(0)->is_CallJava()) {
1586       assert(is_dominator(phase->get_ctrl(pre_val), ctrl->in(0)->in(0), pre_val, ctrl->in(0), phase), "can't move");
1587       ctrl = ctrl->in(0)->in(0);
1588       phase->set_ctrl(barrier, ctrl);
1589     } else if (ctrl->is_CallRuntime()) {
1590       assert(is_dominator(phase->get_ctrl(pre_val), ctrl->in(0), pre_val, ctrl, phase), "can't move");
1591       ctrl = ctrl->in(0);
1592       phase->set_ctrl(barrier, ctrl);
1593     }
1594 
1595     Node* init_ctrl = ctrl;
1596     IdealLoopTree* loop = phase->get_loop(ctrl);
1597     Node* raw_mem = fixer.find_mem(ctrl, barrier);
1598     Node* init_raw_mem = raw_mem;
1599     Node* raw_mem_for_ctrl = fixer.find_mem(ctrl, NULL);
1600     Node* heap_stable_ctrl = NULL;
1601     Node* null_ctrl = NULL;
1602     uint last = phase->C->unique();
1603 
1604     enum { _heap_stable = 1, _heap_unstable, PATH_LIMIT };
1605     Node* region = new RegionNode(PATH_LIMIT);
1606     Node* phi = PhiNode::make(region, raw_mem, Type::MEMORY, TypeRawPtr::BOTTOM);
1607 
1608     enum { _fast_path = 1, _slow_path, _null_path, PATH_LIMIT2 };
1609     Node* region2 = new RegionNode(PATH_LIMIT2);
1610     Node* phi2 = PhiNode::make(region2, raw_mem, Type::MEMORY, TypeRawPtr::BOTTOM);
1611 
1612     // Stable path.
1613     test_heap_state(ctrl, raw_mem, heap_stable_ctrl, phase, ShenandoahHeap::TRAVERSAL | ShenandoahHeap::MARKING);
1614     region->init_req(_heap_stable, heap_stable_ctrl);
1615     phi->init_req(_heap_stable, raw_mem);
1616 
1617     // Null path
1618     Node* reg2_ctrl = NULL;
1619     test_null(ctrl, pre_val, null_ctrl, phase);
1620     if (null_ctrl != NULL) {
1621       reg2_ctrl = null_ctrl->in(0);
1622       region2->init_req(_null_path, null_ctrl);
1623       phi2->init_req(_null_path, raw_mem);
1624     } else {
1625       region2->del_req(_null_path);
1626       phi2->del_req(_null_path);
1627     }
1628 
1629     const int index_offset = in_bytes(ShenandoahThreadLocalData::satb_mark_queue_index_offset());
1630     const int buffer_offset = in_bytes(ShenandoahThreadLocalData::satb_mark_queue_buffer_offset());
1631     Node* thread = new ThreadLocalNode();
1632     phase->register_new_node(thread, ctrl);
1633     Node* buffer_adr = new AddPNode(phase->C->top(), thread, phase->igvn().MakeConX(buffer_offset));


2156     if (n == NULL) {
2157       return n;
2158     } else if (n->bottom_type() == TypePtr::NULL_PTR) {
2159       return n;
2160     } else if (n->bottom_type()->make_oopptr() != NULL && n->bottom_type()->make_oopptr()->const_oop() != NULL) {
2161       return n;
2162     } else if (n->is_ConstraintCast() ||
2163                n->Opcode() == Op_DecodeN ||
2164                n->Opcode() == Op_EncodeP) {
2165       n = n->in(1);
2166     } else if (n->is_Proj()) {
2167       n = n->in(0);
2168     } else {
2169       return n;
2170     }
2171   }
2172   ShouldNotReachHere();
2173   return NULL;
2174 }
2175 
2176 bool ShenandoahEnqueueBarrierNode::can_eliminate(PhaseIdealLoop* phase) {
2177   return ShenandoahHeap::heap()->traversal_gc() == NULL &&
2178          is_redundant() && ShenandoahAggressiveReferenceDiscovery;
2179 }
2180 
2181 bool ShenandoahEnqueueBarrierNode::is_redundant() {
2182   Unique_Node_List visited;
2183   Node_Stack stack(0);
2184   stack.push(this, 0);
2185 
2186   while (stack.size() > 0) {
2187     Node* n = stack.node();
2188     if (visited.member(n)) {
2189       stack.pop();
2190       continue;
2191     }
2192     visited.push(n);
2193     bool visit_users = false;
2194     switch (n->Opcode()) {
2195       case Op_CallStaticJava:
2196         if (n->as_CallStaticJava()->uncommon_trap_request() == 0) {
2197           return false;
2198         }
2199         break;
2200       case Op_CallDynamicJava:
2201       case Op_CompareAndExchangeN:
2202       case Op_CompareAndExchangeP:
2203       case Op_CompareAndSwapN:
2204       case Op_CompareAndSwapP:
2205       case Op_ShenandoahCompareAndSwapN:
2206       case Op_ShenandoahCompareAndSwapP:
2207       case Op_GetAndSetN:
2208       case Op_GetAndSetP:
2209       case Op_Return:
2210       case Op_StoreN:
2211       case Op_StoreP:
2212         return false;
2213         break;
2214       case Op_AddP:
2215       case Op_Allocate:
2216       case Op_AllocateArray:
2217       case Op_ArrayCopy:
2218       case Op_CmpP:
2219       case Op_LoadL:
2220       case Op_SafePoint:
2221       case Op_SubTypeCheck:
2222       case Op_StoreLConditional:
2223       case Op_StoreIConditional:
2224       case Op_FastUnlock:
2225         break;
2226       case Op_CastPP:
2227       case Op_CheckCastPP:
2228       case Op_CMoveN:
2229       case Op_CMoveP:
2230       case Op_EncodeP:
2231       case Op_Phi:
2232       case Op_ShenandoahEnqueueBarrier:
2233         visit_users = true;
2234         break;
2235       default: {
2236 #ifdef ASSERT
2237         fatal("Unknown node in is_redundant: %s", NodeClassNames[n->Opcode()]);
2238 #endif
2239         // Default to useful: better to have excess barriers, rather than miss some.
2240         return false;
2241       }
2242     }
2243 
2244     stack.pop();
2245     if (visit_users) {
2246       for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
2247         Node* user = n->fast_out(i);
2248         if (user != NULL) {
2249           stack.push(user, 0);
2250         }
2251       }
2252     }
2253   }
2254   return true;
2255 }
2256 
2257 Node* ShenandoahEnqueueBarrierNode::Identity(PhaseGVN* phase) {
2258   PhaseIterGVN* igvn = phase->is_IterGVN();
2259 
2260   Node* n = next(in(1));
2261 
2262   int cont = needed(n);
2263 
2264   if (cont == NotNeeded) {
2265     return in(1);
2266   } else if (cont == MaybeNeeded) {
2267     if (igvn == NULL) {
2268       phase->record_for_igvn(this);
2269       return this;
2270     } else {
2271       ResourceMark rm;
2272       Unique_Node_List wq;
2273       uint wq_i = 0;
2274 
2275       for (;;) {
2276         if (n->is_Phi()) {


3311       case Op_WeakCompareAndSwapB:
3312       case Op_WeakCompareAndSwapS:
3313       case Op_WeakCompareAndSwapN:
3314       case Op_WeakCompareAndSwapP:
3315       case Op_ShenandoahCompareAndSwapN:
3316       case Op_ShenandoahCompareAndSwapP:
3317       case Op_ShenandoahWeakCompareAndSwapN:
3318       case Op_ShenandoahWeakCompareAndSwapP:
3319       case Op_ShenandoahCompareAndExchangeN:
3320       case Op_ShenandoahCompareAndExchangeP:
3321       case Op_GetAndSetL:
3322       case Op_GetAndSetI:
3323       case Op_GetAndSetB:
3324       case Op_GetAndSetS:
3325       case Op_GetAndSetP:
3326       case Op_GetAndSetN:
3327       case Op_GetAndAddL:
3328       case Op_GetAndAddI:
3329       case Op_GetAndAddB:
3330       case Op_GetAndAddS:

3331       case Op_FastLock:
3332       case Op_FastUnlock:
3333       case Op_Rethrow:
3334       case Op_Return:
3335       case Op_StoreB:
3336       case Op_StoreC:
3337       case Op_StoreD:
3338       case Op_StoreF:
3339       case Op_StoreL:
3340       case Op_StoreLConditional:
3341       case Op_StoreI:
3342       case Op_StoreIConditional:
3343       case Op_StoreN:
3344       case Op_StoreP:
3345       case Op_StoreVector:
3346       case Op_StrInflatedCopy:
3347       case Op_StrCompressedCopy:
3348       case Op_EncodeP:
3349       case Op_CastP2X:
3350       case Op_SafePoint:


3387         if (ShenandoahOptimizeStaticFinals && is_static && is_final) {
3388           // Loading the constant does not require barriers: it should be handled
3389           // as part of GC roots already.
3390         } else {
3391           return false;
3392         }
3393         break;
3394       }
3395       case Op_Conv2B:
3396       case Op_LoadRange:
3397       case Op_LoadKlass:
3398       case Op_LoadNKlass:
3399         // Do not require barriers
3400         break;
3401       case Op_AddP:
3402       case Op_CheckCastPP:
3403       case Op_CastPP:
3404       case Op_CMoveP:
3405       case Op_Phi:
3406       case Op_ShenandoahLoadReferenceBarrier:
3407       case Op_ShenandoahEnqueueBarrier:
3408         // Whether or not these need the barriers depends on their users
3409         visit_users = true;
3410         break;
3411       default: {
3412 #ifdef ASSERT
3413         fatal("Unknown node in is_redundant: %s", NodeClassNames[n->Opcode()]);
3414 #else
3415         // Default to have excess barriers, rather than miss some.
3416         return false;
3417 #endif
3418       }
3419     }
3420 
3421     stack.pop();
3422     if (visit_users) {
3423       for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
3424         Node* user = n->fast_out(i);
3425         if (user != NULL) {
3426           stack.push(user, 0);
3427         }


< prev index next >