
src/hotspot/share/gc/shenandoah/c2/shenandoahSupport.cpp

rev 59455 : JDK-8245955: Shenandoah: Remove LRB/is_redundant optimization


1079   ShenandoahBarrierSetC2State* state = ShenandoahBarrierSetC2::bsc2()->state();
1080 
1081   Unique_Node_List uses;
1082   for (int i = 0; i < state->enqueue_barriers_count(); i++) {
1083     Node* barrier = state->enqueue_barrier(i);
1084     Node* ctrl = phase->get_ctrl(barrier);
1085     IdealLoopTree* loop = phase->get_loop(ctrl);
1086     if (loop->_head->is_OuterStripMinedLoop()) {
1087       // Expanding a barrier here will break loop strip mining
1088       // verification. Transform the loop so the loop nest doesn't
1089       // appear as strip mined.
1090       OuterStripMinedLoopNode* outer = loop->_head->as_OuterStripMinedLoop();
1091       hide_strip_mined_loop(outer, outer->unique_ctrl_out()->as_CountedLoop(), phase);
1092     }
1093   }
1094 
1095   Node_Stack stack(0);
1096   Node_List clones;
1097   for (int i = state->load_reference_barriers_count() - 1; i >= 0; i--) {
1098     ShenandoahLoadReferenceBarrierNode* lrb = state->load_reference_barrier(i);
1099     if (lrb->is_redundant()) {
1100       continue;
1101     }
1102 
1103     Node* ctrl = phase->get_ctrl(lrb);
1104     Node* val = lrb->in(ShenandoahLoadReferenceBarrierNode::ValueIn);
1105 
1106     CallStaticJavaNode* unc = NULL;
1107     Node* unc_ctrl = NULL;
1108     Node* uncasted_val = val;
1109 
1110     for (DUIterator_Fast imax, i = lrb->fast_outs(imax); i < imax; i++) {
1111       Node* u = lrb->fast_out(i);
1112       if (u->Opcode() == Op_CastPP &&
1113           u->in(0) != NULL &&
1114           phase->is_dominator(u->in(0), ctrl)) {
1115         const Type* u_t = phase->igvn().type(u);
1116 
1117         if (u_t->meet(TypePtr::NULL_PTR) != u_t &&
1118             u->in(0)->Opcode() == Op_IfTrue &&
1119             u->in(0)->as_Proj()->is_uncommon_trap_if_pattern(Deoptimization::Reason_none) &&
1120             u->in(0)->in(0)->is_If() &&
1121             u->in(0)->in(0)->in(1)->Opcode() == Op_Bool &&


1273                   assert(nb > 0, "should have replaced some uses");
1274                 }
1275                 replaced = true;
1276               }
1277             }
1278             if (!replaced) {
1279               stack.set_index(idx+1);
1280             }
1281           }
1282         } else {
1283           stack.pop();
1284           clones.pop();
1285         }
1286       } while (stack.size() > 0);
1287       assert(stack.size() == 0 && clones.size() == 0, "");
1288     }
1289   }
1290 
1291   for (int i = 0; i < state->load_reference_barriers_count(); i++) {
1292     ShenandoahLoadReferenceBarrierNode* lrb = state->load_reference_barrier(i);
1293     if (lrb->is_redundant()) {
1294       continue;
1295     }
1296     Node* ctrl = phase->get_ctrl(lrb);
1297     IdealLoopTree* loop = phase->get_loop(ctrl);
1298     if (loop->_head->is_OuterStripMinedLoop()) {
1299       // Expanding a barrier here will break loop strip mining
1300       // verification. Transform the loop so the loop nest doesn't
1301       // appear as strip mined.
1302       OuterStripMinedLoopNode* outer = loop->_head->as_OuterStripMinedLoop();
1303       hide_strip_mined_loop(outer, outer->unique_ctrl_out()->as_CountedLoop(), phase);
1304     }
1305   }
1306 
1307   // Expand load-reference-barriers
1308   MemoryGraphFixer fixer(Compile::AliasIdxRaw, true, phase);
1309   Unique_Node_List uses_to_ignore;
1310   for (int i = state->load_reference_barriers_count() - 1; i >= 0; i--) {
1311     ShenandoahLoadReferenceBarrierNode* lrb = state->load_reference_barrier(i);
1312     if (lrb->is_redundant()) {
1313       phase->igvn().replace_node(lrb, lrb->in(ShenandoahLoadReferenceBarrierNode::ValueIn));
1314       continue;
1315     }
1316     uint last = phase->C->unique();
1317     Node* ctrl = phase->get_ctrl(lrb);
1318     Node* val = lrb->in(ShenandoahLoadReferenceBarrierNode::ValueIn);
1319 
1320 
1321     Node* orig_ctrl = ctrl;
1322 
1323     Node* raw_mem = fixer.find_mem(ctrl, lrb);
1324     Node* init_raw_mem = raw_mem;
1325     Node* raw_mem_for_ctrl = fixer.find_mem(ctrl, NULL);
1326 
1327     IdealLoopTree* loop = phase->get_loop(ctrl);
1328 
1329     Node* heap_stable_ctrl = NULL;
1330     Node* null_ctrl = NULL;
1331 
1332     assert(val->bottom_type()->make_oopptr(), "need oop");
1333     assert(val->bottom_type()->make_oopptr()->const_oop() == NULL, "expect non-constant");
1334 
1335     enum { _heap_stable = 1, _not_cset, _evac_path, PATH_LIMIT };
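
The PATH_LIMIT enum above names the three ways control leaves the expanded barrier: the heap-stable fast path, the not-in-collection-set path, and the evacuation slow path. As orientation, here is a minimal standalone sketch of that control flow; every identifier in it (heap_is_stable, in_collection_set, lrb_slow_path, the oop alias) is a hypothetical stand-in for illustration, not a HotSpot symbol:

    // Illustrative sketch of the three expansion paths named above.
    // All names below are hypothetical stand-ins, not HotSpot symbols.
    using oop = void*;

    static bool heap_is_stable()       { return true;  }  // stub: gc-state says no marking/evac
    static bool in_collection_set(oop) { return false; }  // stub: collection-set membership test
    static oop  lrb_slow_path(oop obj) { return obj;   }  // stub: runtime call returning the forwardee

    oop load_reference_barrier(oop obj) {
      if (obj == nullptr)          return obj;  // nulls never need forwarding
      if (heap_is_stable())        return obj;  // _heap_stable: fast path out
      if (!in_collection_set(obj)) return obj;  // _not_cset: object cannot be mid-evacuation
      return lrb_slow_path(obj);                // _evac_path: fetch the forwarded copy
    }

    int main() {
      oop p = &p;  // any non-null dummy "object"
      return load_reference_barrier(p) == p ? 0 : 1;
    }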


3007       return true;
3008     case Op_CMoveN:
3009     case Op_CMoveP:
3010       return needs_barrier_impl(phase, n->in(2), visited) ||
3011              needs_barrier_impl(phase, n->in(3), visited);
3012     case Op_ShenandoahEnqueueBarrier:
3013       return needs_barrier_impl(phase, n->in(1), visited);
3014     case Op_CreateEx:
3015       return false;
3016     default:
3017       break;
3018   }
3019 #ifdef ASSERT
3020   tty->print("need barrier on?: ");
3021   tty->print_cr("ins:");
3022   n->dump(2);
3023   tty->print_cr("outs:");
3024   n->dump(-2);
3025   ShouldNotReachHere();
3026 #endif
3027   return true;
3028 }
3029 
3030 bool ShenandoahLoadReferenceBarrierNode::is_redundant() {
3031   Unique_Node_List visited;
3032   Node_Stack stack(0);
3033   stack.push(this, 0);
3034 
3035   // Check if the barrier is actually useful: go over nodes looking for useful uses
3036   // (e.g. memory accesses). Stop once we detect a required use. Otherwise, walk
3037   // until we run out of nodes, and then declare the barrier redundant.
3038   while (stack.size() > 0) {
3039     Node* n = stack.node();
3040     if (visited.member(n)) {
3041       stack.pop();
3042       continue;
3043     }
3044     visited.push(n);
3045     bool visit_users = false;
3046     switch (n->Opcode()) {
3047       case Op_CallStaticJava:
3048       case Op_CallDynamicJava:
3049       case Op_CallLeaf:
3050       case Op_CallLeafNoFP:
3051       case Op_CompareAndSwapL:
3052       case Op_CompareAndSwapI:
3053       case Op_CompareAndSwapB:
3054       case Op_CompareAndSwapS:
3055       case Op_CompareAndSwapN:
3056       case Op_CompareAndSwapP:
3057       case Op_CompareAndExchangeL:
3058       case Op_CompareAndExchangeI:
3059       case Op_CompareAndExchangeB:
3060       case Op_CompareAndExchangeS:
3061       case Op_CompareAndExchangeN:
3062       case Op_CompareAndExchangeP:
3063       case Op_WeakCompareAndSwapL:
3064       case Op_WeakCompareAndSwapI:
3065       case Op_WeakCompareAndSwapB:
3066       case Op_WeakCompareAndSwapS:
3067       case Op_WeakCompareAndSwapN:
3068       case Op_WeakCompareAndSwapP:
3069       case Op_ShenandoahCompareAndSwapN:
3070       case Op_ShenandoahCompareAndSwapP:
3071       case Op_ShenandoahWeakCompareAndSwapN:
3072       case Op_ShenandoahWeakCompareAndSwapP:
3073       case Op_ShenandoahCompareAndExchangeN:
3074       case Op_ShenandoahCompareAndExchangeP:
3075       case Op_GetAndSetL:
3076       case Op_GetAndSetI:
3077       case Op_GetAndSetB:
3078       case Op_GetAndSetS:
3079       case Op_GetAndSetP:
3080       case Op_GetAndSetN:
3081       case Op_GetAndAddL:
3082       case Op_GetAndAddI:
3083       case Op_GetAndAddB:
3084       case Op_GetAndAddS:
3085       case Op_ShenandoahEnqueueBarrier:
3086       case Op_FastLock:
3087       case Op_FastUnlock:
3088       case Op_Rethrow:
3089       case Op_Return:
3090       case Op_StoreB:
3091       case Op_StoreC:
3092       case Op_StoreD:
3093       case Op_StoreF:
3094       case Op_StoreL:
3095       case Op_StoreLConditional:
3096       case Op_StoreI:
3097       case Op_StoreIConditional:
3098       case Op_StoreN:
3099       case Op_StoreP:
3100       case Op_StoreVector:
3101       case Op_StrInflatedCopy:
3102       case Op_StrCompressedCopy:
3103       case Op_EncodeP:
3104       case Op_CastP2X:
3105       case Op_SafePoint:
3106       case Op_EncodeISOArray:
3107       case Op_AryEq:
3108       case Op_StrEquals:
3109       case Op_StrComp:
3110       case Op_StrIndexOf:
3111       case Op_StrIndexOfChar:
3112       case Op_HasNegatives:
3113         // Known to require barriers
3114         return false;
3115       case Op_CmpP: {
3116         if (n->in(1)->bottom_type()->higher_equal(TypePtr::NULL_PTR) ||
3117             n->in(2)->bottom_type()->higher_equal(TypePtr::NULL_PTR)) {
3118           // One of the sides is known null, no need for barrier.
3119         } else {
3120           return false;
3121         }
3122         break;
3123       }
3124       case Op_LoadB:
3125       case Op_LoadUB:
3126       case Op_LoadUS:
3127       case Op_LoadD:
3128       case Op_LoadF:
3129       case Op_LoadL:
3130       case Op_LoadI:
3131       case Op_LoadS:
3132       case Op_LoadN:
3133       case Op_LoadP:
3134       case Op_LoadVector: {
3135         const TypePtr* adr_type = n->adr_type();
3136         int alias_idx = Compile::current()->get_alias_index(adr_type);
3137         Compile::AliasType* alias_type = Compile::current()->alias_type(alias_idx);
3138         ciField* field = alias_type->field();
3139         bool is_static = field != NULL && field->is_static();
3140         bool is_final = field != NULL && field->is_final();
3141 
3142         if (ShenandoahOptimizeStaticFinals && is_static && is_final) {
3143           // Loading the constant does not require barriers: it should be handled
3144           // as part of GC roots already.
3145         } else {
3146           return false;
3147         }
3148         break;
3149       }
3150       case Op_Conv2B:
3151       case Op_LoadRange:
3152       case Op_LoadKlass:
3153       case Op_LoadNKlass:
3154         // Do not require barriers
3155         break;
3156       case Op_AddP:
3157       case Op_CheckCastPP:
3158       case Op_CastPP:
3159       case Op_CMoveP:
3160       case Op_Phi:
3161       case Op_ShenandoahLoadReferenceBarrier:
3162         // Whether or not these need the barriers depends on their users
3163         visit_users = true;
3164         break;
3165       default: {
3166 #ifdef ASSERT
3167         fatal("Unknown node in is_redundant: %s", NodeClassNames[n->Opcode()]);
3168 #else
3169         // Default to having excess barriers, rather than missing some.
3170         return false;
3171 #endif
3172       }
3173     }
3174 
3175     stack.pop();
3176     if (visit_users) {
3177       for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
3178         Node* user = n->fast_out(i);
3179         if (user != NULL) {
3180           stack.push(user, 0);
3181         }
3182       }
3183     }
3184   }
3185 
3186   // No need for barrier found.
3187   return true;
3188 }
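
The walk above is a plain worklist DFS over def-use edges: pass-through nodes (AddP, CastPP, Phi, other barriers) forward the question to their users, a known-safe use is simply skipped, and any use that needs the to-space invariant keeps the barrier. Below is a self-contained sketch of the same pattern on a deliberately simplified graph; the Node and Op types and the classification helpers are hypothetical stand-ins, not C2's IR:

    #include <unordered_set>
    #include <vector>

    // Simplified stand-ins for C2 nodes; not HotSpot types.
    enum class Op { Barrier, Store, CmpWithNull, AddP, Phi, LoadKlass };

    struct Node {
      Op op;
      std::vector<Node*> users;  // def-use edges, like fast_outs()
    };

    static bool requires_barrier(Op op) { return op == Op::Store; }
    static bool forwards_to_users(Op op) {
      return op == Op::Barrier || op == Op::AddP || op == Op::Phi;
    }

    // True when no transitive user of `barrier` needs its result fixed up,
    // mirroring the walk in the listing above.
    bool is_redundant(Node* barrier) {
      std::vector<Node*> stack{barrier};
      std::unordered_set<Node*> visited;
      while (!stack.empty()) {
        Node* n = stack.back();
        stack.pop_back();
        if (!visited.insert(n).second) continue;    // already processed
        if (requires_barrier(n->op)) return false;  // real use: keep the barrier
        if (!forwards_to_users(n->op)) continue;    // known-safe use: stop here
        for (Node* u : n->users) stack.push_back(u);
      }
      return true;  // ran out of nodes without finding a required use
    }

    int main() {
      Node store{Op::Store, {}};
      Node phi{Op::Phi, {&store}};
      Node barrier{Op::Barrier, {&phi}};
      return is_redundant(&barrier) ? 1 : 0;  // exits 0: barrier is needed
    }

As the new frame below shows, the patch drops both this function and every is_redundant() fast-path call above it, so after the change each recorded load-reference barrier is expanded unconditionally.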


1079   ShenandoahBarrierSetC2State* state = ShenandoahBarrierSetC2::bsc2()->state();
1080 
1081   Unique_Node_List uses;
1082   for (int i = 0; i < state->enqueue_barriers_count(); i++) {
1083     Node* barrier = state->enqueue_barrier(i);
1084     Node* ctrl = phase->get_ctrl(barrier);
1085     IdealLoopTree* loop = phase->get_loop(ctrl);
1086     if (loop->_head->is_OuterStripMinedLoop()) {
1087       // Expanding a barrier here will break loop strip mining
1088       // verification. Transform the loop so the loop nest doesn't
1089       // appear as strip mined.
1090       OuterStripMinedLoopNode* outer = loop->_head->as_OuterStripMinedLoop();
1091       hide_strip_mined_loop(outer, outer->unique_ctrl_out()->as_CountedLoop(), phase);
1092     }
1093   }
1094 
1095   Node_Stack stack(0);
1096   Node_List clones;
1097   for (int i = state->load_reference_barriers_count() - 1; i >= 0; i--) {
1098     ShenandoahLoadReferenceBarrierNode* lrb = state->load_reference_barrier(i);
1099 
1100     Node* ctrl = phase->get_ctrl(lrb);
1101     Node* val = lrb->in(ShenandoahLoadReferenceBarrierNode::ValueIn);
1102 
1103     CallStaticJavaNode* unc = NULL;
1104     Node* unc_ctrl = NULL;
1105     Node* uncasted_val = val;
1106 
1107     for (DUIterator_Fast imax, i = lrb->fast_outs(imax); i < imax; i++) {
1108       Node* u = lrb->fast_out(i);
1109       if (u->Opcode() == Op_CastPP &&
1110           u->in(0) != NULL &&
1111           phase->is_dominator(u->in(0), ctrl)) {
1112         const Type* u_t = phase->igvn().type(u);
1113 
1114         if (u_t->meet(TypePtr::NULL_PTR) != u_t &&
1115             u->in(0)->Opcode() == Op_IfTrue &&
1116             u->in(0)->as_Proj()->is_uncommon_trap_if_pattern(Deoptimization::Reason_none) &&
1117             u->in(0)->in(0)->is_If() &&
1118             u->in(0)->in(0)->in(1)->Opcode() == Op_Bool &&


1270                   assert(nb > 0, "should have replaced some uses");
1271                 }
1272                 replaced = true;
1273               }
1274             }
1275             if (!replaced) {
1276               stack.set_index(idx+1);
1277             }
1278           }
1279         } else {
1280           stack.pop();
1281           clones.pop();
1282         }
1283       } while (stack.size() > 0);
1284       assert(stack.size() == 0 && clones.size() == 0, "");
1285     }
1286   }
1287 
1288   for (int i = 0; i < state->load_reference_barriers_count(); i++) {
1289     ShenandoahLoadReferenceBarrierNode* lrb = state->load_reference_barrier(i);
1290     Node* ctrl = phase->get_ctrl(lrb);
1291     IdealLoopTree* loop = phase->get_loop(ctrl);
1292     if (loop->_head->is_OuterStripMinedLoop()) {
1293       // Expanding a barrier here will break loop strip mining
1294       // verification. Transform the loop so the loop nest doesn't
1295       // appear as strip mined.
1296       OuterStripMinedLoopNode* outer = loop->_head->as_OuterStripMinedLoop();
1297       hide_strip_mined_loop(outer, outer->unique_ctrl_out()->as_CountedLoop(), phase);
1298     }
1299   }
1300 
1301   // Expand load-reference-barriers
1302   MemoryGraphFixer fixer(Compile::AliasIdxRaw, true, phase);
1303   Unique_Node_List uses_to_ignore;
1304   for (int i = state->load_reference_barriers_count() - 1; i >= 0; i--) {
1305     ShenandoahLoadReferenceBarrierNode* lrb = state->load_reference_barrier(i);
1306     uint last = phase->C->unique();
1307     Node* ctrl = phase->get_ctrl(lrb);
1308     Node* val = lrb->in(ShenandoahLoadReferenceBarrierNode::ValueIn);
1309 
1310 
1311     Node* orig_ctrl = ctrl;
1312 
1313     Node* raw_mem = fixer.find_mem(ctrl, lrb);
1314     Node* init_raw_mem = raw_mem;
1315     Node* raw_mem_for_ctrl = fixer.find_mem(ctrl, NULL);
1316 
1317     IdealLoopTree* loop = phase->get_loop(ctrl);
1318 
1319     Node* heap_stable_ctrl = NULL;
1320     Node* null_ctrl = NULL;
1321 
1322     assert(val->bottom_type()->make_oopptr(), "need oop");
1323     assert(val->bottom_type()->make_oopptr()->const_oop() == NULL, "expect non-constant");
1324 
1325     enum { _heap_stable = 1, _not_cset, _evac_path, PATH_LIMIT };


2997       return true;
2998     case Op_CMoveN:
2999     case Op_CMoveP:
3000       return needs_barrier_impl(phase, n->in(2), visited) ||
3001              needs_barrier_impl(phase, n->in(3), visited);
3002     case Op_ShenandoahEnqueueBarrier:
3003       return needs_barrier_impl(phase, n->in(1), visited);
3004     case Op_CreateEx:
3005       return false;
3006     default:
3007       break;
3008   }
3009 #ifdef ASSERT
3010   tty->print("need barrier on?: ");
3011   tty->print_cr("ins:");
3012   n->dump(2);
3013   tty->print_cr("outs:");
3014   n->dump(-2);
3015   ShouldNotReachHere();
3016 #endif
3017   return true;
3018 }