src/share/vm/opto/parse2.cpp
8054492 Sdiff

Old version:

 916   }
 917 #endif
 918   int bc_depth = - Bytecodes::depth(iter().cur_bc());
 919   assert(bc_depth == 1 || bc_depth == 2, "only two kinds of branches");
 920   DEBUG_ONLY(sync_jvms());   // argument(n) requires a synced jvms
 921   assert(argument(0) != NULL, "must exist");
 922   assert(bc_depth == 1 || argument(1) != NULL, "two must exist");
 923   inc_sp(bc_depth);
 924   return bc_depth;
 925 }
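
For context, the helper whose tail appears above is repush_if_args() (judging by the call made below just before the uncommon trap). Bytecodes::depth() reports a branch bytecode's net stack effect; that value is negative because the branch pops its operands, so negating it yields the operand count: 1 for the one-operand branches (ifnull, ifnonnull, the ifeq family) and 2 for the two-operand ones (if_icmp*, if_acmp*). inc_sp(bc_depth) then re-exposes those operands on the expression stack so the trap and profiling code see the full interpreter state. A minimal standalone sketch of that arithmetic follows; the toy bytecode enum and depth table are illustrative stand-ins, not HotSpot's Bytecodes data.

// Minimal sketch of the bc_depth arithmetic above. ToyBytecode and toy_depth()
// are illustrative stand-ins for Bytecodes and Bytecodes::depth().
#include <cassert>
#include <cstdio>

enum ToyBytecode { toy_ifnull, toy_ifnonnull, toy_if_icmpeq, toy_if_acmpne };

static int toy_depth(ToyBytecode bc) {      // net stack effect of the branch
  switch (bc) {
    case toy_ifnull:
    case toy_ifnonnull: return -1;          // pops one reference
    case toy_if_icmpeq:
    case toy_if_acmpne: return -2;          // pops two operands
  }
  return 0;
}

static int toy_repush_if_args(ToyBytecode bc, int& sp) {
  int bc_depth = -toy_depth(bc);
  assert(bc_depth == 1 || bc_depth == 2);   // only two kinds of branches
  sp += bc_depth;                           // re-expose the popped operands
  return bc_depth;
}

int main() {
  int sp = 0;
  printf("ifnull re-pushes %d operand(s)\n",    toy_repush_if_args(toy_ifnull, sp));
  printf("if_acmpne re-pushes %d operand(s)\n", toy_repush_if_args(toy_if_acmpne, sp));
  return 0;
}
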
 926 
 927 //----------------------------------do_ifnull----------------------------------
 928 void Parse::do_ifnull(BoolTest::mask btest, Node *c) {
 929   int target_bci = iter().get_dest();
 930 
 931   Block* branch_block = successor_for_bci(target_bci);
 932   Block* next_block   = successor_for_bci(iter().next_bci());
 933 
 934   float cnt;
 935   float prob = branch_prediction(cnt, btest, target_bci);



 936   if (prob == PROB_UNKNOWN) {
 937     // (An earlier version of do_ifnull omitted this trap for OSR methods.)
 938 #ifndef PRODUCT
 939     if (PrintOpto && Verbose)
 940       tty->print_cr("Never-taken edge stops compilation at bci %d",bci());
 941 #endif
 942     repush_if_args(); // to gather stats on loop
 943     // We need to mark this branch as taken so that if we recompile we will
 944     // see that it is possible. In the tiered system the interpreter doesn't
 945     // do profiling and by the time we get to the lower tier from the interpreter
 946     // the path may be cold again. Make sure it doesn't look untaken
 947     profile_taken_branch(target_bci, !ProfileInterpreter);
 948     uncommon_trap(Deoptimization::Reason_unreached,
 949                   Deoptimization::Action_reinterpret,
 950                   NULL, "cold");
 951     if (C->eliminate_boxing()) {
 952       // Mark the successor blocks as parsed
 953       branch_block->next_path_num();
 954       next_block->next_path_num();
 955     }


2206     Block *target_block = block()->successor_for_bci(target_bci);
2207     if (target_block->pred_count() != 1)  break;
2208     ciMethodData* methodData = method()->method_data();
2209     if (!methodData->is_mature())  break;
2210     ciProfileData* data = methodData->bci_to_data(bci());
2211     assert( data->is_JumpData(), "" );
2212     int taken = ((ciJumpData*)data)->taken();
2213     taken = method()->scale_count(taken);
2214     target_block->set_count(taken);
2215     break;
2216   }
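
This hunk is the tail of an unconditional-jump case (the profile record asserted here is JumpData): once the jump has been merged into its target, the parser tries to hand the profile over to the target block. That only works when the target has exactly one predecessor, because this jump's taken count is then the block's whole execution count; with more predecessors it would only be a lower bound, so nothing is recorded. scale_count() adjusts the raw interpreter count before it is stored. A small sketch of that decision, with a plain multiplier standing in for scale_count():

// Sketch of the count propagation above. ToyBlock and the scale factor are
// illustrative stand-ins for Parse::Block, ciJumpData::taken() and
// ciMethod::scale_count().
struct ToyBlock {
  int   pred_count;   // how many predecessors merge into this block
  float count;        // expected execution count, -1 if unknown
};

static void propagate_jump_count(ToyBlock& target, int profiled_taken, float scale) {
  if (target.pred_count != 1) {
    // With several predecessors this jump's taken count is only a lower
    // bound on the target's execution count, so leave it unknown.
    return;
  }
  target.count = profiled_taken * scale;
}

int main() {
  ToyBlock loop_head = { 1, -1.0f };
  propagate_jump_count(loop_head, 1000, 1.0f);
  return loop_head.count == 1000.0f ? 0 : 1;
}
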
2217 
2218   case Bytecodes::_ifnull:    btest = BoolTest::eq; goto handle_if_null;
2219   case Bytecodes::_ifnonnull: btest = BoolTest::ne; goto handle_if_null;
2220   handle_if_null:
2221     // If this is a backwards branch in the bytecodes, add Safepoint
2222     maybe_add_safepoint(iter().get_dest());
2223     a = null();
2224     b = pop();
2225     if (!_gvn.type(b)->speculative_maybe_null() &&

2226         !too_many_traps(Deoptimization::Reason_speculate_null_check)) {
2227       inc_sp(1);
2228       Node* null_ctl = top();
2229       b = null_check_oop(b, &null_ctl, true, true, true);
2230       assert(null_ctl->is_top(), "no null control here");
2231       dec_sp(1);
2232     }
2233     c = _gvn.transform( new CmpPNode(b, a) );
2234     do_ifnull(btest, c);
2235     break;
2236 
2237   case Bytecodes::_if_acmpeq: btest = BoolTest::eq; goto handle_if_acmp;
2238   case Bytecodes::_if_acmpne: btest = BoolTest::ne; goto handle_if_acmp;
2239   handle_if_acmp:
2240     // If this is a backwards branch in the bytecodes, add Safepoint
2241     maybe_add_safepoint(iter().get_dest());
2242     a = pop();
2243     b = pop();
2244     c = _gvn.transform( new CmpPNode(b, a) );
 2245     c = optimize_cmp_with_klass(c);


New version:

 916   }
 917 #endif
 918   int bc_depth = - Bytecodes::depth(iter().cur_bc());
 919   assert(bc_depth == 1 || bc_depth == 2, "only two kinds of branches");
 920   DEBUG_ONLY(sync_jvms());   // argument(n) requires a synced jvms
 921   assert(argument(0) != NULL, "must exist");
 922   assert(bc_depth == 1 || argument(1) != NULL, "two must exist");
 923   inc_sp(bc_depth);
 924   return bc_depth;
 925 }
 926 
 927 //----------------------------------do_ifnull----------------------------------
 928 void Parse::do_ifnull(BoolTest::mask btest, Node *c) {
 929   int target_bci = iter().get_dest();
 930 
 931   Block* branch_block = successor_for_bci(target_bci);
 932   Block* next_block   = successor_for_bci(iter().next_bci());
 933 
 934   float cnt;
 935   float prob = branch_prediction(cnt, btest, target_bci);
 936   if (method()->intrinsic_id() == vmIntrinsics::_class_cast) {
 937     prob = PROB_UNLIKELY_MAG(3); // 0.001
 938   }
 939   if (prob == PROB_UNKNOWN) {
 940     // (An earlier version of do_ifnull omitted this trap for OSR methods.)
 941 #ifndef PRODUCT
 942     if (PrintOpto && Verbose)
 943       tty->print_cr("Never-taken edge stops compilation at bci %d",bci());
 944 #endif
 945     repush_if_args(); // to gather stats on loop
 946     // We need to mark this branch as taken so that if we recompile we will
 947     // see that it is possible. In the tiered system the interpreter doesn't
 948     // do profiling and by the time we get to the lower tier from the interpreter
 949     // the path may be cold again. Make sure it doesn't look untaken
 950     profile_taken_branch(target_bci, !ProfileInterpreter);
 951     uncommon_trap(Deoptimization::Reason_unreached,
 952                   Deoptimization::Action_reinterpret,
 953                   NULL, "cold");
 954     if (C->eliminate_boxing()) {
 955       // Mark the successor blocks as parsed
 956       branch_block->next_path_num();
 957       next_block->next_path_num();
 958     }
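
The three added lines (936-938) are the first half of the change: when the method being parsed is the Class.cast intrinsic (vmIntrinsics::_class_cast), the branch probability is pinned to PROB_UNLIKELY_MAG(3), roughly 0.001, regardless of what branch_prediction() returned. In particular prob can no longer be PROB_UNKNOWN inside that intrinsic, so parsing continues past the block above instead of stopping at the Reason_unreached uncommon trap. A standalone sketch of that selection follows; PROB_UNKNOWN and PROB_UNLIKELY_MAG here are illustrative stand-ins, not HotSpot's definitions.

// Standalone sketch of the probability selection above. The constant and
// helper are illustrative stand-ins, not HotSpot's PROB_* definitions.
#include <cstdio>

static const float PROB_UNKNOWN = -1.0f;   // "no usable profile data"

static float PROB_UNLIKELY_MAG(int n) {    // roughly 1e-n: "almost never taken"
  float p = 1.0f;
  for (int i = 0; i < n; i++) p *= 0.1f;
  return p;
}

// Probability chosen for the branch in do_ifnull().
static float pick_prob(float profiled_prob, bool parsing_class_cast_intrinsic) {
  float prob = profiled_prob;
  if (parsing_class_cast_intrinsic) {
    // Inside Class.cast the branch gets a fixed, very low taken probability,
    // so a missing profile (PROB_UNKNOWN) never triggers the uncommon trap.
    prob = PROB_UNLIKELY_MAG(3); // 0.001
  }
  return prob;
}

int main() {
  printf("plain method, no profile: %.3f\n", pick_prob(PROB_UNKNOWN, false));
  printf("Class.cast,   no profile: %.3f\n", pick_prob(PROB_UNKNOWN, true));
  return 0;
}
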


2209     Block *target_block = block()->successor_for_bci(target_bci);
2210     if (target_block->pred_count() != 1)  break;
2211     ciMethodData* methodData = method()->method_data();
2212     if (!methodData->is_mature())  break;
2213     ciProfileData* data = methodData->bci_to_data(bci());
2214     assert( data->is_JumpData(), "" );
2215     int taken = ((ciJumpData*)data)->taken();
2216     taken = method()->scale_count(taken);
2217     target_block->set_count(taken);
2218     break;
2219   }
2220 
2221   case Bytecodes::_ifnull:    btest = BoolTest::eq; goto handle_if_null;
2222   case Bytecodes::_ifnonnull: btest = BoolTest::ne; goto handle_if_null;
2223   handle_if_null:
2224     // If this is a backwards branch in the bytecodes, add Safepoint
2225     maybe_add_safepoint(iter().get_dest());
2226     a = null();
2227     b = pop();
2228     if (!_gvn.type(b)->speculative_maybe_null() &&
2229         (method()->intrinsic_id() != vmIntrinsics::_class_cast) &&
2230         !too_many_traps(Deoptimization::Reason_speculate_null_check)) {
2231       inc_sp(1);
2232       Node* null_ctl = top();
2233       b = null_check_oop(b, &null_ctl, true, true, true);
2234       assert(null_ctl->is_top(), "no null control here");
2235       dec_sp(1);
2236     }
2237     c = _gvn.transform( new CmpPNode(b, a) );
2238     do_ifnull(btest, c);
2239     break;
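
The added test on line 2229 is the second half of the change: the speculative null check (null_check_oop() guarded by Deoptimization::Reason_speculate_null_check) is now also skipped while parsing the Class.cast intrinsic. The ifnull/ifnonnull being compiled there already implements the cast's own null handling, so turning the same value's test into a trapping null check would only add a redundant check and another deoptimization point. A compact sketch of the resulting guard, with illustrative field and function names:

// Sketch of the guard condition above; the names are illustrative stand-ins
// for the HotSpot calls on lines 2228-2230.
struct IfNullContext {
  bool speculation_says_never_null;   // !_gvn.type(b)->speculative_maybe_null()
  bool parsing_class_cast_intrinsic;  // method()->intrinsic_id() == _class_cast
  bool too_many_null_check_traps;     // too_many_traps(Reason_speculate_null_check)
};

// Emit a trapping (speculative) null check before the compare only when the
// speculative type claims non-null, we are not inside Class.cast, and this
// site has not already deoptimized repeatedly on such checks.
static bool emit_speculative_null_check(const IfNullContext& ctx) {
  return ctx.speculation_says_never_null &&
         !ctx.parsing_class_cast_intrinsic &&
         !ctx.too_many_null_check_traps;
}

int main() {
  IfNullContext inside_class_cast = { true, true, false };
  return emit_speculative_null_check(inside_class_cast) ? 1 : 0;  // 0: check skipped
}
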
2240 
2241   case Bytecodes::_if_acmpeq: btest = BoolTest::eq; goto handle_if_acmp;
2242   case Bytecodes::_if_acmpne: btest = BoolTest::ne; goto handle_if_acmp;
2243   handle_if_acmp:
2244     // If this is a backwards branch in the bytecodes, add Safepoint
2245     maybe_add_safepoint(iter().get_dest());
2246     a = pop();
2247     b = pop();
2248     c = _gvn.transform( new CmpPNode(b, a) );
2249     c = optimize_cmp_with_klass(c);

