261 tty->print(" {%d..}=>%d", lo(), dest());
262 else
263 tty->print(" {%d..%d}=>%d", lo(), hi(), dest());
264 }
265 };
266
267
268 //-------------------------------do_tableswitch--------------------------------
// Parse a tableswitch bytecode: pop the switch value and lower the bytecode
// into a decision tree over coalesced case ranges (see jump_switch_ranges).
void Parse::do_tableswitch() {
  Node* lookup = pop();  // the value being switched on

  // Get information about tableswitch
  int default_dest = iter().get_dest_table(0);  // bci of the default target
  int lo_index = iter().get_int_table(1);       // smallest case key
  int hi_index = iter().get_int_table(2);       // largest case key
  int len = hi_index - lo_index + 1;            // number of explicit cases

  if (len < 1) {
    // Degenerate switch with no cases: control always goes to the default.
    // If this is a backward branch, add safepoint
    maybe_add_safepoint(default_dest);
    merge(default_dest);
    return;
  }

  // generate decision tree, using trichotomy when possible
  int rnum = len+2;                    // worst case: every case distinct, plus two gap ranges
  bool makes_backward_branch = false;  // does any case target a bci at or before this one?
  SwitchRange* ranges = NEW_RESOURCE_ARRAY(SwitchRange, rnum);
  int rp = -1;                         // index of the last range filled in
  if (lo_index != min_jint) {
    // Keys below the table all map to the default target.
    ranges[++rp].setRange(min_jint, lo_index-1, default_dest, NullTableIndex);
  }
  for (int j = 0; j < len; j++) {
    jint match_int = lo_index+j;              // key of this case
    int dest = iter().get_dest_table(j+3);    // its branch target bci
    makes_backward_branch |= (dest <= bci());
    int table_index = method_data_update() ? j : NullTableIndex;
    // Extend the previous range when this case adjoins it with the same
    // destination; otherwise start a fresh range.
    if (rp < 0 || !ranges[rp].adjoin(match_int, dest, table_index)) {
      ranges[++rp].set(match_int, dest, table_index);
    }
  }
  assert(rp < len+2, "not too many ranges");

  // Safepoint in case if backward branch observed
  if( makes_backward_branch && UseLoopSafepoints )
    add_safepoint();

  jump_switch_ranges(lookup, &ranges[0], &ranges[rp]);
}
316
317
318 //------------------------------do_lookupswitch--------------------------------
319 void Parse::do_lookupswitch() {
320 Node *lookup = pop(); // lookup value
321 // Get information about lookupswitch
322 int default_dest = iter().get_dest_table(0);
323 int len = iter().get_int_table(1);
324
325 if (len < 1) { // If this is a backward branch, add safepoint
326 maybe_add_safepoint(default_dest);
327 merge(default_dest);
328 return;
329 }
330
331 // generate decision tree, using trichotomy when possible
332 jint* table = NEW_RESOURCE_ARRAY(jint, len*2);
333 {
334 for( int j = 0; j < len; j++ ) {
335 table[j+j+0] = iter().get_int_table(2+j+j);
336 table[j+j+1] = iter().get_dest_table(2+j+j+1);
337 }
338 qsort( table, len, 2*sizeof(table[0]), jint_cmp );
339 }
340
341 int rnum = len*2+1;
342 bool makes_backward_branch = false;
343 SwitchRange* ranges = NEW_RESOURCE_ARRAY(SwitchRange, rnum);
344 int rp = -1;
345 for( int j = 0; j < len; j++ ) {
346 jint match_int = table[j+j+0];
714
715 // Store information about current state, tagged with new _jsr_bci
716 int return_bci = iter().next_bci();
717 int jsr_bci = (bc() == Bytecodes::_jsr) ? iter().get_dest() : iter().get_far_dest();
718
719 // Update method data
720 profile_taken_branch(jsr_bci);
721
722 // The way we do things now, there is only one successor block
723 // for the jsr, because the target code is cloned by ciTypeFlow.
724 Block* target = successor_for_bci(jsr_bci);
725
726 // What got pushed?
727 const Type* ret_addr = target->peek();
728 assert(ret_addr->singleton(), "must be a constant (cloned jsr body)");
729
730 // Effect on jsr on stack
731 push(_gvn.makecon(ret_addr));
732
733 // Flow to the jsr.
734 merge(jsr_bci);
735 }
736
737 // Handle ret bytecode
738 void Parse::do_ret() {
739 // Find to whom we return.
740 #if 0 // %%%% MAKE THIS WORK
741 Node* con = local();
742 const TypePtr* tp = con->bottom_type()->isa_ptr();
743 assert(tp && tp->singleton(), "");
744 int return_bci = (int) tp->get_con();
745 merge(return_bci);
746 #else
747 assert(block()->num_successors() == 1, "a ret can only go one place now");
748 Block* target = block()->successor_at(0);
749 assert(!target->is_ready(), "our arrival must be expected");
750 profile_ret(target->flow()->start());
751 int pnum = target->next_path_num();
752 merge_common(target, pnum);
753 #endif
864 }
865 prob = PROB_STATIC_FREQUENT; // Likely to take backwards branch
866 }
867
868 assert(prob != PROB_UNKNOWN, "must have some guess at this point");
869 return prob;
870 }
871
872 // The magic constants are chosen so as to match the output of
873 // branch_prediction() when the profile reports a zero taken count.
874 // It is important to distinguish zero counts unambiguously, because
875 // some branches (e.g., _213_javac.Assembler.eliminate) validly produce
876 // very small but nonzero probabilities, which if confused with zero
877 // counts would keep the program recompiling indefinitely.
878 bool Parse::seems_never_taken(float prob) {
879 return prob < PROB_MIN;
880 }
881
882 //-------------------------------repush_if_args--------------------------------
883 // Push arguments of an "if" bytecode back onto the stack by adjusting _sp.
884 inline void Parse::repush_if_args() {
885 #ifndef PRODUCT
886 if (PrintOpto && WizardMode) {
887 tty->print("defending against excessive implicit null exceptions on %s @%d in ",
888 Bytecodes::name(iter().cur_bc()), iter().cur_bci());
889 method()->print_name(); tty->cr();
890 }
891 #endif
892 int bc_depth = - Bytecodes::depth(iter().cur_bc());
893 assert(bc_depth == 1 || bc_depth == 2, "only two kinds of branches");
894 DEBUG_ONLY(sync_jvms()); // argument(n) requires a synced jvms
895 assert(argument(0) != NULL, "must exist");
896 assert(bc_depth == 1 || argument(1) != NULL, "two must exist");
897 _sp += bc_depth;
898 }
899
900 //----------------------------------do_ifnull----------------------------------
901 void Parse::do_ifnull(BoolTest::mask btest, Node *c) {
902 int target_bci = iter().get_dest();
903
904 Block* branch_block = successor_for_bci(target_bci);
905 Block* next_block = successor_for_bci(iter().next_bci());
906
907 float cnt;
908 float prob = branch_prediction(cnt, btest, target_bci);
909 if (prob == PROB_UNKNOWN) {
910 // (An earlier version of do_ifnull omitted this trap for OSR methods.)
911 #ifndef PRODUCT
912 if (PrintOpto && Verbose)
913 tty->print_cr("Never-taken edge stops compilation at bci %d",bci());
914 #endif
915 repush_if_args(); // to gather stats on loop
916 // We need to mark this branch as taken so that if we recompile we will
917 // see that it is possible. In the tiered system the interpreter doesn't
937 // Sanity check the probability value
938 assert(prob > 0.0f,"Bad probability in Parser");
939 // Need xform to put node in hash table
940 IfNode *iff = create_and_xform_if( control(), tst, prob, cnt );
941 assert(iff->_prob > 0.0f,"Optimizer made bad probability in parser");
942 // True branch
943 { PreserveJVMState pjvms(this);
944 Node* iftrue = _gvn.transform( new (C, 1) IfTrueNode (iff) );
945 set_control(iftrue);
946
947 if (stopped()) { // Path is dead?
948 explicit_null_checks_elided++;
949 if (EliminateAutoBox) {
950 // Mark the successor block as parsed
951 branch_block->next_path_num();
952 }
953 } else { // Path is live.
954 // Update method data
955 profile_taken_branch(target_bci);
956 adjust_map_after_if(btest, c, prob, branch_block, next_block);
957 if (!stopped())
958 merge(target_bci);
959 }
960 }
961
962 // False branch
963 Node* iffalse = _gvn.transform( new (C, 1) IfFalseNode(iff) );
964 set_control(iffalse);
965
966 if (stopped()) { // Path is dead?
967 explicit_null_checks_elided++;
968 if (EliminateAutoBox) {
969 // Mark the successor block as parsed
970 next_block->next_path_num();
971 }
972 } else { // Path is live.
973 // Update method data
974 profile_not_taken_branch();
975 adjust_map_after_if(BoolTest(btest).negate(), c, 1.0-prob,
976 next_block, branch_block);
977 }
978 }
979
980 //------------------------------------do_if------------------------------------
1059 if (!taken_if_true) { // Finish conversion to canonical form
1060 Node* tmp = taken_branch;
1061 taken_branch = untaken_branch;
1062 untaken_branch = tmp;
1063 }
1064
1065 // Branch is taken:
1066 { PreserveJVMState pjvms(this);
1067 taken_branch = _gvn.transform(taken_branch);
1068 set_control(taken_branch);
1069
1070 if (stopped()) {
1071 if (EliminateAutoBox) {
1072 // Mark the successor block as parsed
1073 branch_block->next_path_num();
1074 }
1075 } else {
1076 // Update method data
1077 profile_taken_branch(target_bci);
1078 adjust_map_after_if(taken_btest, c, prob, branch_block, next_block);
1079 if (!stopped())
1080 merge(target_bci);
1081 }
1082 }
1083
1084 untaken_branch = _gvn.transform(untaken_branch);
1085 set_control(untaken_branch);
1086
1087 // Branch not taken.
1088 if (stopped()) {
1089 if (EliminateAutoBox) {
1090 // Mark the successor block as parsed
1091 next_block->next_path_num();
1092 }
1093 } else {
1094 // Update method data
1095 profile_not_taken_branch();
1096 adjust_map_after_if(untaken_btest, c, untaken_prob,
1097 next_block, branch_block);
1098 }
1099 }
1100
1101 //----------------------------adjust_map_after_if------------------------------
1102 // Adjust the JVM state to reflect the result of taking this path.
2063 // Hook the thrown exception directly to subsequent handlers.
2064 if (BailoutToInterpreterForThrows) {
2065 // Keep method interpreted from now on.
2066 uncommon_trap(Deoptimization::Reason_unhandled,
2067 Deoptimization::Action_make_not_compilable);
2068 return;
2069 }
2070 add_exception_state(make_exception_state(peek()));
2071 break;
2072
2073 case Bytecodes::_goto: // fall through
2074 case Bytecodes::_goto_w: {
2075 int target_bci = (bc() == Bytecodes::_goto) ? iter().get_dest() : iter().get_far_dest();
2076
2077 // If this is a backwards branch in the bytecodes, add Safepoint
2078 maybe_add_safepoint(target_bci);
2079
2080 // Update method data
2081 profile_taken_branch(target_bci);
2082
2083 // Merge the current control into the target basic block
2084 merge(target_bci);
2085
2086 // See if we can get some profile data and hand it off to the next block
2087 Block *target_block = block()->successor_for_bci(target_bci);
2088 if (target_block->pred_count() != 1) break;
2089 ciMethodData* methodData = method()->method_data();
2090 if (!methodData->is_mature()) break;
2091 ciProfileData* data = methodData->bci_to_data(bci());
2092 assert( data->is_JumpData(), "" );
2093 int taken = ((ciJumpData*)data)->taken();
2094 taken = method()->scale_count(taken);
2095 target_block->set_count(taken);
2096 break;
2097 }
2098
2099 case Bytecodes::_ifnull: btest = BoolTest::eq; goto handle_if_null;
2100 case Bytecodes::_ifnonnull: btest = BoolTest::ne; goto handle_if_null;
2101 handle_if_null:
2102 // If this is a backwards branch in the bytecodes, add Safepoint
|
261 tty->print(" {%d..}=>%d", lo(), dest());
262 else
263 tty->print(" {%d..%d}=>%d", lo(), hi(), dest());
264 }
265 };
266
267
268 //-------------------------------do_tableswitch--------------------------------
// Parse a tableswitch bytecode: pop the switch value and lower the bytecode
// into a decision tree over coalesced case ranges (see jump_switch_ranges).
void Parse::do_tableswitch() {
  Node* lookup = pop();  // the value being switched on

  // Get information about tableswitch
  int default_dest = iter().get_dest_table(0);  // bci of the default target
  int lo_index = iter().get_int_table(1);       // smallest case key
  int hi_index = iter().get_int_table(2);       // largest case key
  int len = hi_index - lo_index + 1;            // number of explicit cases

  if (len < 1) {
    // Degenerate switch with no cases: control always goes to the default.
    // If this is a backward branch, add safepoint
    maybe_add_safepoint(default_dest);
    if(should_add_predicate(default_dest)){
      // Default target starts a loop: insert a loop predicate so checks can
      // later be hoisted out of the loop body.
      _sp += 1; //set original stack for use by uncommon_trap
      add_predicate();
      _sp -= 1;
    }
    merge(default_dest);
    return;
  }

  // generate decision tree, using trichotomy when possible
  int rnum = len+2;                    // worst case: every case distinct, plus two gap ranges
  bool makes_backward_branch = false;  // does any case target a bci at or before this one?
  SwitchRange* ranges = NEW_RESOURCE_ARRAY(SwitchRange, rnum);
  int rp = -1;                         // index of the last range filled in
  if (lo_index != min_jint) {
    // Keys below the table all map to the default target.
    ranges[++rp].setRange(min_jint, lo_index-1, default_dest, NullTableIndex);
  }
  for (int j = 0; j < len; j++) {
    jint match_int = lo_index+j;              // key of this case
    int dest = iter().get_dest_table(j+3);    // its branch target bci
    makes_backward_branch |= (dest <= bci());
    int table_index = method_data_update() ? j : NullTableIndex;
    // Extend the previous range when this case adjoins it with the same
    // destination; otherwise start a fresh range.
    if (rp < 0 || !ranges[rp].adjoin(match_int, dest, table_index)) {
      ranges[++rp].set(match_int, dest, table_index);
    }
  }
  assert(rp < len+2, "not too many ranges");

  // Safepoint in case if backward branch observed
  if( makes_backward_branch && UseLoopSafepoints )
    add_safepoint();

  jump_switch_ranges(lookup, &ranges[0], &ranges[rp]);
}
321
322
323 //------------------------------do_lookupswitch--------------------------------
324 void Parse::do_lookupswitch() {
325 Node *lookup = pop(); // lookup value
326 // Get information about lookupswitch
327 int default_dest = iter().get_dest_table(0);
328 int len = iter().get_int_table(1);
329
330 if (len < 1) { // If this is a backward branch, add safepoint
331 maybe_add_safepoint(default_dest);
332 if(should_add_predicate(default_dest)){
333 _sp += 1; //set original stack for use by uncommon_trap
334 add_predicate();
335 _sp -= 1;
336 }
337 merge(default_dest);
338 return;
339 }
340
341 // generate decision tree, using trichotomy when possible
342 jint* table = NEW_RESOURCE_ARRAY(jint, len*2);
343 {
344 for( int j = 0; j < len; j++ ) {
345 table[j+j+0] = iter().get_int_table(2+j+j);
346 table[j+j+1] = iter().get_dest_table(2+j+j+1);
347 }
348 qsort( table, len, 2*sizeof(table[0]), jint_cmp );
349 }
350
351 int rnum = len*2+1;
352 bool makes_backward_branch = false;
353 SwitchRange* ranges = NEW_RESOURCE_ARRAY(SwitchRange, rnum);
354 int rp = -1;
355 for( int j = 0; j < len; j++ ) {
356 jint match_int = table[j+j+0];
724
725 // Store information about current state, tagged with new _jsr_bci
726 int return_bci = iter().next_bci();
727 int jsr_bci = (bc() == Bytecodes::_jsr) ? iter().get_dest() : iter().get_far_dest();
728
729 // Update method data
730 profile_taken_branch(jsr_bci);
731
732 // The way we do things now, there is only one successor block
733 // for the jsr, because the target code is cloned by ciTypeFlow.
734 Block* target = successor_for_bci(jsr_bci);
735
736 // What got pushed?
737 const Type* ret_addr = target->peek();
738 assert(ret_addr->singleton(), "must be a constant (cloned jsr body)");
739
740 // Effect on jsr on stack
741 push(_gvn.makecon(ret_addr));
742
743 // Flow to the jsr.
744 if(should_add_predicate(jsr_bci)){
745 add_predicate();
746 }
747 merge(jsr_bci);
748 }
749
750 // Handle ret bytecode
751 void Parse::do_ret() {
752 // Find to whom we return.
753 #if 0 // %%%% MAKE THIS WORK
754 Node* con = local();
755 const TypePtr* tp = con->bottom_type()->isa_ptr();
756 assert(tp && tp->singleton(), "");
757 int return_bci = (int) tp->get_con();
758 merge(return_bci);
759 #else
760 assert(block()->num_successors() == 1, "a ret can only go one place now");
761 Block* target = block()->successor_at(0);
762 assert(!target->is_ready(), "our arrival must be expected");
763 profile_ret(target->flow()->start());
764 int pnum = target->next_path_num();
765 merge_common(target, pnum);
766 #endif
877 }
878 prob = PROB_STATIC_FREQUENT; // Likely to take backwards branch
879 }
880
881 assert(prob != PROB_UNKNOWN, "must have some guess at this point");
882 return prob;
883 }
884
885 // The magic constants are chosen so as to match the output of
886 // branch_prediction() when the profile reports a zero taken count.
887 // It is important to distinguish zero counts unambiguously, because
888 // some branches (e.g., _213_javac.Assembler.eliminate) validly produce
889 // very small but nonzero probabilities, which if confused with zero
890 // counts would keep the program recompiling indefinitely.
891 bool Parse::seems_never_taken(float prob) {
892 return prob < PROB_MIN;
893 }
894
895 //-------------------------------repush_if_args--------------------------------
896 // Push arguments of an "if" bytecode back onto the stack by adjusting _sp.
897 inline int Parse::repush_if_args() {
898 #ifndef PRODUCT
899 if (PrintOpto && WizardMode) {
900 tty->print("defending against excessive implicit null exceptions on %s @%d in ",
901 Bytecodes::name(iter().cur_bc()), iter().cur_bci());
902 method()->print_name(); tty->cr();
903 }
904 #endif
905 int bc_depth = - Bytecodes::depth(iter().cur_bc());
906 assert(bc_depth == 1 || bc_depth == 2, "only two kinds of branches");
907 DEBUG_ONLY(sync_jvms()); // argument(n) requires a synced jvms
908 assert(argument(0) != NULL, "must exist");
909 assert(bc_depth == 1 || argument(1) != NULL, "two must exist");
910 _sp += bc_depth;
911 return bc_depth;
912 }
913
914 //----------------------------------do_ifnull----------------------------------
915 void Parse::do_ifnull(BoolTest::mask btest, Node *c) {
916 int target_bci = iter().get_dest();
917
918 Block* branch_block = successor_for_bci(target_bci);
919 Block* next_block = successor_for_bci(iter().next_bci());
920
921 float cnt;
922 float prob = branch_prediction(cnt, btest, target_bci);
923 if (prob == PROB_UNKNOWN) {
924 // (An earlier version of do_ifnull omitted this trap for OSR methods.)
925 #ifndef PRODUCT
926 if (PrintOpto && Verbose)
927 tty->print_cr("Never-taken edge stops compilation at bci %d",bci());
928 #endif
929 repush_if_args(); // to gather stats on loop
930 // We need to mark this branch as taken so that if we recompile we will
931 // see that it is possible. In the tiered system the interpreter doesn't
951 // Sanity check the probability value
952 assert(prob > 0.0f,"Bad probability in Parser");
953 // Need xform to put node in hash table
954 IfNode *iff = create_and_xform_if( control(), tst, prob, cnt );
955 assert(iff->_prob > 0.0f,"Optimizer made bad probability in parser");
956 // True branch
957 { PreserveJVMState pjvms(this);
958 Node* iftrue = _gvn.transform( new (C, 1) IfTrueNode (iff) );
959 set_control(iftrue);
960
961 if (stopped()) { // Path is dead?
962 explicit_null_checks_elided++;
963 if (EliminateAutoBox) {
964 // Mark the successor block as parsed
965 branch_block->next_path_num();
966 }
967 } else { // Path is live.
968 // Update method data
969 profile_taken_branch(target_bci);
970 adjust_map_after_if(btest, c, prob, branch_block, next_block);
971 if (!stopped()) {
972 if(should_add_predicate(target_bci)){//add a predicate if it branches to a loop
973 int nargs = repush_if_args();//set original stack for uncommon_trap
974 add_predicate();
975 _sp -= nargs;
976 }
977 merge(target_bci);
978 }
979 }
980 }
981
982 // False branch
983 Node* iffalse = _gvn.transform( new (C, 1) IfFalseNode(iff) );
984 set_control(iffalse);
985
986 if (stopped()) { // Path is dead?
987 explicit_null_checks_elided++;
988 if (EliminateAutoBox) {
989 // Mark the successor block as parsed
990 next_block->next_path_num();
991 }
992 } else { // Path is live.
993 // Update method data
994 profile_not_taken_branch();
995 adjust_map_after_if(BoolTest(btest).negate(), c, 1.0-prob,
996 next_block, branch_block);
997 }
998 }
999
1000 //------------------------------------do_if------------------------------------
1079 if (!taken_if_true) { // Finish conversion to canonical form
1080 Node* tmp = taken_branch;
1081 taken_branch = untaken_branch;
1082 untaken_branch = tmp;
1083 }
1084
1085 // Branch is taken:
1086 { PreserveJVMState pjvms(this);
1087 taken_branch = _gvn.transform(taken_branch);
1088 set_control(taken_branch);
1089
1090 if (stopped()) {
1091 if (EliminateAutoBox) {
1092 // Mark the successor block as parsed
1093 branch_block->next_path_num();
1094 }
1095 } else {
1096 // Update method data
1097 profile_taken_branch(target_bci);
1098 adjust_map_after_if(taken_btest, c, prob, branch_block, next_block);
1099 if (!stopped()) {
1100 if(should_add_predicate(target_bci)){//add a predicate if it branches to a loop
1101 int nargs = repush_if_args(); //set original stack for the uncommon_trap
1102 add_predicate();
1103 _sp -= nargs;
1104 }
1105 merge(target_bci);
1106 }
1107 }
1108 }
1109
1110 untaken_branch = _gvn.transform(untaken_branch);
1111 set_control(untaken_branch);
1112
1113 // Branch not taken.
1114 if (stopped()) {
1115 if (EliminateAutoBox) {
1116 // Mark the successor block as parsed
1117 next_block->next_path_num();
1118 }
1119 } else {
1120 // Update method data
1121 profile_not_taken_branch();
1122 adjust_map_after_if(untaken_btest, c, untaken_prob,
1123 next_block, branch_block);
1124 }
1125 }
1126
1127 //----------------------------adjust_map_after_if------------------------------
1128 // Adjust the JVM state to reflect the result of taking this path.
2089 // Hook the thrown exception directly to subsequent handlers.
2090 if (BailoutToInterpreterForThrows) {
2091 // Keep method interpreted from now on.
2092 uncommon_trap(Deoptimization::Reason_unhandled,
2093 Deoptimization::Action_make_not_compilable);
2094 return;
2095 }
2096 add_exception_state(make_exception_state(peek()));
2097 break;
2098
2099 case Bytecodes::_goto: // fall through
2100 case Bytecodes::_goto_w: {
2101 int target_bci = (bc() == Bytecodes::_goto) ? iter().get_dest() : iter().get_far_dest();
2102
2103 // If this is a backwards branch in the bytecodes, add Safepoint
2104 maybe_add_safepoint(target_bci);
2105
2106 // Update method data
2107 profile_taken_branch(target_bci);
2108
2109 //Add loop predicate if it goes to a loop
2110 if(should_add_predicate(target_bci)){
2111 add_predicate();
2112 }
2113 // Merge the current control into the target basic block
2114 merge(target_bci);
2115
2116 // See if we can get some profile data and hand it off to the next block
2117 Block *target_block = block()->successor_for_bci(target_bci);
2118 if (target_block->pred_count() != 1) break;
2119 ciMethodData* methodData = method()->method_data();
2120 if (!methodData->is_mature()) break;
2121 ciProfileData* data = methodData->bci_to_data(bci());
2122 assert( data->is_JumpData(), "" );
2123 int taken = ((ciJumpData*)data)->taken();
2124 taken = method()->scale_count(taken);
2125 target_block->set_count(taken);
2126 break;
2127 }
2128
2129 case Bytecodes::_ifnull: btest = BoolTest::eq; goto handle_if_null;
2130 case Bytecodes::_ifnonnull: btest = BoolTest::ne; goto handle_if_null;
2131 handle_if_null:
2132 // If this is a backwards branch in the bytecodes, add Safepoint
|