src/share/vm/opto/bytecodeInfo.cpp
8028468 Sdiff src/share/vm/opto

  33 #include "opto/callGenerator.hpp"
  34 #include "opto/parse.hpp"
  35 #include "runtime/handles.inline.hpp"
  36 
  37 //=============================================================================
  38 //------------------------------InlineTree-------------------------------------
  39 InlineTree::InlineTree(Compile* c,
  40                        const InlineTree *caller_tree, ciMethod* callee,
  41                        JVMState* caller_jvms, int caller_bci,
  42                        float site_invoke_ratio, int max_inline_level) :
  43   C(c),
  44   _caller_jvms(caller_jvms),
  45   _caller_tree((InlineTree*) caller_tree),
  46   _method(callee),
  47   _site_invoke_ratio(site_invoke_ratio),
  48   _max_inline_level(max_inline_level),
  49   _count_inline_bcs(method()->code_size_for_inlining()),
  50   _subtrees(c->comp_arena(), 2, 0, NULL),
  51   _msg(NULL)
  52 {
  53   NOT_PRODUCT(_count_inlines = 0;)
  54   if (_caller_jvms != NULL) {
  55     // Keep a private copy of the caller_jvms:
  56     _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
  57     _caller_jvms->set_bci(caller_jvms->bci());
  58     assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
  59   }
  60   assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  61   assert((caller_tree == NULL ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  62   assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  63   if (UseOldInlining) {
  64     // Update hierarchical counts, count_inline_bcs() and count_inlines()
  65     InlineTree *caller = (InlineTree *)caller_tree;
  66     for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) {
  67       caller->_count_inline_bcs += count_inline_bcs();
  68       NOT_PRODUCT(caller->_count_inlines++;)
  69     }
  70   }
  71 }
  72 
  73 InlineTree::InlineTree(Compile* c, ciMethod* callee_method, JVMState* caller_jvms,
  74                        float site_invoke_ratio, int max_inline_level) :
  75   C(c),
  76   _caller_jvms(caller_jvms),
  77   _caller_tree(NULL),
  78   _method(callee_method),
  79   _site_invoke_ratio(site_invoke_ratio),
  80   _max_inline_level(max_inline_level),
  81   _count_inline_bcs(method()->code_size()),
  82   _msg(NULL)
  83 {
  84   NOT_PRODUCT(_count_inlines = 0;)
  85   assert(!UseOldInlining, "do not use for old stuff");
  86 }
  87 
  88 /**
  89  *  Return true when EA is ON and a java constructor is called or
  90  *  a super constructor is called from an inlined java constructor.
  91  *  Also return true for boxing methods.
  92  */
  93 static bool is_init_with_ea(ciMethod* callee_method,
  94                             ciMethod* caller_method, Compile* C) {
  95   if (!C->do_escape_analysis() || !EliminateAllocations) {
  96     return false; // EA is off
  97   }
  98   if (callee_method->is_initializer()) {
  99     return true; // constructor
 100   }
 101   if (caller_method->is_initializer() &&
 102       caller_method != C->method() &&
 103       caller_method->holder()->is_subclass_of(callee_method->holder())) {
 104     return true; // super constructor is called from inlined constructor


 111 
 112 /**
 113  *  Force inlining of unboxing accessors.
 114  */
 115 static bool is_unboxing_method(ciMethod* callee_method, Compile* C) {
 116   return C->eliminate_boxing() && callee_method->is_unboxing_method();
 117 }
 118 
 119 // positive filter: should callee be inlined?
 120 bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method,
 121                                int caller_bci, ciCallProfile& profile,
 122                                WarmCallInfo* wci_result) {
 123   // Allows targeted inlining
 124   if(callee_method->should_inline()) {
 125     *wci_result = *(WarmCallInfo::always_hot());
 126     if (C->print_inlining() && Verbose) {
 127       CompileTask::print_inline_indent(inline_level());
 128       tty->print_cr("Inlined method is hot: ");
 129     }
 130     set_msg("force inline by CompilerOracle");

 131     return true;
 132   }
 133 
 134   int size = callee_method->code_size_for_inlining();
 135 
 136   // Check for too many throws (and not too huge)
 137   if(callee_method->interpreter_throwout_count() > InlineThrowCount &&
 138      size < InlineThrowMaxSize ) {
 139     wci_result->set_profit(wci_result->profit() * 100);
 140     if (C->print_inlining() && Verbose) {
 141       CompileTask::print_inline_indent(inline_level());
 142       tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
 143     }
 144     set_msg("many throws");
 145     return true;
 146   }
 147 
 148   if (!UseOldInlining) {
 149     set_msg("!UseOldInlining");
 150     return true;  // size and frequency are represented in a new way
 151   }
 152 
 153   int default_max_inline_size = C->max_inline_size();


 247     fail_msg = "unloaded signature classes";
 248   }
 249 
 250   if (fail_msg != NULL) {
 251     set_msg(fail_msg);
 252     return true;
 253   }
 254 
 255   // ignore heuristic controls on inlining
 256   if (callee_method->should_inline()) {
 257     set_msg("force inline by CompilerOracle");
 258     return false;
 259   }
 260 
 261   if (callee_method->should_not_inline()) {
 262     set_msg("disallowed by CompilerOracle");
 263     return true;
 264   }
 265 
 266 #ifndef PRODUCT
 267   if (ciReplay::should_not_inline(callee_method)) {
 268     set_msg("disallowed by ciReplay");
 269     return true;
 270   }
 271 #endif
 272 
 273   // Now perform checks which are heuristic
 274 
 275   if (is_unboxing_method(callee_method, C)) {
 276     // Inline unboxing methods.
 277     return false;
 278   }
 279 
 280   if (!callee_method->force_inline()) {
 281     if (callee_method->has_compiled_code() &&
 282         callee_method->instructions_size() > InlineSmallCode) {
 283       set_msg("already compiled into a big method");
 284       return true;
 285     }
 286   }


 326 }
 327 
 328 //-----------------------------try_to_inline-----------------------------------
 329 // return true if ok
 330 // Relocated from "InliningClosure::try_to_inline"
 331 bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
 332                                int caller_bci, JVMState* jvms, ciCallProfile& profile,
 333                                WarmCallInfo* wci_result, bool& should_delay) {
 334 
 335    // Old algorithm had funny accumulating BC-size counters
 336   if (UseOldInlining && ClipInlining
 337       && (int)count_inline_bcs() >= DesiredMethodLimit) {
 338     if (!callee_method->force_inline() || !IncrementalInline) {
 339       set_msg("size > DesiredMethodLimit");
 340       return false;
 341     } else if (!C->inlining_incrementally()) {
 342       should_delay = true;
 343     }
 344   }
 345 

 346   if (!should_inline(callee_method, caller_method, caller_bci, profile,
 347                      wci_result)) {
 348     return false;
 349   }
 350   if (should_not_inline(callee_method, caller_method, jvms, wci_result)) {
 351     return false;
 352   }
 353 
 354   if (InlineAccessors && callee_method->is_accessor()) {
 355     // accessor methods are not subject to any of the following limits.
 356     set_msg("accessor");
 357     return true;
 358   }
 359 
 360   // suppress a few checks for accessors and trivial methods
 361   if (callee_method->code_size() > MaxTrivialSize) {
 362 
 363     // don't inline into giant methods
 364     if (C->over_inlining_cutoff()) {
 365       if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
 366           || !IncrementalInline) {
 367         set_msg("NodeCountInliningCutoff");
 368         return false;
 369       } else {
 370         should_delay = true;
 371       }
 372     }
 373 
 374     if ((!UseInterpreter || CompileTheWorld) &&
 375         is_init_with_ea(callee_method, caller_method, C)) {
 376 
 377       // Escape Analysis stress testing when running Xcomp or CTW:
 378       // inline constructors even if they are not reached.
 379 

 380     } else if (profile.count() == 0) {
 381       // don't inline unreached call sites
 382        set_msg("call site not reached");
 383        return false;
 384     }
 385   }
 386 
 387   if (!C->do_inlining() && InlineAccessors) {
 388     set_msg("not an accessor");
 389     return false;
 390   }
 391   if (inline_level() > _max_inline_level) {
 392     if (callee_method->force_inline() && inline_level() > MaxForceInlineLevel) {
 393       set_msg("MaxForceInlineLevel");
 394       return false;
 395     }
 396     if (!callee_method->force_inline() || !IncrementalInline) {
 397       set_msg("inlining too deep");
 398       return false;
 399     } else if (!C->inlining_incrementally()) {


 683   InlineTree* iltp = root;
 684   uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
 685   for (uint d = 1; d <= depth; d++) {
 686     JVMState* jvmsp  = jvms->of_depth(d);
 687     // Select the corresponding subtree for this bci.
 688     assert(jvmsp->method() == iltp->method(), "tree still in sync");
 689     ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
 690     InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
 691     if (sub == NULL) {
 692       if (d == depth) {
 693         sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
 694       }
 695       guarantee(sub != NULL, "should be a sub-ilt here");
 696       return sub;
 697     }
 698     iltp = sub;
 699   }
 700   return iltp;
 701 }
 702 
 703 
 704 
 705 #ifndef PRODUCT
 706 void InlineTree::print_impl(outputStream* st, int indent) const {
 707   for (int i = 0; i < indent; i++) st->print(" ");
 708   st->print(" @ %d ", caller_bci());
 709   method()->print_short_name(st);
 710   st->cr();
 711 
 712   for (int i = 0 ; i < _subtrees.length(); i++) {
 713     _subtrees.at(i)->print_impl(st, indent + 2);
 714   }
 715 }
 716 
 717 void InlineTree::print_value_on(outputStream* st) const {
 718   print_impl(st, 2);
 719 }
 720 #endif


  33 #include "opto/callGenerator.hpp"
  34 #include "opto/parse.hpp"
  35 #include "runtime/handles.inline.hpp"
  36 
  37 //=============================================================================
  38 //------------------------------InlineTree-------------------------------------
  39 InlineTree::InlineTree(Compile* c,
  40                        const InlineTree *caller_tree, ciMethod* callee,
  41                        JVMState* caller_jvms, int caller_bci,
  42                        float site_invoke_ratio, int max_inline_level) :
  43   C(c),
  44   _caller_jvms(caller_jvms),
  45   _caller_tree((InlineTree*) caller_tree),
  46   _method(callee),
  47   _site_invoke_ratio(site_invoke_ratio),
  48   _max_inline_level(max_inline_level),
  49   _count_inline_bcs(method()->code_size_for_inlining()),
  50   _subtrees(c->comp_arena(), 2, 0, NULL),
  51   _msg(NULL)
  52 {
  53 #ifndef PRODUCT
  54   _count_inlines = 0;
  55   _forced_inline = false;
  56 #endif
  57   if (_caller_jvms != NULL) {
  58     // Keep a private copy of the caller_jvms:
  59     _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
  60     _caller_jvms->set_bci(caller_jvms->bci());
  61     assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
  62   }
  63   assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  64   assert((caller_tree == NULL ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  65   assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  66   if (UseOldInlining) {
  67     // Update hierarchical counts, count_inline_bcs() and count_inlines()
  68     InlineTree *caller = (InlineTree *)caller_tree;
  69     for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) {
  70       caller->_count_inline_bcs += count_inline_bcs();
  71       NOT_PRODUCT(caller->_count_inlines++;)
  72     }
  73   }
  74 }
  75 
  76 InlineTree::InlineTree(Compile* c, ciMethod* callee_method, JVMState* caller_jvms,
  77                        float site_invoke_ratio, int max_inline_level) :
  78   C(c),
  79   _caller_jvms(caller_jvms),
  80   _caller_tree(NULL),
  81   _method(callee_method),
  82   _site_invoke_ratio(site_invoke_ratio),
  83   _max_inline_level(max_inline_level),
  84   _count_inline_bcs(method()->code_size()),
  85   _msg(NULL)
  86 {
  87 #ifndef PRODUCT
  88   _count_inlines = 0;
  89   _forced_inline = false;
  90 #endif
  91   assert(!UseOldInlining, "do not use for old stuff");
  92 }
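
Both constructors on the new side now clear a _forced_inline flag that should_inline() sets and try_to_inline() reads back through forced_inline(). The matching declarations live in InlineTree (src/share/vm/opto/parse.hpp), which this page does not show; a hedged sketch of what they are presumably like follows, assuming the field is not debug-only, since the reset at new line 374 and the read at new line 407 are unconditional.

    // Sketch only, not taken from this webrev (other InlineTree members elided).
    class InlineTree : public ResourceObj {
      // ...
      bool _forced_inline;          // last inlining decision was forced
                                    // (by CompilerOracle or ciReplay)
     public:
      bool forced_inline() const { return _forced_inline; }
      // ...
    };
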
  93 
  94 /**
  95  *  Return true when EA is ON and a java constructor is called or
  96  *  a super constructor is called from an inlined java constructor.
  97  *  Also return true for boxing methods.
  98  */
  99 static bool is_init_with_ea(ciMethod* callee_method,
 100                             ciMethod* caller_method, Compile* C) {
 101   if (!C->do_escape_analysis() || !EliminateAllocations) {
 102     return false; // EA is off
 103   }
 104   if (callee_method->is_initializer()) {
 105     return true; // constructor
 106   }
 107   if (caller_method->is_initializer() &&
 108       caller_method != C->method() &&
 109       caller_method->holder()->is_subclass_of(callee_method->holder())) {
 110     return true; // super constructor is called from inlined constructor


 117 
 118 /**
 119  *  Force inlining of unboxing accessors.
 120  */
 121 static bool is_unboxing_method(ciMethod* callee_method, Compile* C) {
 122   return C->eliminate_boxing() && callee_method->is_unboxing_method();
 123 }
 124 
 125 // positive filter: should callee be inlined?
 126 bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method,
 127                                int caller_bci, ciCallProfile& profile,
 128                                WarmCallInfo* wci_result) {
 129   // Allows targeted inlining
 130   if(callee_method->should_inline()) {
 131     *wci_result = *(WarmCallInfo::always_hot());
 132     if (C->print_inlining() && Verbose) {
 133       CompileTask::print_inline_indent(inline_level());
 134       tty->print_cr("Inlined method is hot: ");
 135     }
 136     set_msg("force inline by CompilerOracle");
 137     _forced_inline = true;
 138     return true;
 139   }
 140 
 141 #ifndef PRODUCT
 142   int inline_depth = inline_level()+1;
 143   if (ciReplay::should_inline(C->_replay_inline_data, callee_method, caller_bci, inline_depth)) {
 144     set_msg("force inline by ciReplay");
 145     _forced_inline = true;
 146     return true;
 147   }
 148 #endif
 149 
 150   int size = callee_method->code_size_for_inlining();
 151 
 152   // Check for too many throws (and not too huge)
 153   if(callee_method->interpreter_throwout_count() > InlineThrowCount &&
 154      size < InlineThrowMaxSize ) {
 155     wci_result->set_profit(wci_result->profit() * 100);
 156     if (C->print_inlining() && Verbose) {
 157       CompileTask::print_inline_indent(inline_level());
 158       tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
 159     }
 160     set_msg("many throws");
 161     return true;
 162   }
 163 
 164   if (!UseOldInlining) {
 165     set_msg("!UseOldInlining");
 166     return true;  // size and frequency are represented in a new way
 167   }
 168 
 169   int default_max_inline_size = C->max_inline_size();


 263     fail_msg = "unloaded signature classes";
 264   }
 265 
 266   if (fail_msg != NULL) {
 267     set_msg(fail_msg);
 268     return true;
 269   }
 270 
 271   // ignore heuristic controls on inlining
 272   if (callee_method->should_inline()) {
 273     set_msg("force inline by CompilerOracle");
 274     return false;
 275   }
 276 
 277   if (callee_method->should_not_inline()) {
 278     set_msg("disallowed by CompilerOracle");
 279     return true;
 280   }
 281 
 282 #ifndef PRODUCT
 283   int caller_bci = jvms->bci();
 284   int inline_depth = inline_level()+1;
 285   if (ciReplay::should_inline(C->_replay_inline_data, callee_method, caller_bci, inline_depth)) {
 286     set_msg("force inline by ciReplay");
 287     return false;
 288   }
 289 
 290   if (ciReplay::should_not_inline(C->_replay_inline_data, callee_method, caller_bci, inline_depth)) {
 291     set_msg("disallowed by ciReplay");
 292     return true;
 293   }
 294 
 295   if (ciReplay::should_not_inline(callee_method)) {
 296     set_msg("disallowed by ciReplay");
 297     return true;
 298   }
 299 #endif
 300 
 301   // Now perform checks which are heuristic
 302 
 303   if (is_unboxing_method(callee_method, C)) {
 304     // Inline unboxing methods.
 305     return false;
 306   }
 307 
 308   if (!callee_method->force_inline()) {
 309     if (callee_method->has_compiled_code() &&
 310         callee_method->instructions_size() > InlineSmallCode) {
 311       set_msg("already compiled into a big method");
 312       return true;
 313     }
 314   }


 354 }
 355 
 356 //-----------------------------try_to_inline-----------------------------------
 357 // return true if ok
 358 // Relocated from "InliningClosure::try_to_inline"
 359 bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
 360                                int caller_bci, JVMState* jvms, ciCallProfile& profile,
 361                                WarmCallInfo* wci_result, bool& should_delay) {
 362 
 363    // Old algorithm had funny accumulating BC-size counters
 364   if (UseOldInlining && ClipInlining
 365       && (int)count_inline_bcs() >= DesiredMethodLimit) {
 366     if (!callee_method->force_inline() || !IncrementalInline) {
 367       set_msg("size > DesiredMethodLimit");
 368       return false;
 369     } else if (!C->inlining_incrementally()) {
 370       should_delay = true;
 371     }
 372   }
 373 
 374   _forced_inline = false; // Reset
 375   if (!should_inline(callee_method, caller_method, caller_bci, profile,
 376                      wci_result)) {
 377     return false;
 378   }
 379   if (should_not_inline(callee_method, caller_method, jvms, wci_result)) {
 380     return false;
 381   }
 382 
 383   if (InlineAccessors && callee_method->is_accessor()) {
 384     // accessor methods are not subject to any of the following limits.
 385     set_msg("accessor");
 386     return true;
 387   }
 388 
 389   // suppress a few checks for accessors and trivial methods
 390   if (callee_method->code_size() > MaxTrivialSize) {
 391 
 392     // don't inline into giant methods
 393     if (C->over_inlining_cutoff()) {
 394       if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
 395           || !IncrementalInline) {
 396         set_msg("NodeCountInliningCutoff");
 397         return false;
 398       } else {
 399         should_delay = true;
 400       }
 401     }
 402 
 403     if ((!UseInterpreter || CompileTheWorld) &&
 404         is_init_with_ea(callee_method, caller_method, C)) {

 405       // Escape Analysis stress testing when running Xcomp or CTW:
 406       // inline constructors even if they are not reached.
 407     } else if (forced_inline()) {
 408       // Inlining was forced by CompilerOracle or ciReplay
 409     } else if (profile.count() == 0) {
 410       // don't inline unreached call sites
 411        set_msg("call site not reached");
 412        return false;
 413     }
 414   }
 415 
 416   if (!C->do_inlining() && InlineAccessors) {
 417     set_msg("not an accessor");
 418     return false;
 419   }
 420   if (inline_level() > _max_inline_level) {
 421     if (callee_method->force_inline() && inline_level() > MaxForceInlineLevel) {
 422       set_msg("MaxForceInlineLevel");
 423       return false;
 424     }
 425     if (!callee_method->force_inline() || !IncrementalInline) {
 426       set_msg("inlining too deep");
 427       return false;
 428     } else if (!C->inlining_incrementally()) {


 712   InlineTree* iltp = root;
 713   uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
 714   for (uint d = 1; d <= depth; d++) {
 715     JVMState* jvmsp  = jvms->of_depth(d);
 716     // Select the corresponding subtree for this bci.
 717     assert(jvmsp->method() == iltp->method(), "tree still in sync");
 718     ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
 719     InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
 720     if (sub == NULL) {
 721       if (d == depth) {
 722         sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
 723       }
 724       guarantee(sub != NULL, "should be a sub-ilt here");
 725       return sub;
 726     }
 727     iltp = sub;
 728   }
 729   return iltp;
 730 }
 731 
 732 // Count number of nodes in this subtree
 733 int InlineTree::count() const {
 734   int result = 1;
 735   for (int i = 0 ; i < _subtrees.length(); i++) {
 736     result += _subtrees.at(i)->count();
 737   }
 738   return result;
 739 }
 740 
 741 void InlineTree::dump_replay_data(outputStream* out) {
 742   out->print(" %d %d ", inline_level(), caller_bci());
 743   method()->dump_name_as_ascii(out);
 744   for (int i = 0 ; i < _subtrees.length(); i++) {
 745     _subtrees.at(i)->dump_replay_data(out);
 746   }
 747 }
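
count() and dump_replay_data() are new entry points whose caller is not on this page. A hedged sketch of how the compiler might stitch them together to emit the inline section of a ciReplay file, assuming a Compile::dump_inline_data() hook and the existing Compile::ilt() accessor; the method name and the output prefix are assumptions, not taken from this webrev.

    // Hypothetical caller, for illustration only: write the inline-tree
    // section of the replay data for the current compilation.
    void Compile::dump_inline_data(outputStream* out) {
      InlineTree* inl_tree = ilt();            // root of the inline tree, may be NULL
      if (inl_tree != NULL) {
        out->print(" inline %d", inl_tree->count());   // number of nodes to follow
        inl_tree->dump_replay_data(out);               // pre-order (level, bci, method) records
      }
    }
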
 748 
 749 
 750 #ifndef PRODUCT
 751 void InlineTree::print_impl(outputStream* st, int indent) const {
 752   for (int i = 0; i < indent; i++) st->print(" ");
 753   st->print(" @ %d", caller_bci());
 754   method()->print_short_name(st);
 755   st->cr();
 756 
 757   for (int i = 0 ; i < _subtrees.length(); i++) {
 758     _subtrees.at(i)->print_impl(st, indent + 2);
 759   }
 760 }
 761 
 762 void InlineTree::print_value_on(outputStream* st) const {
 763   print_impl(st, 2);
 764 }
 765 #endif