/*
 * Copyright (c) 1998, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "incls/_precompiled.incl"
#include "incls/_bytecodeInfo.cpp.incl"

//=============================================================================
//------------------------------InlineTree-------------------------------------
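// An InlineTree node represents one method in the tree of actual or potential
// inlines rooted at the method being compiled.  This constructor snapshots the
// caller's JVMState for the call site and, under UseOldInlining, charges the
// callee's bytecode size to every enclosing caller so that ClipInlining can
// bound the total amount of inlined bytecode against DesiredMethodLimit.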
InlineTree::InlineTree( Compile* c,
                        const InlineTree *caller_tree, ciMethod* callee,
                        JVMState* caller_jvms, int caller_bci,
                        float site_invoke_ratio, int site_depth_adjust)
: C(c), _caller_jvms(caller_jvms),
  _caller_tree((InlineTree*)caller_tree),
  _method(callee), _site_invoke_ratio(site_invoke_ratio),
  _site_depth_adjust(site_depth_adjust),
  _count_inline_bcs(method()->code_size())
{
  NOT_PRODUCT(_count_inlines = 0;)
  if (_caller_jvms != NULL) {
    // Keep a private copy of the caller_jvms:
    _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
    _caller_jvms->set_bci(caller_jvms->bci());
    assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
  }
  assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  assert((caller_tree == NULL ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  if (UseOldInlining) {
    // Update hierarchical counts, count_inline_bcs() and count_inlines()
    InlineTree *caller = (InlineTree *)caller_tree;
    for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) {
      caller->_count_inline_bcs += count_inline_bcs();
      NOT_PRODUCT(caller->_count_inlines++;)
    }
  }
}

InlineTree::InlineTree(Compile* c, ciMethod* callee_method, JVMState* caller_jvms,
                       float site_invoke_ratio, int site_depth_adjust)
: C(c), _caller_jvms(caller_jvms), _caller_tree(NULL),
  _method(callee_method), _site_invoke_ratio(site_invoke_ratio),
  _site_depth_adjust(site_depth_adjust),
  _count_inline_bcs(method()->code_size())
{
  NOT_PRODUCT(_count_inlines = 0;)
  assert(!UseOldInlining, "do not use for old stuff");
}



static void print_indent(int depth) {
  tty->print("      ");
  for (int i = depth; i != 0; --i) tty->print("  ");
}

static bool is_init_with_ea(ciMethod* callee_method,
                            ciMethod* caller_method, Compile* C) {
  // True when EA is ON and a java constructor is called or
  // a super constructor is called from an inlined java constructor.
  return C->do_escape_analysis() && EliminateAllocations &&
         ( callee_method->is_initializer() ||
           (caller_method->is_initializer() &&
            caller_method != C->method() &&
            caller_method->holder()->is_subclass_of(callee_method->holder()))
         );
}

// positive filter: should send be inlined?  returns NULL, if yes, or rejection msg
const char* InlineTree::shouldInline(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile, WarmCallInfo* wci_result) const {
  // Allows targeted inlining
  if(callee_method->should_inline()) {
    *wci_result = *(WarmCallInfo::always_hot());
    if (PrintInlining && Verbose) {
      print_indent(inline_depth());
      tty->print_cr("Inlined method is hot: ");
    }
    return NULL;
  }

  // positive filter: should send be inlined?  returns NULL (--> yes)
  // or rejection msg
  int max_size = C->max_inline_size();
  int size     = callee_method->code_size();

  // Check for too many throws (and not too huge)
  if(callee_method->interpreter_throwout_count() > InlineThrowCount &&
     size < InlineThrowMaxSize ) {
    wci_result->set_profit(wci_result->profit() * 100);
    if (PrintInlining && Verbose) {
      print_indent(inline_depth());
      tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
    }
    return NULL;
  }

  if (!UseOldInlining) {
    return NULL;  // size and frequency are represented in a new way
  }

  int call_site_count  = method()->scale_count(profile.count());
  int invoke_count     = method()->interpreter_invocation_count();
  assert( invoke_count != 0, "Require invocation count greater than zero");
  int freq = call_site_count/invoke_count;

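  // Worked example (illustrative numbers only): a scaled call-site count of
  // 400 in a caller interpreted 100 times gives freq == 4, i.e. the site runs
  // about four times per caller invocation (say, inside a loop); the checks
  // below compare freq and call_site_count against InlineFrequencyRatio and
  // InlineFrequencyCount to decide whether the larger freq_inline_size()
  // budget applies.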
  // bump the max size if the call is frequent
  if ((freq >= InlineFrequencyRatio) ||
      (call_site_count >= InlineFrequencyCount) ||
      is_init_with_ea(callee_method, caller_method, C)) {

    max_size = C->freq_inline_size();
    if (size <= max_size && TraceFrequencyInlining) {
      print_indent(inline_depth());
      tty->print_cr("Inlined frequent method (freq=%d count=%d):", freq, call_site_count);
      print_indent(inline_depth());
      callee_method->print();
      tty->cr();
    }
  } else {
    // Not hot.  Check for medium-sized pre-existing nmethod at cold sites.
    if (callee_method->has_compiled_code() &&
        callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode/4)
      return "already compiled into a medium method";
  }
  if (size > max_size) {
    if (max_size > C->max_inline_size())
      return "hot method too big";
    return "too big";
  }
  return NULL;
}


// negative filter: should send NOT be inlined?  returns NULL, ok to inline, or rejection msg
const char* InlineTree::shouldNotInline(ciMethod *callee_method, ciMethod* caller_method, WarmCallInfo* wci_result) const {
  // negative filter: should send NOT be inlined?  returns NULL (--> inline) or rejection msg
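  // With the newer (warm call) inlining below, only correctness problems
  // reject the call outright and mark it always_cold; the heuristic concerns
  // merely scale down the estimated profit of the WarmCallInfo.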
  if (!UseOldInlining) {
    const char* fail = NULL;
    if (callee_method->is_abstract())               fail = "abstract method";
    // note: we allow ik->is_abstract()
    if (!callee_method->holder()->is_initialized()) fail = "method holder not initialized";
    if (callee_method->is_native())                 fail = "native method";

    if (fail) {
      *wci_result = *(WarmCallInfo::always_cold());
      return fail;
    }

    if (callee_method->has_unloaded_classes_in_signature()) {
      wci_result->set_profit(wci_result->profit() * 0.1);
    }

    // don't inline exception code unless the top method belongs to an
    // exception class
    if (callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
      ciMethod* top_method = caller_jvms() ? caller_jvms()->of_depth(1)->method() : method();
      if (!top_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
        wci_result->set_profit(wci_result->profit() * 0.1);
      }
    }

    if (callee_method->has_compiled_code() && callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode) {
      wci_result->set_profit(wci_result->profit() * 0.1);
      // %%% adjust wci_result->size()?
    }

    return NULL;
  }

  // Always inline MethodHandle methods and generated MethodHandle adapters.
  if (callee_method->is_method_handle_invoke() || callee_method->is_method_handle_adapter())
    return NULL;

  // First check all inlining restrictions which are required for correctness
  if (callee_method->is_abstract())               return "abstract method";
  // note: we allow ik->is_abstract()
  if (!callee_method->holder()->is_initialized()) return "method holder not initialized";
  if (callee_method->is_native())                 return "native method";
  if (callee_method->has_unloaded_classes_in_signature()) return "unloaded signature classes";

  if (callee_method->should_inline()) {
    // ignore heuristic controls on inlining
    return NULL;
  }

  // Now perform checks which are heuristic

  if( callee_method->has_compiled_code() && callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode )
    return "already compiled into a big method";

  // don't inline exception code unless the top method belongs to an
  // exception class
  if (caller_tree() != NULL &&
      callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
    const InlineTree *top = this;
    while (top->caller_tree() != NULL) top = top->caller_tree();
    ciInstanceKlass* k = top->method()->holder();
    if (!k->is_subclass_of(C->env()->Throwable_klass()))
      return "exception method";
  }

  // use frequency-based objections only for non-trivial methods
  if (callee_method->code_size() <= MaxTrivialSize) return NULL;

  // don't use counts with -Xcomp or CTW
  if (UseInterpreter && !CompileTheWorld) {

    if (!callee_method->has_compiled_code() &&
        !callee_method->was_executed_more_than(0)) {
      return "never executed";
    }

    if (is_init_with_ea(callee_method, caller_method, C)) {

      // Escape Analysis: inline all executed constructors

    } else if (!callee_method->was_executed_more_than(MIN2(MinInliningThreshold,
                                                           CompileThreshold >> 1))) {
      return "executed < MinInliningThreshold times";
    }
  }

  if (callee_method->should_not_inline()) {
    return "disallowed by CompilerOracle";
  }

  if (UseStringCache) {
    // Do not inline StringCache::profile() method used only at the beginning.
    if (callee_method->name() == ciSymbol::profile_name() &&
        callee_method->holder()->name() == ciSymbol::java_lang_StringCache()) {
      return "profiling method";
    }
  }

  return NULL;
}

//-----------------------------try_to_inline-----------------------------------
// return NULL if ok, reason for not inlining otherwise
// Relocated from "InliningClosure::try_to_inline"
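// Order of checks: the positive filter (shouldInline) may accept hot or
// throw-heavy callees early, the negative filter (shouldNotInline) applies
// the correctness and heuristic rejections, and the remaining tests enforce
// node-count, depth and accumulated-size limits.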
const char* InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile, WarmCallInfo* wci_result) {

  // Old algorithm had funny accumulating BC-size counters
  if (UseOldInlining && ClipInlining
      && (int)count_inline_bcs() >= DesiredMethodLimit) {
    return "size > DesiredMethodLimit";
  }

  const char *msg = NULL;
  if ((msg = shouldInline(callee_method, caller_method, caller_bci,
                          profile, wci_result)) != NULL) {
    return msg;
  }
  if ((msg = shouldNotInline(callee_method, caller_method,
                             wci_result)) != NULL) {
    return msg;
  }

  if (InlineAccessors && callee_method->is_accessor()) {
    // accessor methods are not subject to any of the following limits.
    return NULL;
  }

  // suppress a few checks for accessors and trivial methods
  if (callee_method->code_size() > MaxTrivialSize) {

    // don't inline into giant methods
    if (C->unique() > (uint)NodeCountInliningCutoff) {
      return "NodeCountInliningCutoff";
    }

    if ((!UseInterpreter || CompileTheWorld) &&
        is_init_with_ea(callee_method, caller_method, C)) {

      // Escape Analysis stress testing when running Xcomp or CTW:
      // inline constructors even if they are not reached.

    } else if (profile.count() == 0) {
      // don't inline unreached call sites
      return "call site not reached";
    }
  }

  if (!C->do_inlining() && InlineAccessors) {
    return "not an accessor";
  }
  if( inline_depth() > MaxInlineLevel ) {
    return "inlining too deep";
  }
  if( method() == callee_method &&
      inline_depth() > MaxRecursiveInlineLevel ) {
    return "recursively inlining too deep";
  }

  int size = callee_method->code_size();

  if (UseOldInlining && ClipInlining
      && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
    return "size > DesiredMethodLimit";
  }

  // ok, inline this method
  return NULL;
}

//------------------------------pass_initial_checks----------------------------
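// Sanity checks that must pass before any inlining decision is attempted:
// the callee must be known, its holder must be loaded and initialized, and
// under -Xcomp/CTW the call site's constant pool entry must resolve so the
// compiled code never hits an unvisited call site.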
bool pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
  ciInstanceKlass *callee_holder = callee_method ? callee_method->holder() : NULL;
  // Check if a callee_method was suggested
  if( callee_method == NULL )            return false;
  // Check if klass of callee_method is loaded
  if( !callee_holder->is_loaded() )      return false;
  if( !callee_holder->is_initialized() ) return false;
  if( !UseInterpreter || CompileTheWorld /* running Xcomp or CTW */ ) {
    // Checks that constant pool's call site has been visited
    // stricter than callee_holder->is_initialized()
    ciBytecodeStream iter(caller_method);
    iter.force_bci(caller_bci);
    Bytecodes::Code call_bc = iter.cur_bc();
    // An invokedynamic instruction does not have a klass.
    if (call_bc != Bytecodes::_invokedynamic) {
      int index = iter.get_index_u2_cpcache();
      if (!caller_method->is_klass_loaded(index, true)) {
        return false;
      }
      // Try to do constant pool resolution if running Xcomp
      if( !caller_method->check_call(index, call_bc == Bytecodes::_invokestatic) ) {
        return false;
      }
    }
  }
  // We will attempt to see if a class/field/etc got properly loaded.  If it
  // did not, it may attempt to throw an exception during our probing.  Catch
  // and ignore such exceptions and do not attempt to compile the method.
  if( callee_method->should_exclude() )  return false;

  return true;
}

#ifndef PRODUCT
//------------------------------print_inlining---------------------------------
// Really, the failure_msg can be a success message also.
void InlineTree::print_inlining(ciMethod *callee_method, int caller_bci, const char *failure_msg) const {
  print_indent(inline_depth());
  tty->print("@ %d  ", caller_bci);
  if( callee_method ) callee_method->print_short_name();
  else                tty->print(" callee not monotonic or profiled");
  tty->print("  %s", (failure_msg ? failure_msg : "inline"));
  if( Verbose && callee_method ) {
    const InlineTree *top = this;
    while( top->caller_tree() != NULL ) { top = top->caller_tree(); }
    tty->print("  bcs: %d+%d  invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
  }
  tty->cr();
}
#endif

//------------------------------ok_to_inline-----------------------------------
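// Top-level inlining decision for one call site: run the initial checks,
// consult try_to_inline, and convert the verdict into a WarmCallInfo --
// always_hot to inline, NULL to reject, or (with InlineWarmCalls) a heap
// copy describing a warm call to be decided later.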
WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci) {
  assert(callee_method != NULL, "caller checks for optimized virtual!");
#ifdef ASSERT
  // Make sure the incoming jvms has the same information content as me.
  // This means that we can eventually make this whole class AllStatic.
  if (jvms->caller() == NULL) {
    assert(_caller_jvms == NULL, "redundant instance state");
  } else {
    assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
  }
  assert(_method == jvms->method(), "redundant instance state");
#endif
  const char *failure_msg   = NULL;
  int         caller_bci    = jvms->bci();
  ciMethod   *caller_method = jvms->method();

  if( !pass_initial_checks(caller_method, caller_bci, callee_method)) {
    if( PrintInlining ) {
      failure_msg = "failed_initial_checks";
      print_inlining( callee_method, caller_bci, failure_msg);
    }
    return NULL;
  }

  // Check if inlining policy says no.
  WarmCallInfo wci = *(initial_wci);
  failure_msg = try_to_inline(callee_method, caller_method, caller_bci, profile, &wci);
  if (failure_msg != NULL && C->log() != NULL) {
    C->log()->begin_elem("inline_fail reason='");
    C->log()->text("%s", failure_msg);
    C->log()->end_elem("'");
  }

#ifndef PRODUCT
  if (UseOldInlining && InlineWarmCalls
      && (PrintOpto || PrintOptoInlining || PrintInlining)) {
    bool cold = wci.is_cold();
    bool hot  = !cold && wci.is_hot();
    bool old_cold = (failure_msg != NULL);
    if (old_cold != cold || (Verbose || WizardMode)) {
      tty->print("   OldInlining= %4s : %s\n           WCI=",
                 old_cold ? "cold" : "hot", failure_msg ? failure_msg : "OK");
      wci.print();
    }
  }
#endif
  if (UseOldInlining) {
    if (failure_msg == NULL)
      wci = *(WarmCallInfo::always_hot());
    else
      wci = *(WarmCallInfo::always_cold());
  }
  if (!InlineWarmCalls) {
    if (!wci.is_cold() && !wci.is_hot()) {
      // Do not inline the warm calls.
      wci = *(WarmCallInfo::always_cold());
    }
  }

  if (!wci.is_cold()) {
    // In -UseOldInlining, the failure_msg may also be a success message.
    if (failure_msg == NULL)  failure_msg = "inline (hot)";

    // Inline!
    if( PrintInlining ) print_inlining( callee_method, caller_bci, failure_msg);
    if (UseOldInlining)
      build_inline_tree_for_callee(callee_method, jvms, caller_bci);
    if (InlineWarmCalls && !wci.is_hot())
      return new (C) WarmCallInfo(wci);  // copy to heap
    return WarmCallInfo::always_hot();
  }

  // Do not inline
  if (failure_msg == NULL)  failure_msg = "too cold to inline";
  if( PrintInlining ) print_inlining( callee_method, caller_bci, failure_msg);
  return NULL;
}

//------------------------------compute_callee_frequency-----------------------
float InlineTree::compute_callee_frequency( int caller_bci ) const {
  int count  = method()->interpreter_call_site_count(caller_bci);
  int invcnt = method()->interpreter_invocation_count();
  float freq = (float)count/(float)invcnt;
  // Call-site count / interpreter invocation count, scaled recursively.
  // Always between 0.0 and 1.0.  Represents the percentage of the method's
  // total execution time used at this call site.

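  // Example (illustrative numbers): a site counted 25 times in a method
  // invoked 100 times yields 0.25; the caller multiplies this by its own
  // _site_invoke_ratio in build_inline_tree_for_callee to estimate how often
  // the callee runs per invocation of the root method.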
  return freq;
}

//------------------------------build_inline_tree_for_callee-------------------
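// Record an inlining decision for this call site: reuse an existing subtree
// for the same (bci, callee) pair if one exists, otherwise create a child
// InlineTree, discounting the inline depth for method handle adapter and
// invoke frames so the MethodHandle plumbing does not count against the
// depth limits.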
InlineTree *InlineTree::build_inline_tree_for_callee( ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
  float recur_frequency = _site_invoke_ratio * compute_callee_frequency(caller_bci);
  // Attempt inlining.
  InlineTree* old_ilt = callee_at(caller_bci, callee_method);
  if (old_ilt != NULL) {
    return old_ilt;
  }
  int new_depth_adjust = 0;
  if (caller_jvms->method() != NULL) {
    if (caller_jvms->method()->is_method_handle_adapter())
      new_depth_adjust -= 1;  // don't count actions in MH or indy adapter frames
    else if (callee_method->is_method_handle_invoke()) {
      new_depth_adjust -= 1;  // don't count method handle calls from java.dyn implem
    }
    if (new_depth_adjust != 0 && PrintInlining) {
      stringStream nm1; caller_jvms->method()->print_name(&nm1);
      stringStream nm2; callee_method->print_name(&nm2);
      tty->print_cr("discounting inlining depth from %s to %s", nm1.base(), nm2.base());
    }
    if (new_depth_adjust != 0 && C->log()) {
      int id1 = C->log()->identify(caller_jvms->method());
      int id2 = C->log()->identify(callee_method);
      C->log()->elem("inline_depth_discount caller='%d' callee='%d'", id1, id2);
    }
  }
  InlineTree *ilt = new InlineTree(C, this, callee_method, caller_jvms, caller_bci, recur_frequency, _site_depth_adjust + new_depth_adjust);
  _subtrees.append( ilt );

  NOT_PRODUCT( _count_inlines += 1; )

  return ilt;
}


//---------------------------------------callee_at-----------------------------
InlineTree *InlineTree::callee_at(int bci, ciMethod* callee) const {
  for (int i = 0; i < _subtrees.length(); i++) {
    InlineTree* sub = _subtrees.at(i);
    if (sub->caller_bci() == bci && callee == sub->method()) {
      return sub;
    }
  }
  return NULL;
}


//------------------------------build_inline_tree_root-------------------------
InlineTree *InlineTree::build_inline_tree_root() {
  Compile* C = Compile::current();

  // Root of inline tree
  InlineTree *ilt = new InlineTree(C, NULL, C->method(), NULL, -1, 1.0F, 0);

  return ilt;
}


//-------------------------find_subtree_from_root-----------------------------
// Given a jvms, which determines a call chain from the root method,
// find the corresponding inline tree.
// Note: This method will be removed or replaced as InlineTree goes away.
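// Walks the JVMState chain from the outermost caller (depth 1) down to the
// innermost frame, matching each (bci, method) pair against the recorded
// subtrees; optionally creates the leaf subtree if it is missing.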
InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee, bool create_if_not_found) {
  InlineTree* iltp = root;
  uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
  for (uint d = 1; d <= depth; d++) {
    JVMState* jvmsp  = jvms->of_depth(d);
    // Select the corresponding subtree for this bci.
    assert(jvmsp->method() == iltp->method(), "tree still in sync");
    ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
    InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
    if (!sub) {
      if (create_if_not_found && d == depth) {
        return iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
      }
      assert(sub != NULL, "should be a sub-ilt here");
      return NULL;
    }
    iltp = sub;
  }
  return iltp;
}