src/share/vm/opto/bytecodeInfo.cpp

rev 3904 : 8005071: Incremental inlining for JSR 292
Summary: post parse inlining driven by number of live nodes.
Reviewed-by:


  29 #include "compiler/compileBroker.hpp"
  30 #include "compiler/compileLog.hpp"
  31 #include "interpreter/linkResolver.hpp"
  32 #include "oops/objArrayKlass.hpp"
  33 #include "opto/callGenerator.hpp"
  34 #include "opto/parse.hpp"
  35 #include "runtime/handles.inline.hpp"
  36 
  37 //=============================================================================
  38 //------------------------------InlineTree-------------------------------------
  39 InlineTree::InlineTree(Compile* c,
  40                        const InlineTree *caller_tree, ciMethod* callee,
  41                        JVMState* caller_jvms, int caller_bci,
  42                        float site_invoke_ratio, int max_inline_level) :
  43   C(c),
  44   _caller_jvms(caller_jvms),
  45   _caller_tree((InlineTree*) caller_tree),
  46   _method(callee),
  47   _site_invoke_ratio(site_invoke_ratio),
  48   _max_inline_level(max_inline_level),
  49   _count_inline_bcs(method()->code_size_for_inlining())
  50 {
  51   NOT_PRODUCT(_count_inlines = 0;)
  52   if (_caller_jvms != NULL) {
  53     // Keep a private copy of the caller_jvms:
  54     _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
  55     _caller_jvms->set_bci(caller_jvms->bci());
  56     assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
  57   }
  58   assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  59   assert((caller_tree == NULL ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  60   assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  61   if (UseOldInlining) {
  62     // Update hierarchical counts, count_inline_bcs() and count_inlines()
  63     InlineTree *caller = (InlineTree *)caller_tree;
  64     for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) {
  65       caller->_count_inline_bcs += count_inline_bcs();
  66       NOT_PRODUCT(caller->_count_inlines++;)
  67     }
  68   }
  69 }
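
For orientation, the loop above is what keeps the byte-code-size accounting hierarchical: each new InlineTree seeds _count_inline_bcs with the callee's size and then adds that size to every ancestor, so the root's count_inline_bcs() always reflects the total amount of bytecode inlined into the compilation. A minimal stand-alone model of that propagation (plain C++, not HotSpot code; the sizes are illustrative):

    // Simplified stand-in for the counter propagation in the constructor above.
    // "bcs" plays the role of code_size_for_inlining().
    #include <cstdio>

    struct Tree {
      Tree* caller;
      int   count_inline_bcs;
      Tree(Tree* c, int bcs) : caller(c), count_inline_bcs(bcs) {
        for (Tree* t = caller; t != NULL; t = t->caller) {
          t->count_inline_bcs += bcs;   // hierarchical accumulation
        }
      }
    };

    int main() {
      Tree root(NULL, 100);   // top-level method, 100 bytecodes
      Tree mid(&root, 40);    // callee inlined into root
      Tree leaf(&mid, 10);    // callee inlined into that callee
      std::printf("root=%d mid=%d leaf=%d\n",
                  root.count_inline_bcs, mid.count_inline_bcs, leaf.count_inline_bcs);
      // prints: root=150 mid=50 leaf=10
      return 0;
    }

This is the counter that ClipInlining later compares against DesiredMethodLimit in try_to_inline().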


 192       }
 193     }
 194 
 195     if (callee_method->has_compiled_code() &&
 196         callee_method->instructions_size() > InlineSmallCode) {
 197       wci_result->set_profit(wci_result->profit() * 0.1);
 198       // %%% adjust wci_result->size()?
 199     }
 200 
 201     return NULL;
 202   }
 203 
 204   // First check all inlining restrictions which are required for correctness
 205   if ( callee_method->is_abstract())                        return "abstract method";
 206   // note: we allow ik->is_abstract()
 207   if (!callee_method->holder()->is_initialized())           return "method holder not initialized";
 208   if ( callee_method->is_native())                          return "native method";
 209   if ( callee_method->dont_inline())                        return "don't inline by annotation";
 210   if ( callee_method->has_unloaded_classes_in_signature())  return "unloaded signature classes";
 211 
 212   if (callee_method->force_inline() || callee_method->should_inline()) {
 213     // ignore heuristic controls on inlining
 214     return NULL;
 215   }
 216 
 217   // Now perform checks which are heuristic
 218 

 219   if (callee_method->has_compiled_code() &&
 220       callee_method->instructions_size() > InlineSmallCode) {
 221     return "already compiled into a big method";
 222   }

 223 
 224   // don't inline exception code unless the top method belongs to an
 225   // exception class
 226   if (caller_tree() != NULL &&
 227       callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
 228     const InlineTree *top = this;
 229     while (top->caller_tree() != NULL) top = top->caller_tree();
 230     ciInstanceKlass* k = top->method()->holder();
 231     if (!k->is_subclass_of(C->env()->Throwable_klass()))
 232       return "exception method";
 233   }
 234 
 235   if (callee_method->should_not_inline()) {
 236     return "disallowed by CompilerOracle";
 237   }
 238 
 239 #ifndef PRODUCT
 240   if (ciReplay::should_not_inline(callee_method)) {
 241     return "disallowed by ciReplay";
 242   }


 260         !callee_method->was_executed_more_than(0)) {
 261       return "never executed";
 262     }
 263 
 264     if (is_init_with_ea(callee_method, caller_method, C)) {
 265 
 266       // Escape Analysis: inline all executed constructors
 267 
 268     } else if (!callee_method->was_executed_more_than(MIN2(MinInliningThreshold,
 269                                                            CompileThreshold >> 1))) {
 270       return "executed < MinInliningThreshold times";
 271     }
 272   }
 273 
 274   return NULL;
 275 }
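
The frequency check above is what turns cold call sites away: unless escape analysis wants the constructor, the callee must have been executed more than MIN2(MinInliningThreshold, CompileThreshold >> 1) times in the interpreter. A small worked example of that arithmetic, assuming the usual server-compiler defaults (MinInliningThreshold = 250, CompileThreshold = 10000; both are -XX flags and may be configured differently):

    // Effective execution-count threshold for the check above; the values are
    // assumed defaults, not read from a running VM.
    #include <algorithm>
    #include <cstdio>

    int main() {
      const int MinInliningThreshold = 250;    // assumed default
      const int CompileThreshold     = 10000;  // assumed default
      const int effective = std::min(MinInliningThreshold, CompileThreshold >> 1); // MIN2
      std::printf("callee must have run more than %d times\n", effective);          // 250
      return 0;
    }

Only if CompileThreshold were configured below 500 would the halved value become the binding limit instead.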
 276 
 277 //-----------------------------try_to_inline-----------------------------------
 278 // return NULL if ok, reason for not inlining otherwise
 279 // Relocated from "InliningClosure::try_to_inline"
 280 const char* InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile, WarmCallInfo* wci_result) {
 281 
 282   // Old algorithm had funny accumulating BC-size counters
 283   if (UseOldInlining && ClipInlining
 284       && (int)count_inline_bcs() >= DesiredMethodLimit) {
 285     return "size > DesiredMethodLimit";
 286   }
 287 
 288   const char *msg = NULL;
 289   msg = should_inline(callee_method, caller_method, caller_bci, profile, wci_result);
 290   if (msg != NULL)
 291     return msg;
 292 
 293   msg = should_not_inline(callee_method, caller_method, wci_result);
 294   if (msg != NULL)
 295     return msg;
 296 
 297   if (InlineAccessors && callee_method->is_accessor()) {
 298     // accessor methods are not subject to any of the following limits.
 299     return NULL;
 300   }
 301 
 302   // suppress a few checks for accessors and trivial methods
 303   if (callee_method->code_size() > MaxTrivialSize) {
 304 
 305     // don't inline into giant methods
 306     if (C->unique() > (uint)NodeCountInliningCutoff) {
 307       return "NodeCountInliningCutoff";
 308     }
 309 
 310     if ((!UseInterpreter || CompileTheWorld) &&
 311         is_init_with_ea(callee_method, caller_method, C)) {
 312 
 313       // Escape Analysis stress testing when running Xcomp or CTW:
 314       // inline constructors even if they are not reached.
 315 
 316     } else if (profile.count() == 0) {
 317       // don't inline unreached call sites
 318       return "call site not reached";
 319     }
 320   }
 321 
 322   if (!C->do_inlining() && InlineAccessors) {
 323     return "not an accessor";
 324   }

 325   if (inline_level() > _max_inline_level) {
 326     return "inlining too deep";
 327   }
 328 
 329   // detect direct and indirect recursive inlining
 330   if (!callee_method->is_compiled_lambda_form()) {
 331     // count the current method and the callee
 332     int inline_level = (method() == callee_method) ? 1 : 0;
 333     if (inline_level > MaxRecursiveInlineLevel)
 334       return "recursively inlining too deep";
 335     // count callers of current method and callee
 336     JVMState* jvms = caller_jvms();
 337     while (jvms != NULL && jvms->has_method()) {
 338       if (jvms->method() == callee_method) {
 339         inline_level++;
 340         if (inline_level > MaxRecursiveInlineLevel)
 341           return "recursively inlining too deep";
 342       }
 343       jvms = jvms->caller();
 344     }
 345   }
 346 
 347   int size = callee_method->code_size_for_inlining();
 348 
 349   if (UseOldInlining && ClipInlining
 350       && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
 351     return "size > DesiredMethodLimit";
 352   }
 353 
 354   // ok, inline this method
 355   return NULL;
 356 }
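
The recursion guard near the end of try_to_inline walks the virtual inlining stack (the chain of JVMState frames) and counts how many frames already belong to the would-be callee; compiled lambda forms skip the check entirely. A stand-alone model of that walk (plain C++, not HotSpot's JVMState; MaxRecursiveInlineLevel defaults to 1 and is assumed here):

    // Simplified model of the recursive-inlining check above.
    #include <cstddef>

    struct Frame {            // stand-in for JVMState
      const void* method;     // stand-in for ciMethod*
      Frame*      caller;
    };

    static const char* check_recursion(const void* current_method,
                                       const void* callee_method,
                                       Frame*      caller_chain,
                                       int max_recursive_inline_level = 1) {
      // count the current method and the callee
      int level = (current_method == callee_method) ? 1 : 0;
      if (level > max_recursive_inline_level) return "recursively inlining too deep";
      // count callers of the current method and the callee
      for (Frame* f = caller_chain; f != NULL; f = f->caller) {
        if (f->method == callee_method &&
            ++level > max_recursive_inline_level) {
          return "recursively inlining too deep";
        }
      }
      return NULL;   // ok to inline
    }

With the default limit of 1, a directly self-recursive call can be inlined once; a second occurrence of the callee anywhere on the caller chain stops further inlining.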
 357 
 358 //------------------------------pass_initial_checks----------------------------
 359 bool pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
 360   ciInstanceKlass *callee_holder = callee_method ? callee_method->holder() : NULL;
 361   // Check if a callee_method was suggested
 362   if( callee_method == NULL )            return false;
 363   // Check if klass of callee_method is loaded
 364   if( !callee_holder->is_loaded() )      return false;
 365   if( !callee_holder->is_initialized() ) return false;
 366   if( !UseInterpreter || CompileTheWorld /* running Xcomp or CTW */ ) {
 367     // Checks that constant pool's call site has been visited
 368     // stricter than callee_holder->is_initialized()
 369     ciBytecodeStream iter(caller_method);
 370     iter.force_bci(caller_bci);
 371     Bytecodes::Code call_bc = iter.cur_bc();


 396   if ( callee->is_abstract())                   return "abstract method";
 397   if (!callee->can_be_compiled())               return "not compilable (disabled)";
 398   if (!callee->has_balanced_monitors())         return "not compilable (unbalanced monitors)";
 399   if ( callee->get_flow_analysis()->failing())  return "not compilable (flow analysis failed)";
 400   return NULL;
 401 }
 402 
 403 //------------------------------print_inlining---------------------------------
 404 // Really, the failure_msg can be a success message also.
 405 void InlineTree::print_inlining(ciMethod* callee_method, int caller_bci, const char* failure_msg) const {
 406   C->print_inlining(callee_method, inline_level(), caller_bci, failure_msg ? failure_msg : "inline");
 407   if (callee_method == NULL)  tty->print(" callee not monotonic or profiled");
 408   if (Verbose && callee_method) {
 409     const InlineTree *top = this;
 410     while( top->caller_tree() != NULL ) { top = top->caller_tree(); }
 411     //tty->print("  bcs: %d+%d  invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
 412   }
 413 }
 414 
 415 //------------------------------ok_to_inline-----------------------------------
 416 WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci) {
 417   assert(callee_method != NULL, "caller checks for optimized virtual!");
 418 #ifdef ASSERT
 419   // Make sure the incoming jvms has the same information content as me.
 420   // This means that we can eventually make this whole class AllStatic.
 421   if (jvms->caller() == NULL) {
 422     assert(_caller_jvms == NULL, "redundant instance state");
 423   } else {
 424     assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
 425   }
 426   assert(_method == jvms->method(), "redundant instance state");
 427 #endif
 428   const char *failure_msg   = NULL;
 429   int         caller_bci    = jvms->bci();
 430   ciMethod   *caller_method = jvms->method();
 431 
 432   // Do some initial checks.
 433   if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
 434     if (PrintInlining)  print_inlining(callee_method, caller_bci, "failed initial checks");
 435     return NULL;
 436   }
 437 
 438   // Do some parse checks.
 439   failure_msg = check_can_parse(callee_method);
 440   if (failure_msg != NULL) {
 441     if (PrintInlining)  print_inlining(callee_method, caller_bci, failure_msg);
 442     return NULL;
 443   }
 444 
 445   // Check if inlining policy says no.
 446   WarmCallInfo wci = *(initial_wci);
 447   failure_msg = try_to_inline(callee_method, caller_method, caller_bci, profile, &wci);
 448   if (failure_msg != NULL && C->log() != NULL) {
 449     C->log()->inline_fail(failure_msg);
 450   }
 451 
 452 #ifndef PRODUCT
 453   if (UseOldInlining && InlineWarmCalls
 454       && (PrintOpto || PrintOptoInlining || PrintInlining)) {
 455     bool cold = wci.is_cold();
 456     bool hot  = !cold && wci.is_hot();
 457     bool old_cold = (failure_msg != NULL);
 458     if (old_cold != cold || (Verbose || WizardMode)) {
 459       tty->print("   OldInlining= %4s : %s\n           WCI=",
 460                  old_cold ? "cold" : "hot", failure_msg ? failure_msg : "OK");
 461       wci.print();
 462     }
 463   }
 464 #endif
 465   if (UseOldInlining) {
 466     if (failure_msg == NULL)
 467       wci = *(WarmCallInfo::always_hot());




  29 #include "compiler/compileBroker.hpp"
  30 #include "compiler/compileLog.hpp"
  31 #include "interpreter/linkResolver.hpp"
  32 #include "oops/objArrayKlass.hpp"
  33 #include "opto/callGenerator.hpp"
  34 #include "opto/parse.hpp"
  35 #include "runtime/handles.inline.hpp"
  36 
  37 //=============================================================================
  38 //------------------------------InlineTree-------------------------------------
  39 InlineTree::InlineTree(Compile* c,
  40                        const InlineTree *caller_tree, ciMethod* callee,
  41                        JVMState* caller_jvms, int caller_bci,
  42                        float site_invoke_ratio, int max_inline_level) :
  43   C(c),
  44   _caller_jvms(caller_jvms),
  45   _caller_tree((InlineTree*) caller_tree),
  46   _method(callee),
  47   _site_invoke_ratio(site_invoke_ratio),
  48   _max_inline_level(max_inline_level),
  49   _count_inline_bcs(method()->code_size_for_inlining()),
  50   _subtrees(c->comp_arena(), 2, 0, NULL)
  51 {
  52   NOT_PRODUCT(_count_inlines = 0;)
  53   if (_caller_jvms != NULL) {
  54     // Keep a private copy of the caller_jvms:
  55     _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
  56     _caller_jvms->set_bci(caller_jvms->bci());
  57     assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
  58   }
  59   assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  60   assert((caller_tree == NULL ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  61   assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  62   if (UseOldInlining) {
  63     // Update hierarchical counts, count_inline_bcs() and count_inlines()
  64     InlineTree *caller = (InlineTree *)caller_tree;
  65     for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) {
  66       caller->_count_inline_bcs += count_inline_bcs();
  67       NOT_PRODUCT(caller->_count_inlines++;)
  68     }
  69   }
  70 }
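
The only change to the constructor in this revision is the new _subtrees initializer: a growable array of child InlineTree pointers, allocated in the compile arena with capacity 2 and length 0 (the declaration itself lives in the header, which is not part of this file). Presumably it lets the tree built during parsing be found again when call sites are inlined incrementally after parsing. A stand-alone sketch of that idea, with std::vector standing in for the arena-allocated GrowableArray and callee_at as an illustrative, assumed lookup name:

    // Model of a per-node registry of inlined subtrees, keyed by call-site bci.
    // Not HotSpot code; std::vector replaces GrowableArray<InlineTree*>.
    #include <cstddef>
    #include <vector>

    struct SubTree {
      int      caller_bci;
      SubTree* parent;
      std::vector<SubTree*> subtrees;            // plays the role of _subtrees

      explicit SubTree(SubTree* p = NULL, int bci = -1) : caller_bci(bci), parent(p) {}

      SubTree* add_callee(int bci) {             // child registered while parsing
        SubTree* child = new SubTree(this, bci);
        subtrees.push_back(child);
        return child;
      }

      SubTree* callee_at(int bci) const {        // later lookup by the incremental inliner
        for (size_t i = 0; i < subtrees.size(); i++) {
          if (subtrees[i]->caller_bci == bci) return subtrees[i];
        }
        return NULL;
      }
    };

The initializer matches GrowableArray's (arena, initial capacity, initial length, filler) constructor, so the storage lives in the compile arena rather than on the C heap.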


 193       }
 194     }
 195 
 196     if (callee_method->has_compiled_code() &&
 197         callee_method->instructions_size() > InlineSmallCode) {
 198       wci_result->set_profit(wci_result->profit() * 0.1);
 199       // %%% adjust wci_result->size()?
 200     }
 201 
 202     return NULL;
 203   }
 204 
 205   // First check all inlining restrictions which are required for correctness
 206   if ( callee_method->is_abstract())                        return "abstract method";
 207   // note: we allow ik->is_abstract()
 208   if (!callee_method->holder()->is_initialized())           return "method holder not initialized";
 209   if ( callee_method->is_native())                          return "native method";
 210   if ( callee_method->dont_inline())                        return "don't inline by annotation";
 211   if ( callee_method->has_unloaded_classes_in_signature())  return "unloaded signature classes";
 212 
 213   if (callee_method->should_inline()) {
 214     // ignore heuristic controls on inlining
 215     return NULL;
 216   }
 217 
 218   // Now perform checks which are heuristic
 219 
 220   if (!callee_method->force_inline()) {
 221     if (callee_method->has_compiled_code() &&
 222         callee_method->instructions_size() > InlineSmallCode) {
 223     return "already compiled into a big method";
 224     }
 225   }
 226 
 227   // don't inline exception code unless the top method belongs to an
 228   // exception class
 229   if (caller_tree() != NULL &&
 230       callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
 231     const InlineTree *top = this;
 232     while (top->caller_tree() != NULL) top = top->caller_tree();
 233     ciInstanceKlass* k = top->method()->holder();
 234     if (!k->is_subclass_of(C->env()->Throwable_klass()))
 235       return "exception method";
 236   }
 237 
 238   if (callee_method->should_not_inline()) {
 239     return "disallowed by CompilerOracle";
 240   }
 241 
 242 #ifndef PRODUCT
 243   if (ciReplay::should_not_inline(callee_method)) {
 244     return "disallowed by ciReplay";
 245   }


 263         !callee_method->was_executed_more_than(0)) {
 264       return "never executed";
 265     }
 266 
 267     if (is_init_with_ea(callee_method, caller_method, C)) {
 268 
 269       // Escape Analysis: inline all executed constructors
 270 
 271     } else if (!callee_method->was_executed_more_than(MIN2(MinInliningThreshold,
 272                                                            CompileThreshold >> 1))) {
 273       return "executed < MinInliningThreshold times";
 274     }
 275   }
 276 
 277   return NULL;
 278 }
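
Compared with the previous revision, should_not_inline now lets force_inline bypass only the heuristics, not the correctness checks: should_inline() alone short-circuits the heuristic section, and the "already compiled into a big method" rejection is skipped when the callee is force_inline (typically a JSR 292 lambda-form helper annotated @ForceInline). A condensed, stand-alone restatement of just that ordering (not the HotSpot code; the InlineSmallCode value is illustrative, and the later checks in the listing, such as exception methods, CompilerOracle, ciReplay and the profile counts, are omitted):

    // Ordering of the correctness block and the reordered big-method check above.
    #include <cstddef>

    struct Callee {
      bool is_abstract, holder_initialized, is_native, dont_inline, unloaded_sig;
      bool should_inline_hint;   // "always inline" hint: skip all heuristics
      bool force_inline;         // e.g. @ForceInline on method handle lambda forms
      bool has_compiled_code;
      int  instructions_size;
    };

    static const char* should_not_inline_sketch(const Callee& m,
                                                 int InlineSmallCode = 2000 /* illustrative */) {
      // correctness checks: never skipped
      if (m.is_abstract)          return "abstract method";
      if (!m.holder_initialized)  return "method holder not initialized";
      if (m.is_native)            return "native method";
      if (m.dont_inline)          return "don't inline by annotation";
      if (m.unloaded_sig)         return "unloaded signature classes";
      if (m.should_inline_hint)   return NULL;   // ignore heuristic controls on inlining
      // heuristics: force_inline now bypasses the big-method rejection
      if (!m.force_inline &&
          m.has_compiled_code && m.instructions_size > InlineSmallCode) {
        return "already compiled into a big method";
      }
      return NULL;               // fall through to the remaining heuristics
    }

This is what keeps large but mandatory method-handle adapters inlinable even after they have been compiled on their own.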
 279 
 280 //-----------------------------try_to_inline-----------------------------------
 281 // return NULL if ok, reason for not inlining otherwise
 282 // Relocated from "InliningClosure::try_to_inline"
 283 const char* InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile, WarmCallInfo* wci_result, bool& should_delay) {
 284   // Old algorithm had funny accumulating BC-size counters
 285   if (UseOldInlining && ClipInlining
 286       && (int)count_inline_bcs() >= DesiredMethodLimit) {
 287     if (!callee_method->force_inline() || !IncrementalInline) {
 288       return "size > DesiredMethodLimit";
 289     } else if (!C->inlining_incrementally()) {
 290       should_delay = true;
 291     }
 292   }
 293 
 294   const char *msg = NULL;
 295   msg = should_inline(callee_method, caller_method, caller_bci, profile, wci_result);
 296   if (msg != NULL)
 297     return msg;
 298 
 299   msg = should_not_inline(callee_method, caller_method, wci_result);
 300   if (msg != NULL)
 301     return msg;
 302 
 303   if (InlineAccessors && callee_method->is_accessor()) {
 304     // accessor methods are not subject to any of the following limits.
 305     return NULL;
 306   }
 307 
 308   // suppress a few checks for accessors and trivial methods
 309   if (callee_method->code_size() > MaxTrivialSize) {
 310 
 311     // don't inline into giant methods
 312     if (C->over_inlining_cutoff()) {
 313       if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
 314           || !IncrementalInline) {
 315         return "NodeCountInliningCutoff";
 316       } else {
 317         should_delay = true;
 318       }
 319     }
 320 
 321     if ((!UseInterpreter || CompileTheWorld) &&
 322         is_init_with_ea(callee_method, caller_method, C)) {
 323 
 324       // Escape Analysis stress testing when running Xcomp or CTW:
 325       // inline constructors even if they are not reached.
 326 
 327     } else if (profile.count() == 0) {
 328       // don't inline unreached call sites
 329       return "call site not reached";
 330     }
 331   }
 332 
 333   if (!C->do_inlining() && InlineAccessors) {
 334     return "not an accessor";
 335   }
 336 
 337   if (inline_level() > _max_inline_level) {
 338     if (!callee_method->force_inline() || !IncrementalInline) {
 339       return "inlining too deep";
 340     } else if (!C->inlining_incrementally()) {
 341       should_delay = true;
 342     }
 343   }
 344 
 345   // detect direct and indirect recursive inlining
 346   if (!callee_method->is_compiled_lambda_form()) {
 347     // count the current method and the callee
 348     int inline_level = (method() == callee_method) ? 1 : 0;
 349     if (inline_level > MaxRecursiveInlineLevel)
 350       return "recursively inlining too deep";
 351     // count callers of current method and callee
 352     JVMState* jvms = caller_jvms();
 353     while (jvms != NULL && jvms->has_method()) {
 354       if (jvms->method() == callee_method) {
 355         inline_level++;
 356         if (inline_level > MaxRecursiveInlineLevel)
 357           return "recursively inlining too deep";
 358       }
 359       jvms = jvms->caller();
 360     }
 361   }
 362 
 363   int size = callee_method->code_size_for_inlining();
 364 
 365   if (UseOldInlining && ClipInlining
 366       && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
 367     if (!callee_method->force_inline() || !IncrementalInline) {
 368       return "size > DesiredMethodLimit";
 369     } else if (!C->inlining_incrementally()) {
 370       should_delay = true;
 371     }
 372   }
 373 
 374   // ok, inline this method
 375   return NULL;
 376 }
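
try_to_inline now distinguishes a hard refusal (the returned message) from a request to postpone the decision: when a size, depth or node-count limit is hit for a force_inline callee (or, in the node-count case, for a call from a compiled lambda form) and IncrementalInline is on, it sets should_delay instead of failing, so the call site can be revisited after parsing; for the size and depth limits this only happens while incremental inlining has not yet started. The node-count test itself moved behind C->over_inlining_cutoff(); presumably (the accessor lives in opto/compile.hpp, not in this file) it keeps the old unique-node cap while parsing and switches to a live-node cap once incremental inlining has begun, which is what the changeset summary, "driven by number of live nodes", suggests. A stand-alone model of that policy under those assumptions:

    // Hedged model of the assumed over_inlining_cutoff() policy; the cutoffs stand
    // in for NodeCountInliningCutoff and the live-node cutoff this change
    // introduces, and the numbers below are purely illustrative.
    #include <cstdio>

    struct CompileModel {
      bool     inlining_incrementally;   // post-parse incremental inlining in progress?
      unsigned unique_nodes;             // all nodes ever created (Compile::unique())
      unsigned live_nodes;               // nodes still reachable after cleanup

      bool over_inlining_cutoff(unsigned node_cutoff, unsigned live_node_cutoff) const {
        return inlining_incrementally ? (live_nodes   > live_node_cutoff)
                                      : (unique_nodes > node_cutoff);
      }
    };

    int main() {
      CompileModel during_parse = { false, 20000, 15000 };
      CompileModel post_parse   = { true,  90000, 30000 };
      std::printf("parse-time over cutoff: %d\n",
                  during_parse.over_inlining_cutoff(18000, 40000));  // 1: too many nodes created
      std::printf("incremental over cutoff: %d\n",
                  post_parse.over_inlining_cutoff(18000, 40000));    // 0: live nodes still under the cap
      return 0;
    }

should_delay is only an output here; the caller of ok_to_inline() is the one that presumably defers the site (for example by wrapping its call generator for late inlining) rather than abandoning it.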
 377 
 378 //------------------------------pass_initial_checks----------------------------
 379 bool pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
 380   ciInstanceKlass *callee_holder = callee_method ? callee_method->holder() : NULL;
 381   // Check if a callee_method was suggested
 382   if( callee_method == NULL )            return false;
 383   // Check if klass of callee_method is loaded
 384   if( !callee_holder->is_loaded() )      return false;
 385   if( !callee_holder->is_initialized() ) return false;
 386   if( !UseInterpreter || CompileTheWorld /* running Xcomp or CTW */ ) {
 387     // Checks that constant pool's call site has been visited
 388     // stricter than callee_holder->is_initialized()
 389     ciBytecodeStream iter(caller_method);
 390     iter.force_bci(caller_bci);
 391     Bytecodes::Code call_bc = iter.cur_bc();


 416   if ( callee->is_abstract())                   return "abstract method";
 417   if (!callee->can_be_compiled())               return "not compilable (disabled)";
 418   if (!callee->has_balanced_monitors())         return "not compilable (unbalanced monitors)";
 419   if ( callee->get_flow_analysis()->failing())  return "not compilable (flow analysis failed)";
 420   return NULL;
 421 }
 422 
 423 //------------------------------print_inlining---------------------------------
 424 // Really, the failure_msg can be a success message also.
 425 void InlineTree::print_inlining(ciMethod* callee_method, int caller_bci, const char* failure_msg) const {
 426   C->print_inlining(callee_method, inline_level(), caller_bci, failure_msg ? failure_msg : "inline");
 427   if (callee_method == NULL)  tty->print(" callee not monotonic or profiled");
 428   if (Verbose && callee_method) {
 429     const InlineTree *top = this;
 430     while( top->caller_tree() != NULL ) { top = top->caller_tree(); }
 431     //tty->print("  bcs: %d+%d  invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
 432   }
 433 }
 434 
 435 //------------------------------ok_to_inline-----------------------------------
 436 WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci, bool& should_delay) {
 437   assert(callee_method != NULL, "caller checks for optimized virtual!");
 438   assert(!should_delay, "should be initialized to false");
 439 #ifdef ASSERT
 440   // Make sure the incoming jvms has the same information content as me.
 441   // This means that we can eventually make this whole class AllStatic.
 442   if (jvms->caller() == NULL) {
 443     assert(_caller_jvms == NULL, "redundant instance state");
 444   } else {
 445     assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
 446   }
 447   assert(_method == jvms->method(), "redundant instance state");
 448 #endif
 449   const char *failure_msg   = NULL;
 450   int         caller_bci    = jvms->bci();
 451   ciMethod   *caller_method = jvms->method();
 452 
 453   // Do some initial checks.
 454   if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
 455     if (PrintInlining)  print_inlining(callee_method, caller_bci, "failed initial checks");
 456     return NULL;
 457   }
 458 
 459   // Do some parse checks.
 460   failure_msg = check_can_parse(callee_method);
 461   if (failure_msg != NULL) {
 462     if (PrintInlining)  print_inlining(callee_method, caller_bci, failure_msg);
 463     return NULL;
 464   }
 465 
 466   // Check if inlining policy says no.
 467   WarmCallInfo wci = *(initial_wci);
 468   failure_msg = try_to_inline(callee_method, caller_method, caller_bci, profile, &wci, should_delay);
 469   if (failure_msg != NULL && C->log() != NULL) {
 470     C->log()->inline_fail(failure_msg);
 471   }
 472 
 473 #ifndef PRODUCT
 474   if (UseOldInlining && InlineWarmCalls
 475       && (PrintOpto || PrintOptoInlining || PrintInlining)) {
 476     bool cold = wci.is_cold();
 477     bool hot  = !cold && wci.is_hot();
 478     bool old_cold = (failure_msg != NULL);
 479     if (old_cold != cold || (Verbose || WizardMode)) {
 480       tty->print("   OldInlining= %4s : %s\n           WCI=",
 481                  old_cold ? "cold" : "hot", failure_msg ? failure_msg : "OK");
 482       wci.print();
 483     }
 484   }
 485 #endif
 486   if (UseOldInlining) {
 487     if (failure_msg == NULL)
 488       wci = *(WarmCallInfo::always_hot());