1 /*
   2  * Copyright (c) 1998, 2010, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/systemDictionary.hpp"
  27 #include "classfile/vmSymbols.hpp"
  28 #include "compiler/compileLog.hpp"
  29 #include "interpreter/linkResolver.hpp"
  30 #include "oops/objArrayKlass.hpp"
  31 #include "opto/callGenerator.hpp"
  32 #include "opto/parse.hpp"
  33 #include "runtime/handles.inline.hpp"
  34 
  35 //=============================================================================
  36 //------------------------------InlineTree-------------------------------------
  37 InlineTree::InlineTree( Compile* c,
  38                         const InlineTree *caller_tree, ciMethod* callee,
  39                         JVMState* caller_jvms, int caller_bci,
  40                         float site_invoke_ratio, int site_depth_adjust)
  41 : C(c), _caller_jvms(caller_jvms),
  42   _caller_tree((InlineTree*)caller_tree),
  43   _method(callee), _site_invoke_ratio(site_invoke_ratio),
  44   _site_depth_adjust(site_depth_adjust),
  45   _count_inline_bcs(method()->code_size())
  46 {
  47   NOT_PRODUCT(_count_inlines = 0;)
  48   if (_caller_jvms != NULL) {
  49     // Keep a private copy of the caller_jvms:
  50     _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
  51     _caller_jvms->set_bci(caller_jvms->bci());
  52     assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
  53   }
  54   assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  55   assert((caller_tree == NULL ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  56   assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  57   if (UseOldInlining) {
  58     // Update hierarchical counts, count_inline_bcs() and count_inlines()
  59     InlineTree *caller = (InlineTree *)caller_tree;
  60     for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) {
  61       caller->_count_inline_bcs += count_inline_bcs();
  62       NOT_PRODUCT(caller->_count_inlines++;)
  63     }
  64   }
  65 }
  66 
// Variant constructor used only by the new (non-UseOldInlining) scheme:
// there is no caller tree and the caller JVMState is adopted as-is
// (no private copy is made, unlike the primary constructor above).
InlineTree::InlineTree(Compile* c, ciMethod* callee_method, JVMState* caller_jvms,
                       float site_invoke_ratio, int site_depth_adjust)
: C(c), _caller_jvms(caller_jvms), _caller_tree(NULL),
  _method(callee_method), _site_invoke_ratio(site_invoke_ratio),
  _site_depth_adjust(site_depth_adjust),
  _count_inline_bcs(method()->code_size())
{
  NOT_PRODUCT(_count_inlines = 0;)  // debug-only count of successful inlines below this node
  assert(!UseOldInlining, "do not use for old stuff");
}
  77 
  78 
  79 
  80 static void print_indent(int depth) {
  81   tty->print("      ");
  82   for (int i = depth; i != 0; --i) tty->print("  ");
  83 }
  84 
  85 static bool is_init_with_ea(ciMethod* callee_method,
  86                             ciMethod* caller_method, Compile* C) {
  87   // True when EA is ON and a java constructor is called or
  88   // a super constructor is called from an inlined java constructor.
  89   return C->do_escape_analysis() && EliminateAllocations &&
  90          ( callee_method->is_initializer() ||
  91            (caller_method->is_initializer() &&
  92             caller_method != C->method() &&
  93             caller_method->holder()->is_subclass_of(callee_method->holder()))
  94          );
  95 }
  96 
  97 // positive filter: should send be inlined?  returns NULL, if yes, or rejection msg
  98 const char* InlineTree::shouldInline(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile, WarmCallInfo* wci_result) const {
  99   // Allows targeted inlining
 100   if(callee_method->should_inline()) {
 101     *wci_result = *(WarmCallInfo::always_hot());
 102     if (PrintInlining && Verbose) {
 103       print_indent(inline_depth());
 104       tty->print_cr("Inlined method is hot: ");
 105     }
 106     return NULL;
 107   }
 108 
 109   // positive filter: should send be inlined?  returns NULL (--> yes)
 110   // or rejection msg
 111   int max_size = C->max_inline_size();
 112   int size     = callee_method->code_size();
 113 
 114   // Check for too many throws (and not too huge)
 115   if(callee_method->interpreter_throwout_count() > InlineThrowCount &&
 116      size < InlineThrowMaxSize ) {
 117     wci_result->set_profit(wci_result->profit() * 100);
 118     if (PrintInlining && Verbose) {
 119       print_indent(inline_depth());
 120       tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
 121     }
 122     return NULL;
 123   }
 124 
 125   if (!UseOldInlining) {
 126     return NULL;  // size and frequency are represented in a new way
 127   }
 128 
 129   int call_site_count  = method()->scale_count(profile.count());
 130   int invoke_count     = method()->interpreter_invocation_count();
 131   assert( invoke_count != 0, "Require invokation count greater than zero");
 132   int freq = call_site_count/invoke_count;
 133 
 134   // bump the max size if the call is frequent
 135   if ((freq >= InlineFrequencyRatio) ||
 136       (call_site_count >= InlineFrequencyCount) ||
 137       is_init_with_ea(callee_method, caller_method, C)) {
 138 
 139     max_size = C->freq_inline_size();
 140     if (size <= max_size && TraceFrequencyInlining) {
 141       print_indent(inline_depth());
 142       tty->print_cr("Inlined frequent method (freq=%d count=%d):", freq, call_site_count);
 143       print_indent(inline_depth());
 144       callee_method->print();
 145       tty->cr();
 146     }
 147   } else {
 148     // Not hot.  Check for medium-sized pre-existing nmethod at cold sites.
 149     if (callee_method->has_compiled_code() &&
 150         callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode/4)
 151       return "already compiled into a medium method";
 152   }
 153   if (size > max_size) {
 154     if (max_size > C->max_inline_size())
 155       return "hot method too big";
 156     return "too big";
 157   }
 158   return NULL;
 159 }
 160 
 161 
// negative filter: should send NOT be inlined?  returns NULL, ok to inline, or rejection msg
const char* InlineTree::shouldNotInline(ciMethod *callee_method, ciMethod* caller_method, WarmCallInfo* wci_result) const {
  // negative filter: should send NOT be inlined?  returns NULL (--> inline) or rejection msg
  if (!UseOldInlining) {
    // New inlining scheme: only hard correctness failures return a message
    // (and force the call cold); heuristic objections merely scale down the
    // profit recorded in wci_result.
    const char* fail = NULL;
    if (callee_method->is_abstract())               fail = "abstract method";
    // note: we allow ik->is_abstract()
    if (!callee_method->holder()->is_initialized()) fail = "method holder not initialized";
    if (callee_method->is_native())                 fail = "native method";

    if (fail) {
      *wci_result = *(WarmCallInfo::always_cold());
      return fail;
    }

    // Unloaded classes in the signature make the inline less attractive.
    if (callee_method->has_unloaded_classes_in_signature()) {
      wci_result->set_profit(wci_result->profit() * 0.1);
    }

    // don't inline exception code unless the top method belongs to an
    // exception class
    if (callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
      ciMethod* top_method = caller_jvms() ? caller_jvms()->of_depth(1)->method() : method();
      if (!top_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
        wci_result->set_profit(wci_result->profit() * 0.1);
      }
    }

    // A large pre-existing nmethod suggests inlining buys little here.
    if (callee_method->has_compiled_code() && callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode) {
      wci_result->set_profit(wci_result->profit() * 0.1);
      // %%% adjust wci_result->size()?
    }

    return NULL;
  }

  // ---- UseOldInlining path: every decision below is a hard accept/reject ----

  // Always inline MethodHandle methods and generated MethodHandle adapters.
  if (callee_method->is_method_handle_invoke() || callee_method->is_method_handle_adapter())
    return NULL;

  // First check all inlining restrictions which are required for correctness
  if (callee_method->is_abstract())               return "abstract method";
  // note: we allow ik->is_abstract()
  if (!callee_method->holder()->is_initialized()) return "method holder not initialized";
  if (callee_method->is_native())                 return "native method";
  if (callee_method->has_unloaded_classes_in_signature()) return "unloaded signature classes";

  if (callee_method->should_inline()) {
    // ignore heuristic controls on inlining
    return NULL;
  }

  // Now perform checks which are heuristic

  if( callee_method->has_compiled_code() && callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode )
    return "already compiled into a big method";

  // don't inline exception code unless the top method belongs to an
  // exception class
  if (caller_tree() != NULL &&
      callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
    const InlineTree *top = this;
    while (top->caller_tree() != NULL) top = top->caller_tree();
    ciInstanceKlass* k = top->method()->holder();
    if (!k->is_subclass_of(C->env()->Throwable_klass()))
      return "exception method";
  }

  // use frequency-based objections only for non-trivial methods
  if (callee_method->code_size() <= MaxTrivialSize) return NULL;

  // don't use counts with -Xcomp or CTW
  if (UseInterpreter && !CompileTheWorld) {

    if (!callee_method->has_compiled_code() &&
        !callee_method->was_executed_more_than(0)) {
      return "never executed";
    }

    if (is_init_with_ea(callee_method, caller_method, C)) {

      // Escape Analysis: inline all executed constructors

    } else if (!callee_method->was_executed_more_than(MIN2(MinInliningThreshold,
                                                           CompileThreshold >> 1))) {
      return "executed < MinInliningThreshold times";
    }
  }

  // CompilerOracle "dontinline" / "exclude" directives are final.
  if (callee_method->should_not_inline()) {
    return "disallowed by CompilerOracle";
  }

  if (UseStringCache) {
    // Do not inline StringCache::profile() method used only at the beginning.
    if (callee_method->name() == ciSymbol::profile_name() &&
        callee_method->holder()->name() == ciSymbol::java_lang_StringCache()) {
      return "profiling method";
    }
  }

  return NULL;
}
 265 
//-----------------------------try_to_inline-----------------------------------
// return NULL if ok, reason for not inlining otherwise
// Relocated from "InliningClosure::try_to_inline"
// Combines the positive filter (shouldInline), the negative filter
// (shouldNotInline), and several global limits (node count, inline depth,
// recursion depth, accumulated bytecode budget).
const char* InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile, WarmCallInfo* wci_result) {

  // Old algorithm had funny accumulating BC-size counters
  if (UseOldInlining && ClipInlining
      && (int)count_inline_bcs() >= DesiredMethodLimit) {
    return "size > DesiredMethodLimit";
  }

  const char *msg = NULL;
  if ((msg = shouldInline(callee_method, caller_method, caller_bci,
                          profile, wci_result)) != NULL) {
    return msg;
  }
  if ((msg = shouldNotInline(callee_method, caller_method,
                             wci_result)) != NULL) {
    return msg;
  }

  if (InlineAccessors && callee_method->is_accessor()) {
    // accessor methods are not subject to any of the following limits.
    return NULL;
  }

  // suppress a few checks for accessors and trivial methods
  if (callee_method->code_size() > MaxTrivialSize) {

    // don't inline into giant methods
    if (C->unique() > (uint)NodeCountInliningCutoff) {
      return "NodeCountInliningCutoff";
    }

    if ((!UseInterpreter || CompileTheWorld) &&
        is_init_with_ea(callee_method, caller_method, C)) {

      // Escape Analysis stress testing when running Xcomp or CTW:
      // inline constructors even if they are not reached.

    } else if (profile.count() == 0) {
      // don't inline unreached call sites
      return "call site not reached";
    }
  }

  if (!C->do_inlining() && InlineAccessors) {
    return "not an accessor";
  }
  if( inline_depth() > MaxInlineLevel ) {
    return "inlining too deep";
  }
  // Direct self-recursion is capped separately from general depth.
  if( method() == callee_method &&
      inline_depth() > MaxRecursiveInlineLevel ) {
    return "recursively inlining too deep";
  }

  int size = callee_method->code_size();

  // Re-check the bytecode budget including the candidate's own size.
  if (UseOldInlining && ClipInlining
      && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
    return "size > DesiredMethodLimit";
  }

  // ok, inline this method
  return NULL;
}
 333 
//------------------------------pass_initial_checks----------------------------
// Cheap preconditions that must hold before the inlining policy is even
// consulted: the callee exists, its holder is loaded and initialized, and
// (under -Xcomp/CTW) the call site's constant-pool entries resolve.
bool pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
  ciInstanceKlass *callee_holder = callee_method ? callee_method->holder() : NULL;
  // Check if a callee_method was suggested
  if( callee_method == NULL )            return false;
  // Check if klass of callee_method is loaded
  if( !callee_holder->is_loaded() )      return false;
  if( !callee_holder->is_initialized() ) return false;
  if( !UseInterpreter || CompileTheWorld /* running Xcomp or CTW */ ) {
    // Checks that constant pool's call site has been visited
    // stricter than callee_holder->is_initialized()
    ciBytecodeStream iter(caller_method);
    iter.force_bci(caller_bci);
    Bytecodes::Code call_bc = iter.cur_bc();
    // An invokedynamic instruction does not have a klass.
    if (call_bc != Bytecodes::_invokedynamic) {
      int index = iter.get_index_u2_cpcache();
      if (!caller_method->is_klass_loaded(index, true)) {
        return false;
      }
      // Try to do constant pool resolution if running Xcomp
      if( !caller_method->check_call(index, call_bc == Bytecodes::_invokestatic) ) {
        return false;
      }
    }
  }
  // We will attempt to see if a class/field/etc got properly loaded.  If it
  // did not, it may attempt to throw an exception during our probing.  Catch
  // and ignore such exceptions and do not attempt to compile the method.
  if( callee_method->should_exclude() )  return false;

  return true;
}
 367 
 368 #ifndef PRODUCT
//------------------------------print_inlining---------------------------------
// Really, the failure_msg can be a success message also.
// Debug-only (compiled under #ifndef PRODUCT): prints one line per inlining
// decision, indented by inline depth, with extra statistics under -Verbose.
void InlineTree::print_inlining(ciMethod *callee_method, int caller_bci, const char *failure_msg) const {
  print_indent(inline_depth());
  tty->print("@ %d  ", caller_bci);
  if( callee_method ) callee_method->print_short_name();
  else                tty->print(" callee not monotonic or profiled");
  tty->print("  %s", (failure_msg ? failure_msg : "inline"));
  if( Verbose && callee_method ) {
    // Walk up to the root to report the accumulated inlined-bytecode count.
    const InlineTree *top = this;
    while( top->caller_tree() != NULL ) { top = top->caller_tree(); }
    tty->print("  bcs: %d+%d  invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
  }
  tty->cr();
}
 384 #endif
 385 
//------------------------------ok_to_inline-----------------------------------
// Top-level entry for the inlining policy.  Returns:
//   - WarmCallInfo::always_hot()  : inline now (tree node is built);
//   - a heap-allocated WarmCallInfo: a "warm" call to be reconsidered later
//     (only when InlineWarmCalls is on);
//   - NULL                        : do not inline.
WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci) {
  assert(callee_method != NULL, "caller checks for optimized virtual!");
#ifdef ASSERT
  // Make sure the incoming jvms has the same information content as me.
  // This means that we can eventually make this whole class AllStatic.
  if (jvms->caller() == NULL) {
    assert(_caller_jvms == NULL, "redundant instance state");
  } else {
    assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
  }
  assert(_method == jvms->method(), "redundant instance state");
#endif
  const char *failure_msg   = NULL;
  int         caller_bci    = jvms->bci();
  ciMethod   *caller_method = jvms->method();

  if( !pass_initial_checks(caller_method, caller_bci, callee_method)) {
    if( PrintInlining ) {
      failure_msg = "failed_initial_checks";
      print_inlining( callee_method, caller_bci, failure_msg);
    }
    return NULL;
  }

  // Check if inlining policy says no.
  WarmCallInfo wci = *(initial_wci);
  failure_msg = try_to_inline(callee_method, caller_method, caller_bci, profile, &wci);
  if (failure_msg != NULL && C->log() != NULL) {
    // Record the rejection reason in the compilation log.
    C->log()->begin_elem("inline_fail reason='");
    C->log()->text("%s", failure_msg);
    C->log()->end_elem("'");
  }

#ifndef PRODUCT
  // Debug aid: report when the old (boolean) and new (warm-call) policies
  // disagree about hotness.
  if (UseOldInlining && InlineWarmCalls
      && (PrintOpto || PrintOptoInlining || PrintInlining)) {
    bool cold = wci.is_cold();
    bool hot  = !cold && wci.is_hot();
    bool old_cold = (failure_msg != NULL);
    if (old_cold != cold || (Verbose || WizardMode)) {
      tty->print("   OldInlining= %4s : %s\n           WCI=",
                 old_cold ? "cold" : "hot", failure_msg ? failure_msg : "OK");
      wci.print();
    }
  }
#endif
  // Under the old policy the decision is binary: force the WCI to one of
  // the two extremes based on whether a rejection message was produced.
  if (UseOldInlining) {
    if (failure_msg == NULL)
      wci = *(WarmCallInfo::always_hot());
    else
      wci = *(WarmCallInfo::always_cold());
  }
  if (!InlineWarmCalls) {
    if (!wci.is_cold() && !wci.is_hot()) {
      // Do not inline the warm calls.
      wci = *(WarmCallInfo::always_cold());
    }
  }

  if (!wci.is_cold()) {
    // In -UseOldInlining, the failure_msg may also be a success message.
    if (failure_msg == NULL)  failure_msg = "inline (hot)";

    // Inline!
    if( PrintInlining ) print_inlining( callee_method, caller_bci, failure_msg);
    if (UseOldInlining)
      build_inline_tree_for_callee(callee_method, jvms, caller_bci);
    if (InlineWarmCalls && !wci.is_hot())
      return new (C) WarmCallInfo(wci);  // copy to heap
    return WarmCallInfo::always_hot();
  }

  // Do not inline
  if (failure_msg == NULL)  failure_msg = "too cold to inline";
  if( PrintInlining ) print_inlining( callee_method, caller_bci, failure_msg);
  return NULL;
}
 464 
 465 //------------------------------compute_callee_frequency-----------------------
 466 float InlineTree::compute_callee_frequency( int caller_bci ) const {
 467   int count  = method()->interpreter_call_site_count(caller_bci);
 468   int invcnt = method()->interpreter_invocation_count();
 469   float freq = (float)count/(float)invcnt;
 470   // Call-site count / interpreter invocation count, scaled recursively.
 471   // Always between 0.0 and 1.0.  Represents the percentage of the method's
 472   // total execution time used at this call site.
 473 
 474   return freq;
 475 }
 476 
//------------------------------build_inline_tree_for_callee-------------------
// Create (or find an existing) child InlineTree node for an inlined call
// site.  The child's frequency is the caller's ratio scaled by the site
// frequency, and its depth adjustment discounts MethodHandle adapter frames.
InlineTree *InlineTree::build_inline_tree_for_callee( ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
  float recur_frequency = _site_invoke_ratio * compute_callee_frequency(caller_bci);
  // Attempt inlining.
  InlineTree* old_ilt = callee_at(caller_bci, callee_method);
  if (old_ilt != NULL) {
    // This call site was already inlined once; reuse the existing node.
    return old_ilt;
  }
  int new_depth_adjust = 0;
  if (caller_jvms->method() != NULL) {
    if (caller_jvms->method()->is_method_handle_adapter())
      new_depth_adjust -= 1;  // don't count actions in MH or indy adapter frames
    else if (callee_method->is_method_handle_invoke()) {
      new_depth_adjust -= 1;  // don't count method handle calls from java.dyn implem
    }
    if (new_depth_adjust != 0 && PrintInlining) {
      stringStream nm1; caller_jvms->method()->print_name(&nm1);
      stringStream nm2; callee_method->print_name(&nm2);
      tty->print_cr("discounting inlining depth from %s to %s", nm1.base(), nm2.base());
    }
    if (new_depth_adjust != 0 && C->log()) {
      int id1 = C->log()->identify(caller_jvms->method());
      int id2 = C->log()->identify(callee_method);
      C->log()->elem("inline_depth_discount caller='%d' callee='%d'", id1, id2);
    }
  }
  InlineTree *ilt = new InlineTree(C, this, callee_method, caller_jvms, caller_bci, recur_frequency, _site_depth_adjust + new_depth_adjust);
  _subtrees.append( ilt );

  NOT_PRODUCT( _count_inlines += 1; )

  return ilt;
}
 510 
 511 
 512 //---------------------------------------callee_at-----------------------------
 513 InlineTree *InlineTree::callee_at(int bci, ciMethod* callee) const {
 514   for (int i = 0; i < _subtrees.length(); i++) {
 515     InlineTree* sub = _subtrees.at(i);
 516     if (sub->caller_bci() == bci && callee == sub->method()) {
 517       return sub;
 518     }
 519   }
 520   return NULL;
 521 }
 522 
 523 
 524 //------------------------------build_inline_tree_root-------------------------
 525 InlineTree *InlineTree::build_inline_tree_root() {
 526   Compile* C = Compile::current();
 527 
 528   // Root of inline tree
 529   InlineTree *ilt = new InlineTree(C, NULL, C->method(), NULL, -1, 1.0F, 0);
 530 
 531   return ilt;
 532 }
 533 
 534 
//-------------------------find_subtree_from_root-----------------------------
// Given a jvms, which determines a call chain from the root method,
// find the corresponding inline tree.
// Note: This method will be removed or replaced as InlineTree goes away.
InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee, bool create_if_not_found) {
  InlineTree* iltp = root;
  // Depth 0 means there is no caller chain to walk.
  uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
  for (uint d = 1; d <= depth; d++) {
    JVMState* jvmsp  = jvms->of_depth(d);
    // Select the corresponding subtree for this bci.
    assert(jvmsp->method() == iltp->method(), "tree still in sync");
    // At the innermost frame the callee is the argument; otherwise it is
    // the method of the next-deeper frame in the chain.
    ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
    InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
    if (!sub) {
      // Only the leaf of the chain may be created on demand.
      if (create_if_not_found && d == depth) {
        return iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
      }
      assert(sub != NULL, "should be a sub-ilt here");
      return NULL;
    }
    iltp = sub;
  }
  return iltp;
}