hotspot/src/share/vm/opto/doCall.cpp

  44   if (TraceTypeProfile || PrintInlining NOT_PRODUCT(|| PrintOptoInlining)) {
  45     outputStream* out = tty;
  46     if (!PrintInlining) {
  47       if (NOT_PRODUCT(!PrintOpto &&) !PrintCompilation) {
  48         method->print_short_name();
  49         tty->cr();
  50       }
  51       CompileTask::print_inlining(prof_method, depth, bci);
  52     } else {
  53       out = C->print_inlining_stream();
  54     }
  55     CompileTask::print_inline_indent(depth, out);
  56     out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  57     stringStream ss;
  58     prof_klass->name()->print_symbol_on(&ss);
  59     out->print(ss.as_string());
  60     out->cr();
  61   }
  62 }
  63
  64 CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool call_does_dispatch,
  65                                        JVMState* jvms, bool allow_inline,
  66                                        float prof_factor, bool allow_intrinsics, bool delayed_forbidden) {
  67   ciMethod*       caller   = jvms->method();
  68   int             bci      = jvms->bci();
  69   Bytecodes::Code bytecode = caller->java_code_at_bci(bci);
  70   guarantee(callee != NULL, "failed method resolution");
  71 
  72   // Dtrace currently doesn't work unless all calls are vanilla
  73   if (env()->dtrace_method_probes()) {
  74     allow_inline = false;
  75   }
  76 
  77   // Note: When we get profiling during stage-1 compiles, we want to pull
  78   // from more specific profile data which pertains to this inlining.
  79   // Right now, ignore the information in jvms->caller(), and do method[bci].
  80   ciCallProfile profile = caller->call_profile_at_bci(bci);
  81 
  82   // See how many times this site has been invoked.
  83   int site_count = profile.count();


  94     int r2id = (rid != -1 && profile.has_receiver(1))? log->identify(profile.receiver(1)):-1;
  95     log->begin_elem("call method='%d' count='%d' prof_factor='%g'",
  96                     log->identify(callee), site_count, prof_factor);
  97     if (call_does_dispatch)  log->print(" virtual='1'");
  98     if (allow_inline)     log->print(" inline='1'");
  99     if (receiver_count >= 0) {
 100       log->print(" receiver='%d' receiver_count='%d'", rid, receiver_count);
 101       if (profile.has_receiver(1)) {
 102         log->print(" receiver2='%d' receiver2_count='%d'", r2id, profile.receiver_count(1));
 103       }
 104     }
 105     log->end_elem();
 106   }
 107 
 108   // Special case the handling of certain common, profitable library
 109   // methods.  If these methods are replaced with specialized code,
 110   // then we return it as the inlined version of the call.
 111   // We do this before the strict f.p. check below because the
 112   // intrinsics handle strict f.p. correctly.
 113   if (allow_inline && allow_intrinsics) {
 114     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 115     if (cg != NULL) {
 116       if (cg->is_predicted()) {
 117         // Code without intrinsic but, hopefully, inlined.
 118         CallGenerator* inline_cg = this->call_generator(callee,
 119               vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, false);
 120         if (inline_cg != NULL) {
 121           cg = CallGenerator::for_predicted_intrinsic(cg, inline_cg);
 122         }
 123       }
 124       return cg;
 125     }
 126   }
 127 
 128   // Do method handle calls.
 129   // NOTE: This must happen before normal inlining logic below since
 130   // MethodHandle.invoke* are native methods which obviously don't
 131   // have bytecodes and so normal inlining fails.
 132   if (callee->is_method_handle_intrinsic()) {
 133     CallGenerator* cg = CallGenerator::for_method_handle_call(jvms, caller, callee, delayed_forbidden);
 134     assert(cg == NULL || !delayed_forbidden || !cg->is_late_inline() || cg->is_mh_late_inline(), "unexpected CallGenerator");
 135     return cg;
 136   }
 137 
 138   // Do not inline strict fp into non-strict code, or the reverse
 139   if (caller->is_strict() ^ callee->is_strict()) {
 140     allow_inline = false;
 141   }
 142 
 143   // Attempt to inline...


 246           } else {
 247             // Generate virtual call for class check failure path
 248             // in case of polymorphic virtual call site.
 249             miss_cg = CallGenerator::for_virtual_call(callee, vtable_index);
 250           }
 251           if (miss_cg != NULL) {
 252             if (next_hit_cg != NULL) {
 253               trace_type_profile(C, jvms->method(), jvms->depth() - 1, jvms->bci(), next_receiver_method, profile.receiver(1), site_count, profile.receiver_count(1));
 254               // We don't need to record dependency on a receiver here and below.
 255               // Whenever we inline, the dependency is added by Parse::Parse().
 256               miss_cg = CallGenerator::for_predicted_call(profile.receiver(1), miss_cg, next_hit_cg, PROB_MAX);
 257             }
 258             if (miss_cg != NULL) {
 259               trace_type_profile(C, jvms->method(), jvms->depth() - 1, jvms->bci(), receiver_method, profile.receiver(0), site_count, receiver_count);
 260               CallGenerator* cg = CallGenerator::for_predicted_call(profile.receiver(0), miss_cg, hit_cg, profile.receiver_prob(0));
 261               if (cg != NULL)  return cg;
 262             }
 263           }
 264         }
 265       }
 266     }
 267   }
 268 
 269   // There was no special inlining tactic, or it bailed out.
 270   // Use a more generic tactic, like a simple call.
 271   if (call_does_dispatch) {
 272     return CallGenerator::for_virtual_call(callee, vtable_index);
 273   } else {
 274     // Class Hierarchy Analysis or Type Profile reveals a unique target,
 275     // or it is a static or special call.
 276     return CallGenerator::for_direct_call(callee, should_delay_inlining(callee, jvms));
 277   }
 278 }
 279 
 280 // Return true for methods that shouldn't be inlined early so that
 281 // they are easier to analyze and optimize as intrinsics.
 282 bool Compile::should_delay_string_inlining(ciMethod* call_method, JVMState* jvms) {
 283   if (has_stringbuilder()) {
 284 
 285     if ((call_method->holder() == C->env()->StringBuilder_klass() ||




  44   if (TraceTypeProfile || PrintInlining NOT_PRODUCT(|| PrintOptoInlining)) {
  45     outputStream* out = tty;
  46     if (!PrintInlining) {
  47       if (NOT_PRODUCT(!PrintOpto &&) !PrintCompilation) {
  48         method->print_short_name();
  49         tty->cr();
  50       }
  51       CompileTask::print_inlining(prof_method, depth, bci);
  52     } else {
  53       out = C->print_inlining_stream();
  54     }
  55     CompileTask::print_inline_indent(depth, out);
  56     out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  57     stringStream ss;
  58     prof_klass->name()->print_symbol_on(&ss);
  59     out->print(ss.as_string());
  60     out->cr();
  61   }
  62 }
  63 
  64 CallGenerator* Compile::inline_intrinsic(ciMethod* callee, int vtable_index, bool call_does_dispatch,
  65                                 JVMState* jvms, bool allow_inline, float prof_factor) {
  66   CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
  67   if (cg != NULL && cg->is_predicted()) {
  68     // Code without intrinsic but, hopefully, inlined.
  69     CallGenerator* inline_cg = this->call_generator(callee,
  70           vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, false);
  71     if (inline_cg != NULL) {
  72       cg = CallGenerator::for_predicted_intrinsic(cg, inline_cg);
  73     }
  74   }
  75   return cg;
  76 }
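
A minimal, self-contained sketch (all names hypothetical, not HotSpot API) of the control-flow shape a predicted intrinsic ultimately stands for once for_predicted_intrinsic has paired the intrinsic with the regular inlined fallback built above:

    #include <cstdio>

    // Hypothetical stand-ins for the two code paths the JIT can emit.
    static long intrinsic_version(long x) { return x * 2; }  // specialized intrinsic code
    static long fallback_version(long x)  { return x + x; }  // ordinary (inlined) bytecode version

    // Hypothetical runtime predicate; in HotSpot the predicated intrinsic supplies
    // its own check (for example, that the receiver has the expected concrete class).
    static bool intrinsic_predicate(long x) { return x >= 0; }

    // Shape of the generated code at the call site: take the intrinsic path when
    // the predicate holds, otherwise fall back to the regular inlined version.
    static long call_site(long x) {
      if (intrinsic_predicate(x)) {
        return intrinsic_version(x);
      }
      return fallback_version(x);
    }

    int main() {
      printf("%ld %ld\n", call_site(21), call_site(-21));  // prints "42 -42"
      return 0;
    }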
  77 
  78 CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool call_does_dispatch,
  79                                        JVMState* jvms, bool allow_inline,
  80                                        float prof_factor, bool allow_intrinsics, bool delayed_forbidden) {
  81   ciMethod*       caller   = jvms->method();
  82   int             bci      = jvms->bci();
  83   Bytecodes::Code bytecode = caller->java_code_at_bci(bci);
  84   guarantee(callee != NULL, "failed method resolution");
  85 
  86   // Dtrace currently doesn't work unless all calls are vanilla
  87   if (env()->dtrace_method_probes()) {
  88     allow_inline = false;
  89   }
  90 
  91   // Note: When we get profiling during stage-1 compiles, we want to pull
  92   // from more specific profile data which pertains to this inlining.
  93   // Right now, ignore the information in jvms->caller(), and do method[bci].
  94   ciCallProfile profile = caller->call_profile_at_bci(bci);
  95 
  96   // See how many times this site has been invoked.
  97   int site_count = profile.count();


 108     int r2id = (rid != -1 && profile.has_receiver(1))? log->identify(profile.receiver(1)):-1;
 109     log->begin_elem("call method='%d' count='%d' prof_factor='%g'",
 110                     log->identify(callee), site_count, prof_factor);
 111     if (call_does_dispatch)  log->print(" virtual='1'");
 112     if (allow_inline)     log->print(" inline='1'");
 113     if (receiver_count >= 0) {
 114       log->print(" receiver='%d' receiver_count='%d'", rid, receiver_count);
 115       if (profile.has_receiver(1)) {
 116         log->print(" receiver2='%d' receiver2_count='%d'", r2id, profile.receiver_count(1));
 117       }
 118     }
 119     log->end_elem();
 120   }
 121 
 122   // Special case the handling of certain common, profitable library
 123   // methods.  If these methods are replaced with specialized code,
 124   // then we return it as the inlined version of the call.
 125   // We do this before the strict f.p. check below because the
 126   // intrinsics handle strict f.p. correctly.
 127   if (allow_inline && allow_intrinsics) {
 128     CallGenerator* cg = inline_intrinsic(callee,
 129             vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor);
 130     // If the intrinsic is marked low priority, skip it for now and let the
 131     // other inlining mechanisms take effect first. We will retry it after
 132     // everything else has been tried.
 133     if (cg != NULL && !cg->is_low_priority()) {
 134       return cg;
 135     }
 136   }
 137 
 138   // Do method handle calls.
 139   // NOTE: This must happen before normal inlining logic below since
 140   // MethodHandle.invoke* are native methods which obviously don't
 141   // have bytecodes and so normal inlining fails.
 142   if (callee->is_method_handle_intrinsic()) {
 143     CallGenerator* cg = CallGenerator::for_method_handle_call(jvms, caller, callee, delayed_forbidden);
 144     assert(cg == NULL || !delayed_forbidden || !cg->is_late_inline() || cg->is_mh_late_inline(), "unexpected CallGenerator");
 145     return cg;
 146   }
 147 
 148   // Do not inline strict fp into non-strict code, or the reverse
 149   if (caller->is_strict() ^ callee->is_strict()) {
 150     allow_inline = false;
 151   }
 152 
 153   // Attempt to inline...


 256           } else {
 257             // Generate virtual call for class check failure path
 258             // in case of polymorphic virtual call site.
 259             miss_cg = CallGenerator::for_virtual_call(callee, vtable_index);
 260           }
 261           if (miss_cg != NULL) {
 262             if (next_hit_cg != NULL) {
 263               trace_type_profile(C, jvms->method(), jvms->depth() - 1, jvms->bci(), next_receiver_method, profile.receiver(1), site_count, profile.receiver_count(1));
 264               // We don't need to record dependency on a receiver here and below.
 265               // Whenever we inline, the dependency is added by Parse::Parse().
 266               miss_cg = CallGenerator::for_predicted_call(profile.receiver(1), miss_cg, next_hit_cg, PROB_MAX);
 267             }
 268             if (miss_cg != NULL) {
 269               trace_type_profile(C, jvms->method(), jvms->depth() - 1, jvms->bci(), receiver_method, profile.receiver(0), site_count, receiver_count);
 270               CallGenerator* cg = CallGenerator::for_predicted_call(profile.receiver(0), miss_cg, hit_cg, profile.receiver_prob(0));
 271               if (cg != NULL)  return cg;
 272             }
 273           }
 274         }
 275       }
 276     }
 277   }
 278 
 279   // Try intrinsics again. This will inline any low-priority intrinsics
 280   // that the code above did not take care of.
 281   if (allow_inline && allow_intrinsics) {
 282     CallGenerator* cg = inline_intrinsic(callee,
 283             vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor);
 284     if (cg != NULL) {
 285       return cg;
 286     }
 287   }
 288 
 289   // There was no special inlining tactic, or it bailed out.
 290   // Use a more generic tactic, like a simple call.
 291   if (call_does_dispatch) {
 292     return CallGenerator::for_virtual_call(callee, vtable_index);
 293   } else {
 294     // Class Hierarchy Analysis or Type Profile reveals a unique target,
 295     // or it is a static or special call.
 296     return CallGenerator::for_direct_call(callee, should_delay_inlining(callee, jvms));
 297   }
 298 }
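
With the retry in place, call_generator above now tries intrinsics twice, bracketing the profile-guided inlining. A compilable sketch of the resulting selection order, using hypothetical helper names rather than the real HotSpot API:

    #include <cstdio>

    struct CG { const char* kind; };  // stand-in for HotSpot's CallGenerator

    // Hypothetical helpers; each returns NULL when its tactic does not apply.
    // They only model the decision order of call_generator above.
    static CG* try_intrinsic(bool include_low_priority) { (void)include_low_priority; return NULL; }
    static CG* try_method_handle()                      { return NULL; }
    static CG* try_profile_guided_inlining()            { return NULL; }
    static CG* generic_call(bool does_dispatch) {
      static CG virt = {"virtual call"}, direct = {"direct call"};
      return does_dispatch ? &virt : &direct;
    }

    static CG* pick_call_generator(bool does_dispatch) {
      if (CG* cg = try_intrinsic(false))          return cg;  // 1. intrinsics, skipping low-priority ones
      if (CG* cg = try_method_handle())           return cg;  // 2. MethodHandle.invoke* intrinsics
      if (CG* cg = try_profile_guided_inlining()) return cg;  // 3. CHA / type-profile guided inlining
      if (CG* cg = try_intrinsic(true))           return cg;  // 4. retry, now accepting low-priority intrinsics
      return generic_call(does_dispatch);                     // 5. plain virtual or direct call
    }

    int main() {
      printf("%s\n", pick_call_generator(true)->kind);  // "virtual call" with these stub helpers
      return 0;
    }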
 299 
 300 // Return true for methods that shouldn't be inlined early so that
 301 // they are easier to analyze and optimize as intrinsics.
 302 bool Compile::should_delay_string_inlining(ciMethod* call_method, JVMState* jvms) {
 303   if (has_stringbuilder()) {
 304 
 305     if ((call_method->holder() == C->env()->StringBuilder_klass() ||