
src/hotspot/share/opto/doCall.cpp





 117   // intrinsics handle strict f.p. correctly.
 118   CallGenerator* cg_intrinsic = NULL;
 119   if (allow_inline && allow_intrinsics) {
 120     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 121     if (cg != NULL) {
 122       if (cg->is_predicated()) {
 123         // Also generate the non-intrinsic (and hopefully inlined) code as a fallback.
 124         CallGenerator* inline_cg = this->call_generator(callee,
 125               vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, speculative_receiver_type, false);
 126         if (inline_cg != NULL) {
 127           cg = CallGenerator::for_predicated_intrinsic(cg, inline_cg);
 128         }
 129       }
 130 
 131       // If the intrinsic does the virtual dispatch, we try to use the type profile
 132       // first and hopefully inline it as a regular virtual call below.
 133       // We will retry the intrinsic afterwards if nothing else has claimed it.
 134       if (cg->does_virtual_dispatch()) {
 135         cg_intrinsic = cg;
 136         cg = NULL;


 137       } else {
 138         return cg;
 139       }
 140     }
 141   }
 142 
 143   // Do method handle calls.
 144   // NOTE: This must happen before normal inlining logic below since
 145   // MethodHandle.invoke* are native methods which obviously don't
 146   // have bytecodes and so normal inlining fails.
 147   if (callee->is_method_handle_intrinsic()) {
 148     CallGenerator* cg = CallGenerator::for_method_handle_call(jvms, caller, callee);
 149     return cg;
 150   }
 151 
 152   // If explicit rounding is required, do not inline strict into non-strict code (or the reverse).
 153   if (Matcher::strict_fp_requires_explicit_rounding &&
 154       caller->is_strict() != callee->is_strict()) {
 155     allow_inline = false;
 156   }


 167     if (!call_does_dispatch) {
 168       InlineTree* ilt = InlineTree::find_subtree_from_root(this->ilt(), jvms->caller(), jvms->method());
 169       WarmCallInfo scratch_ci;
 170       bool should_delay = false;
 171       WarmCallInfo* ci = ilt->ok_to_inline(callee, jvms, profile, &scratch_ci, should_delay);
 172       assert(ci != &scratch_ci, "do not let this pointer escape");
 173       bool allow_inline   = (ci != NULL && !ci->is_cold());
 174       bool require_inline = (allow_inline && ci->is_hot());
 175 
 176       if (allow_inline) {
 177         CallGenerator* cg = CallGenerator::for_inline(callee, expected_uses);
 178 
 179         if (require_inline && cg != NULL) {
 180           // Delay the inlining of this method to give us the
 181           // opportunity to perform some high level optimizations
 182           // first.
 183           if (should_delay_string_inlining(callee, jvms)) {
 184             return CallGenerator::for_string_late_inline(callee, cg);
 185           } else if (should_delay_boxing_inlining(callee, jvms)) {
 186             return CallGenerator::for_boxing_late_inline(callee, cg);


 187           } else if ((should_delay || AlwaysIncrementalInline)) {
 188             return CallGenerator::for_late_inline(callee, cg);
 189           }
 190         }
 191         if (cg == NULL || should_delay) {
 192           // Fall through.
 193         } else if (require_inline || !InlineWarmCalls) {
 194           return cg;
 195         } else {
 196           CallGenerator* cold_cg = call_generator(callee, vtable_index, call_does_dispatch, jvms, false, prof_factor);
 197           return CallGenerator::for_warm_call(ci, cold_cg, cg);
 198         }
 199       }
 200     }
 201 
 202     // Try using the type profile.
 203     if (call_does_dispatch && site_count > 0 && UseTypeProfile) {
 204       // The major receiver's count >= TypeProfileMajorReceiverPercent of site_count.
 205       bool have_major_receiver = profile.has_receiver(0) && (100.*profile.receiver_prob(0) >= (float)TypeProfileMajorReceiverPercent);
 206       ciMethod* receiver_method = NULL;


 404               return true;
 405           }
 406           return false;
 407         }
 408 
 409       default:
 410         return false;
 411     }
 412   }
 413   return false;
 414 }
 415 
 416 bool Compile::should_delay_boxing_inlining(ciMethod* call_method, JVMState* jvms) {
 417   if (eliminate_boxing() && call_method->is_boxing_method()) {
 418     set_has_boxed_value(true);
 419     return aggressive_unboxing();
 420   }
 421   return false;
 422 }
 423 








 424 // uncommon-trap call-sites where callee is unloaded, uninitialized or will not link
 425 bool Parse::can_not_compile_call_site(ciMethod *dest_method, ciInstanceKlass* klass) {
 426   // Additional inputs to consider...
 427   // bc      = bc()
 428   // caller  = method()
 429   // iter().get_method_holder_index()
 430   assert( dest_method->is_loaded(), "ciTypeFlow should not let us get here" );
 431   // Interface classes can be loaded & linked and never get around to
 432   // being initialized.  Uncommon-trap for not-initialized static or
 433   // v-calls.  Let interface calls happen.
 434   ciInstanceKlass* holder_klass = dest_method->holder();
 435   if (!holder_klass->is_being_initialized() &&
 436       !holder_klass->is_initialized() &&
 437       !holder_klass->is_interface()) {
 438     uncommon_trap(Deoptimization::Reason_uninitialized,
 439                   Deoptimization::Action_reinterpret,
 440                   holder_klass);
 441     return true;
 442   }
 443 




 117   // intrinsics handle strict f.p. correctly.
 118   CallGenerator* cg_intrinsic = NULL;
 119   if (allow_inline && allow_intrinsics) {
 120     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 121     if (cg != NULL) {
 122       if (cg->is_predicated()) {
 123         // Also generate the non-intrinsic (and hopefully inlined) code as a fallback.
 124         CallGenerator* inline_cg = this->call_generator(callee,
 125               vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, speculative_receiver_type, false);
 126         if (inline_cg != NULL) {
 127           cg = CallGenerator::for_predicated_intrinsic(cg, inline_cg);
 128         }
 129       }
 130 
 131       // If the intrinsic does the virtual dispatch, we try to use the type profile
 132       // first and hopefully inline it as a regular virtual call below.
 133       // We will retry the intrinsic afterwards if nothing else has claimed it.
 134       if (cg->does_virtual_dispatch()) {
 135         cg_intrinsic = cg;
 136         cg = NULL;
 137       } else if (should_delay_vector_inlining(callee, jvms)) {
 138         return CallGenerator::for_late_inline(callee, cg);
 139       } else {
 140         return cg;
 141       }
 142     }
 143   }
 144 
 145   // Do method handle calls.
 146   // NOTE: This must happen before normal inlining logic below since
 147   // MethodHandle.invoke* are native methods which obviously don't
 148   // have bytecodes and so normal inlining fails.
 149   if (callee->is_method_handle_intrinsic()) {
 150     CallGenerator* cg = CallGenerator::for_method_handle_call(jvms, caller, callee);
 151     return cg;
 152   }
 153 
 154   // If explicit rounding is required, do not inline strict into non-strict code (or the reverse).
 155   if (Matcher::strict_fp_requires_explicit_rounding &&
 156       caller->is_strict() != callee->is_strict()) {
 157     allow_inline = false;
 158   }


 169     if (!call_does_dispatch) {
 170       InlineTree* ilt = InlineTree::find_subtree_from_root(this->ilt(), jvms->caller(), jvms->method());
 171       WarmCallInfo scratch_ci;
 172       bool should_delay = false;
 173       WarmCallInfo* ci = ilt->ok_to_inline(callee, jvms, profile, &scratch_ci, should_delay);
 174       assert(ci != &scratch_ci, "do not let this pointer escape");
 175       bool allow_inline   = (ci != NULL && !ci->is_cold());
 176       bool require_inline = (allow_inline && ci->is_hot());
 177 
 178       if (allow_inline) {
 179         CallGenerator* cg = CallGenerator::for_inline(callee, expected_uses);
 180 
 181         if (require_inline && cg != NULL) {
 182           // Delay the inlining of this method to give us the
 183           // opportunity to perform some high level optimizations
 184           // first.
 185           if (should_delay_string_inlining(callee, jvms)) {
 186             return CallGenerator::for_string_late_inline(callee, cg);
 187           } else if (should_delay_boxing_inlining(callee, jvms)) {
 188             return CallGenerator::for_boxing_late_inline(callee, cg);
 189           } else if (should_delay_vector_reboxing_inlining(callee, jvms)) {
 190             return CallGenerator::for_vector_reboxing_late_inline(callee, cg);
 191           } else if ((should_delay || AlwaysIncrementalInline)) {
 192             return CallGenerator::for_late_inline(callee, cg);
 193           }
 194         }
 195         if (cg == NULL || should_delay) {
 196           // Fall through.
 197         } else if (require_inline || !InlineWarmCalls) {
 198           return cg;
 199         } else {
 200           CallGenerator* cold_cg = call_generator(callee, vtable_index, call_does_dispatch, jvms, false, prof_factor);
 201           return CallGenerator::for_warm_call(ci, cold_cg, cg);
 202         }
 203       }
 204     }
 205 
 206     // Try using the type profile.
 207     if (call_does_dispatch && site_count > 0 && UseTypeProfile) {
 208       // The major receiver's count >= TypeProfileMajorReceiverPercent of site_count.
 209       bool have_major_receiver = profile.has_receiver(0) && (100.*profile.receiver_prob(0) >= (float)TypeProfileMajorReceiverPercent);
 210       ciMethod* receiver_method = NULL;


 408               return true;
 409           }
 410           return false;
 411         }
 412 
 413       default:
 414         return false;
 415     }
 416   }
 417   return false;
 418 }
 419 
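     // Note on the delay predicates below: each one turns an otherwise-immediate
     // inline into a late (incremental) inline. Boxing methods (e.g. Integer.valueOf)
     // are deferred when box elimination and aggressive unboxing are enabled, so the
     // optimizer can later see how the boxed value is used and, ideally, remove the
     // box/unbox pair altogether.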
 420 bool Compile::should_delay_boxing_inlining(ciMethod* call_method, JVMState* jvms) {
 421   if (eliminate_boxing() && call_method->is_boxing_method()) {
 422     set_has_boxed_value(true);
 423     return aggressive_unboxing();
 424   }
 425   return false;
 426 }
 427 
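     // Vector API support (new in this change): calls into Vector API methods are
     // deferred to late inlining when EnableVectorSupport is on. The intent is that
     // later inlining rounds see the fully expanded vector expression, so vector
     // box/unbox elimination can work across these calls.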
 428 bool Compile::should_delay_vector_inlining(ciMethod* call_method, JVMState* jvms) {
 429   return EnableVectorSupport && call_method->is_vector_method();
 430 }
 431 
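     // The vector rebox intrinsic (vmIntrinsics::_VectorRebox) is deferred the same
     // way, presumably so the rebox can be elided once the surrounding vector code
     // has been inlined and the boxed form turns out to be unnecessary.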
 432 bool Compile::should_delay_vector_reboxing_inlining(ciMethod* call_method, JVMState* jvms) {
 433   return EnableVectorSupport && (call_method->intrinsic_id() == vmIntrinsics::_VectorRebox);
 434 }
 435 
 436 // uncommon-trap call-sites where callee is unloaded, uninitialized or will not link
 437 bool Parse::can_not_compile_call_site(ciMethod *dest_method, ciInstanceKlass* klass) {
 438   // Additional inputs to consider...
 439   // bc      = bc()
 440   // caller  = method()
 441   // iter().get_method_holder_index()
 442   assert( dest_method->is_loaded(), "ciTypeFlow should not let us get here" );
 443   // Interface classes can be loaded & linked and never get around to
 444   // being initialized.  Uncommon-trap for not-initialized static or
 445   // v-calls.  Let interface calls happen.
 446   ciInstanceKlass* holder_klass = dest_method->holder();
 447   if (!holder_klass->is_being_initialized() &&
 448       !holder_klass->is_initialized() &&
 449       !holder_klass->is_interface()) {
 450     uncommon_trap(Deoptimization::Reason_uninitialized,
 451                   Deoptimization::Action_reinterpret,
 452                   holder_klass);
 453     return true;
 454   }
 455 
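
A rough, self-contained sketch of the selection order that Compile::call_generator (shown above) implements: intrinsic first, then method-handle calls, then immediate or deferred bytecode inlining. This is hypothetical illustration code, not HotSpot API; Callee, Plan and choose_plan are invented names, and the real logic also weighs type profiles, warm-call costs and virtual dispatch.

#include <iostream>
#include <string>

// Hypothetical stand-in for the facts call_generator() consults about a call site.
struct Callee {
  std::string name;
  bool has_intrinsic    = false;  // find_intrinsic() would return a generator
  bool is_method_handle = false;  // MethodHandle.invoke* (native, no bytecodes)
  bool is_boxing        = false;  // e.g. Integer.valueOf
  bool is_vector        = false;  // Vector API method
};

enum class Plan { Intrinsic, MethodHandleCall, InlineNow, InlineLate, OutOfLineCall };

// Simplified mirror of the decision chain: the first test that fires picks the
// "call generator"; the delay predicates turn an inline into a late inline.
Plan choose_plan(const Callee& c, bool allow_inline, bool enable_vector_support) {
  if (allow_inline && c.has_intrinsic)      return Plan::Intrinsic;
  if (c.is_method_handle)                   return Plan::MethodHandleCall;
  if (!allow_inline)                        return Plan::OutOfLineCall;
  if (c.is_boxing)                          return Plan::InlineLate;  // cf. should_delay_boxing_inlining
  if (enable_vector_support && c.is_vector) return Plan::InlineLate;  // cf. should_delay_vector_inlining
  return Plan::InlineNow;
}

int main() {
  Callee valueOf;
  valueOf.name      = "Integer.valueOf";
  valueOf.is_boxing = true;
  bool late = choose_plan(valueOf, /*allow_inline=*/true, /*enable_vector_support=*/true) == Plan::InlineLate;
  std::cout << valueOf.name << (late ? " -> late inline" : " -> other") << std::endl;
  return 0;
}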

