hotspot/src/share/vm/opto/doCall.cpp

*** 108,117 **** --- 108,118 ----
    // Special case the handling of certain common, profitable library
    // methods.  If these methods are replaced with specialized code,
    // then we return it as the inlined version of the call.
    // We do this before the strict f.p. check below because the
    // intrinsics handle strict f.p. correctly.
+   CallGenerator* cg_intrinsic = NULL;
    if (allow_inline && allow_intrinsics) {
      CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
      if (cg != NULL) {
        if (cg->is_predicted()) {
          // Code without intrinsic but, hopefully, inlined.
*** 119,131 **** --- 120,141 ----
              vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, false);
          if (inline_cg != NULL) {
            cg = CallGenerator::for_predicted_intrinsic(cg, inline_cg);
          }
        }
+ 
+       // If the intrinsic does the virtual dispatch, we try to use the type profile
+       // first, and hopefully inline it as the regular virtual call below.
+       // We will retry the intrinsic if nothing has claimed it afterwards.
+       if (cg->does_virtual_dispatch()) {
+         cg_intrinsic = cg;
+         cg = NULL;
+       } else {
        return cg;
      }
    }
+   }

    // Do method handle calls.
    // NOTE: This must happen before normal inlining logic below since
    // MethodHandle.invoke* are native methods which obviously don't
    // have bytecodes and so normal inlining fails.
*** 264,273 **** --- 274,290 ----
          }
        }
      }
    }

+   // Nothing claimed the intrinsic; we go with straight-forward inlining
+   // for the already discovered intrinsic.
+   if (allow_inline && allow_intrinsics && cg_intrinsic != NULL) {
+     assert(cg_intrinsic->does_virtual_dispatch(), "sanity");
+     return cg_intrinsic;
+   }
+ 
    // There was no special inlining tactic, or it bailed out.
    // Use a more generic tactic, like a simple call.
    if (call_does_dispatch) {
      return CallGenerator::for_virtual_call(callee, vtable_index);
    } else {
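
The net effect of the three hunks is a reordering of the decision in call_generator(): a virtual-dispatching intrinsic no longer wins immediately, but is parked in cg_intrinsic so that type-profile-driven inlining gets the first chance, and is only retried as the fallback before the generic virtual call. The standalone sketch below is a minimal model of that decision order for illustration only; CallGen, choose_generator, and the three parameters are invented stand-ins, not HotSpot's real CallGenerator API.

    #include <cassert>
    #include <cstdio>

    // Hypothetical stand-in for HotSpot's CallGenerator; only the one
    // property this patch cares about is modeled.
    struct CallGen {
      bool virtual_dispatch;   // models cg->does_virtual_dispatch()
      bool does_virtual_dispatch() const { return virtual_dispatch; }
    };

    // Models the shape of call_generator() after the patch: a
    // virtual-dispatching intrinsic is deferred into cg_intrinsic so the
    // type-profile-based inlining gets the first chance; the intrinsic is
    // retried only if nothing else claimed the call site.
    CallGen* choose_generator(CallGen* intrinsic, CallGen* profiled_inline,
                              CallGen* generic_virtual) {
      CallGen* cg_intrinsic = nullptr;
      if (intrinsic != nullptr) {
        if (intrinsic->does_virtual_dispatch()) {
          cg_intrinsic = intrinsic;   // defer: retry after profile-based inlining
        } else {
          return intrinsic;           // non-dispatching intrinsic wins immediately
        }
      }
      if (profiled_inline != nullptr) {
        return profiled_inline;       // the type profile claimed the call site
      }
      if (cg_intrinsic != nullptr) {
        assert(cg_intrinsic->does_virtual_dispatch() && "sanity");
        return cg_intrinsic;          // nothing claimed it: retry the intrinsic
      }
      return generic_virtual;         // plain virtual call as the last resort
    }

    int main() {
      CallGen hash_intrinsic{/*virtual_dispatch=*/true};
      CallGen vcall{/*virtual_dispatch=*/false};
      // No usable type profile: the deferred intrinsic is picked up again.
      std::printf("%s\n",
                  choose_generator(&hash_intrinsic, nullptr, &vcall) == &hash_intrinsic
                      ? "intrinsic" : "other");
    }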