// NOTE(review): This is NOT compilable C++ as stored. It is a two-column
// side-by-side diff (old version | new version) that has been scraped into
// two very long physical lines. The numbers fused into the text ("104 ",
// "105 ", ...) are the original file's line numbers, and the '|' characters
// are the diff table's column separators -- extraction residue, not code.
// The content looks like the interior of HotSpot C2's
// Compile::call_generator (opto/doCall.cpp) -- TODO confirm against the JDK
// sources; the enclosing function's signature and closing brace are outside
// this fragment, so nothing here can be rebuilt or rewritten safely.
//
// LEFT/OLD column (this physical line, up to the '|'):
//  - If inlining and intrinsics are allowed, find_intrinsic() is consulted;
//    a predicted intrinsic is wrapped via
//    CallGenerator::for_predicted_intrinsic() together with a normally
//    inlined fallback (note the recursive this->call_generator(...) call
//    with allow_intrinsics effectively false as the last argument), and the
//    resulting CallGenerator is returned unconditionally when non-NULL.
//  - Method-handle intrinsics are routed to
//    CallGenerator::for_method_handle_call() before normal inlining, since
//    (per the in-code comment) MethodHandle.invoke* have no bytecodes.
//  - Strict-FP mismatch between caller and callee (is_strict() XOR) disables
//    inlining.
// After the '|' this line also carries the first rows (104-112) of the
// RIGHT/NEW column, which are identical comment/header text.
104 } 105 log->end_elem(); 106 } 107 108 // Special case the handling of certain common, profitable library 109 // methods. If these methods are replaced with specialized code, 110 // then we return it as the inlined version of the call. 111 // We do this before the strict f.p. check below because the 112 // intrinsics handle strict f.p. correctly. 113 if (allow_inline && allow_intrinsics) { 114 CallGenerator* cg = find_intrinsic(callee, call_does_dispatch); 115 if (cg != NULL) { 116 if (cg->is_predicted()) { 117 // Code without intrinsic but, hopefully, inlined. 118 CallGenerator* inline_cg = this->call_generator(callee, 119 vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, false); 120 if (inline_cg != NULL) { 121 cg = CallGenerator::for_predicted_intrinsic(cg, inline_cg); 122 } 123 } 124 return cg; 125 } 126 } 127 128 // Do method handle calls. 129 // NOTE: This must happen before normal inlining logic below since 130 // MethodHandle.invoke* are native methods which obviously don't 131 // have bytecodes and so normal inlining fails. 132 if (callee->is_method_handle_intrinsic()) { 133 CallGenerator* cg = CallGenerator::for_method_handle_call(jvms, caller, callee, delayed_forbidden); 134 assert(cg == NULL || !delayed_forbidden || !cg->is_late_inline() || cg->is_mh_late_inline(), "unexpected CallGenerator"); 135 return cg; 136 } 137 138 // Do not inline strict fp into non-strict code, or the reverse 139 if (caller->is_strict() ^ callee->is_strict()) { 140 allow_inline = false; 141 } 142 143 // Attempt to inline... 144 if (allow_inline) { | 104 } 105 log->end_elem(); 106 } 107 108 // Special case the handling of certain common, profitable library 109 // methods. If these methods are replaced with specialized code, 110 // then we return it as the inlined version of the call. 111 // We do this before the strict f.p. check below because the 112 // intrinsics handle strict f.p. correctly. 
// RIGHT/NEW column continued (rows 113-156). The sole behavioral change vs.
// the left column is a new special case inserted after the predicted-
// intrinsic wrapping, guarded by the UseNewCode flag:
//  - For every intrinsic EXCEPT vmIntrinsics::_hashCode (or whenever
//    UseNewCode is off), the found CallGenerator `cg` is still returned
//    unconditionally, exactly as in the old column.
//  - For the Object.hashCode intrinsic under UseNewCode, `cg` is returned
//    ONLY when the receiver type profile's slot-0 receiver exists and its
//    klass is java/lang/Object (profile.has_receiver(0) &&
//    profile.receiver(0)->as_klass()->is_java_lang_Object()). Otherwise
//    control falls out of the `if (cg != NULL)` block and continues to the
//    method-handle / normal-inlining logic below -- per the new in-code
//    comment, this lets a concrete profiled receiver type get a proper
//    inline cache instead of the generic hashCode intrinsic slow path.
// Everything after row 148 (method-handle calls, strict-FP guard, the
// opening of `if (allow_inline) {`) is unchanged from the left column; the
// trailing '|' closes the diff table's right column.
113 if (allow_inline && allow_intrinsics) { 114 CallGenerator* cg = find_intrinsic(callee, call_does_dispatch); 115 if (cg != NULL) { 116 if (cg->is_predicted()) { 117 // Code without intrinsic but, hopefully, inlined. 118 CallGenerator* inline_cg = this->call_generator(callee, 119 vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, false); 120 if (inline_cg != NULL) { 121 cg = CallGenerator::for_predicted_intrinsic(cg, inline_cg); 122 } 123 } 124 125 // Special case for Object.hashCode() intrinsic: 126 // Only inline the intrinsic when the type profile predicts java/lang/Object. 127 // This helps to produce the proper inline cache for user type instead 128 // of inlining the generic hashCode intrinsic which will push us through 129 // the generic slowpath on every invocation. 130 if (UseNewCode && callee->intrinsic_id() == vmIntrinsics::_hashCode) { 131 if (profile.has_receiver(0) && profile.receiver(0)->as_klass()->is_java_lang_Object()) { 132 return cg; 133 } 134 } else { 135 return cg; 136 } 137 } 138 } 139 140 // Do method handle calls. 141 // NOTE: This must happen before normal inlining logic below since 142 // MethodHandle.invoke* are native methods which obviously don't 143 // have bytecodes and so normal inlining fails. 144 if (callee->is_method_handle_intrinsic()) { 145 CallGenerator* cg = CallGenerator::for_method_handle_call(jvms, caller, callee, delayed_forbidden); 146 assert(cg == NULL || !delayed_forbidden || !cg->is_late_inline() || cg->is_mh_late_inline(), "unexpected CallGenerator"); 147 return cg; 148 } 149 150 // Do not inline strict fp into non-strict code, or the reverse 151 if (caller->is_strict() ^ callee->is_strict()) { 152 allow_inline = false; 153 } 154 155 // Attempt to inline... 156 if (allow_inline) { |