1112 vmIntrinsics::ID id = callee->intrinsic_id();
1113 // When VM replaces MH.invokeBasic/linkTo* call with a direct/virtual call,
1114 // it attaches statically resolved method to the call site.
1115 if (MethodHandles::is_signature_polymorphic(id) &&
1116 MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1117 bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1118
1119 // Adjust invocation mode according to the attached method.
1120 switch (bc) {
1121 case Bytecodes::_invokeinterface:
1122 if (!attached_method->method_holder()->is_interface()) {
1123 bc = Bytecodes::_invokevirtual;
1124 }
1125 break;
1126 case Bytecodes::_invokehandle:
1127 if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1128 bc = attached_method->is_static() ? Bytecodes::_invokestatic
1129 : Bytecodes::_invokevirtual;
1130 }
1131 break;
1132 }
1133 }
1134 } else {
1135 bc = bytecode.invoke_code();
1136 }
1137
1138 bool has_receiver = bc != Bytecodes::_invokestatic &&
1139 bc != Bytecodes::_invokedynamic &&
1140 bc != Bytecodes::_invokehandle;
1141
1142 // Find receiver for non-static call
1143 if (has_receiver) {
1144 // This register map must be updated since we need to find the receiver for
1145 // compiled frames. The receiver might be in a register.
1146 RegisterMap reg_map2(thread);
1147 frame stubFrame = thread->last_frame();
1148 // Caller-frame is a compiled frame
1149 frame callerFrame = stubFrame.sender(&reg_map2);
1150
1151 if (attached_method.is_null()) {
1362 is_optimized, static_bound, is_nmethod, virtual_call_info,
1363 CHECK_(methodHandle()));
1364 } else {
1365 // static call
1366 CompiledStaticCall::compute_entry(callee_method, is_nmethod, static_call_info);
1367 }
1368
1369 // grab lock, check for deoptimization and potentially patch caller
1370 {
1371 MutexLocker ml_patch(CompiledIC_lock);
1372
1373 // Lock blocks for safepoint during which both nmethods can change state.
1374
1375 // Now that we are ready to patch: if the Method* was redefined, then
1376 // don't update the call site and let the caller retry.
1377 // Don't update the call site if the callee nmethod was unloaded or deoptimized.
1378 // Don't update the call site if the callee nmethod was replaced by another nmethod,
1379 // which may happen when multiple alive nmethods (tiered compilation)
1380 // will be supported.
1381 if (!callee_method->is_old() &&
1382 (callee == NULL || callee->is_in_use() && (callee_method->code() == callee))) {
1383 #ifdef ASSERT
1384 // We must not try to patch to jump to an already unloaded method.
1385 if (dest_entry_point != 0) {
1386 CodeBlob* cb = CodeCache::find_blob(dest_entry_point);
1387 assert((cb != NULL) && cb->is_compiled() && (((CompiledMethod*)cb) == callee),
1388 "should not call unloaded nmethod");
1389 }
1390 #endif
1391 if (is_virtual) {
1392 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1393 if (inline_cache->is_clean()) {
1394 inline_cache->set_to_monomorphic(virtual_call_info);
1395 }
1396 } else {
1397 CompiledStaticCall* ssc = caller_nm->compiledStaticCall_before(caller_frame.pc());
1398 if (ssc->is_clean()) ssc->set(static_call_info);
1399 }
1400 }
1401
1402 } // unlock CompiledIC_lock
3151 if ((method != NULL) && method->has_reserved_stack_access()) {
3152 ResourceMark rm(thread);
3153 activation = fr;
3154 warning("Potentially dangerous stack overflow in "
3155 "ReservedStackAccess annotated method %s [%d]",
3156 method->name_and_sig_as_C_string(), count++);
3157 EventReservedStackActivation event;
3158 if (event.should_commit()) {
3159 event.set_method(method);
3160 event.commit();
3161 }
3162 }
3163 if (fr.is_first_java_frame()) {
3164 break;
3165 } else {
3166 fr = fr.java_sender();
3167 }
3168 }
3169 return activation;
3170 }
3171
|
1112 vmIntrinsics::ID id = callee->intrinsic_id();
1113 // When VM replaces MH.invokeBasic/linkTo* call with a direct/virtual call,
1114 // it attaches statically resolved method to the call site.
1115 if (MethodHandles::is_signature_polymorphic(id) &&
1116 MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1117 bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1118
1119 // Adjust invocation mode according to the attached method.
1120 switch (bc) {
1121 case Bytecodes::_invokeinterface:
1122 if (!attached_method->method_holder()->is_interface()) {
1123 bc = Bytecodes::_invokevirtual;
1124 }
1125 break;
1126 case Bytecodes::_invokehandle:
1127 if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1128 bc = attached_method->is_static() ? Bytecodes::_invokestatic
1129 : Bytecodes::_invokevirtual;
1130 }
1131 break;
1132 default:
1133 break;
1134 }
1135 }
1136 } else {
1137 bc = bytecode.invoke_code();
1138 }
1139
1140 bool has_receiver = bc != Bytecodes::_invokestatic &&
1141 bc != Bytecodes::_invokedynamic &&
1142 bc != Bytecodes::_invokehandle;
1143
1144 // Find receiver for non-static call
1145 if (has_receiver) {
1146 // This register map must be updated since we need to find the receiver for
1147 // compiled frames. The receiver might be in a register.
1148 RegisterMap reg_map2(thread);
1149 frame stubFrame = thread->last_frame();
1150 // Caller-frame is a compiled frame
1151 frame callerFrame = stubFrame.sender(&reg_map2);
1152
1153 if (attached_method.is_null()) {
1364 is_optimized, static_bound, is_nmethod, virtual_call_info,
1365 CHECK_(methodHandle()));
1366 } else {
1367 // static call
1368 CompiledStaticCall::compute_entry(callee_method, is_nmethod, static_call_info);
1369 }
1370
1371 // grab lock, check for deoptimization and potentially patch caller
1372 {
1373 MutexLocker ml_patch(CompiledIC_lock);
1374
1375 // Lock blocks for safepoint during which both nmethods can change state.
1376
1377 // Now that we are ready to patch: if the Method* was redefined, then
1378 // don't update the call site and let the caller retry.
1379 // Don't update the call site if the callee nmethod was unloaded or deoptimized.
1380 // Don't update the call site if the callee nmethod was replaced by another nmethod,
1381 // which may happen when multiple alive nmethods (tiered compilation)
1382 // will be supported.
1383 if (!callee_method->is_old() &&
1384 (callee == NULL || (callee->is_in_use() && callee_method->code() == callee))) {
1385 #ifdef ASSERT
1386 // We must not try to patch to jump to an already unloaded method.
1387 if (dest_entry_point != 0) {
1388 CodeBlob* cb = CodeCache::find_blob(dest_entry_point);
1389 assert((cb != NULL) && cb->is_compiled() && (((CompiledMethod*)cb) == callee),
1390 "should not call unloaded nmethod");
1391 }
1392 #endif
1393 if (is_virtual) {
1394 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1395 if (inline_cache->is_clean()) {
1396 inline_cache->set_to_monomorphic(virtual_call_info);
1397 }
1398 } else {
1399 CompiledStaticCall* ssc = caller_nm->compiledStaticCall_before(caller_frame.pc());
1400 if (ssc->is_clean()) ssc->set(static_call_info);
1401 }
1402 }
1403
1404 } // unlock CompiledIC_lock
3153 if ((method != NULL) && method->has_reserved_stack_access()) {
3154 ResourceMark rm(thread);
3155 activation = fr;
3156 warning("Potentially dangerous stack overflow in "
3157 "ReservedStackAccess annotated method %s [%d]",
3158 method->name_and_sig_as_C_string(), count++);
3159 EventReservedStackActivation event;
3160 if (event.should_commit()) {
3161 event.set_method(method);
3162 event.commit();
3163 }
3164 }
3165 if (fr.is_first_java_frame()) {
3166 break;
3167 } else {
3168 fr = fr.java_sender();
3169 }
3170 }
3171 return activation;
3172 }
|