1213 assert(caller_nm->is_alive(), "It should be alive");
1214
1215 #ifndef PRODUCT
1216 // tracing/debugging/statistics
1217 int *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1218 (is_virtual) ? (&_resolve_virtual_ctr) :
1219 (&_resolve_static_ctr);
1220 Atomic::inc(addr);
1221
1222 if (TraceCallFixup) {
1223 ResourceMark rm(thread);
1224 tty->print("resolving %s%s (%s) call to",
1225 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1226 Bytecodes::name(invoke_code));
1227 callee_method->print_short_name(tty);
1228 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT, caller_frame.pc(), callee_method->code());
1229 }
1230 #endif
1231
1232 // JSR 292 key invariant:
1233 // If the resolved method is a MethodHandle invoke target, the call
1234 // site must be a MethodHandle call site, because the lambda form might tail-call
1235 // leaving the stack in a state unknown to either caller or callee
1236 // TODO detune for now but we might need it again
1237 // assert(!callee_method->is_compiled_lambda_form() ||
1238 // caller_nm->is_method_handle_return(caller_frame.pc()), "must be MH call site");
1239
1240 // Compute entry points. This might require generation of C2I converter
1241 // frames, so we cannot be holding any locks here. Furthermore, the
1242 // computation of the entry points is independent of patching the call. We
1243 // always return the entry-point, but we only patch the stub if the call has
1244 // not been deoptimized. Return values: For a virtual call this is an
1245 // (cached_oop, destination address) pair. For a static call/optimized
1246 // virtual this is just a destination address.
1247
1248 StaticCallInfo static_call_info;
1249 CompiledICInfo virtual_call_info;
1250
1251 // Make sure the callee nmethod does not get deoptimized and removed before
1252 // we are done patching the code.
1253 nmethod* callee_nm = callee_method->code();
|
1213 assert(caller_nm->is_alive(), "It should be alive");
1214
1215 #ifndef PRODUCT
1216 // tracing/debugging/statistics
1217 int *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1218 (is_virtual) ? (&_resolve_virtual_ctr) :
1219 (&_resolve_static_ctr);
1220 Atomic::inc(addr);
1221
1222 if (TraceCallFixup) {
1223 ResourceMark rm(thread);
1224 tty->print("resolving %s%s (%s) call to",
1225 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1226 Bytecodes::name(invoke_code));
1227 callee_method->print_short_name(tty);
1228 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT, caller_frame.pc(), callee_method->code());
1229 }
1230 #endif
1231
1232 // JSR 292 key invariant:
1233 // If the resolved method is a MethodHandle invoke target, the call
1234 // site must be a MethodHandle call site, because the lambda form might tail-call
1235 // leaving the stack in a state unknown to either caller or callee
1236 // TODO detune for now but we might need it again
1237 // assert(!callee_method->is_compiled_lambda_form() ||
1238 // caller_nm->is_method_handle_return(caller_frame.pc()), "must be MH call site");
1239
1240 // Compute entry points. This might require generation of C2I converter
1241 // frames, so we cannot be holding any locks here. Furthermore, the
1242 // computation of the entry points is independent of patching the call. We
1243 // always return the entry-point, but we only patch the stub if the call has
1244 // not been deoptimized. Return values: For a virtual call this is an
1245 // (cached_oop, destination address) pair. For a static call/optimized
1246 // virtual this is just a destination address.
1247
1248 StaticCallInfo static_call_info;
1249 CompiledICInfo virtual_call_info;
1250
1251 // Make sure the callee nmethod does not get deoptimized and removed before
1252 // we are done patching the code.
1253 nmethod* callee_nm = callee_method->code();
|