
src/hotspot/share/runtime/sharedRuntime.cpp


*** 1962,1973 ****
  // interpreted. If the caller is compiled we attempt to patch the caller
  // so he no longer calls into the interpreter.
  JRT_LEAF(void, SharedRuntime::fixup_callers_callsite(Method* method, address caller_pc))
    Method* moop(method);

-   address entry_point = moop->from_compiled_entry_no_trampoline();
-
    // It's possible that deoptimization can occur at a call site which hasn't
    // been resolved yet, in which case this function will be called from
    // an nmethod that has been patched for deopt and we can ignore the
    // request for a fixup.
    // Also it is possible that we lost a race in that from_compiled_entry
--- 1962,1971 ----
*** 1975,1985 ****
    // we did we'd leap into space because the callsite needs to use
    // "to interpreter" stub in order to load up the Method*. Don't
    // ask me how I know this...

    CodeBlob* cb = CodeCache::find_blob(caller_pc);
!   if (cb == NULL || !cb->is_compiled() || entry_point == moop->get_c2i_entry()) {
      return;
    }

    // The check above makes sure this is a nmethod.
    CompiledMethod* nm = cb->as_compiled_method_or_null();
--- 1973,1987 ----
    // we did we'd leap into space because the callsite needs to use
    // "to interpreter" stub in order to load up the Method*. Don't
    // ask me how I know this...

    CodeBlob* cb = CodeCache::find_blob(caller_pc);
!   if (cb == NULL || !cb->is_compiled()) {
!     return;
!   }
!   address entry_point = moop->from_compiled_entry_no_trampoline(cb->is_compiled_by_c1());
!   if (entry_point == moop->get_c2i_entry()) {
      return;
    }

    // The check above makes sure this is a nmethod.
    CompiledMethod* nm = cb->as_compiled_method_or_null();
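Note on the reordering: the new call moop->from_compiled_entry_no_trampoline(cb->is_compiled_by_c1()) needs the caller's CodeBlob, so the entry_point computation can no longer sit at the top of the function and has to move below the find_blob()/is_compiled() check. A minimal sketch of the patched fragment, with explanatory comments added here for readability (they are not part of the change itself):

    // Bail out unless caller_pc maps to compiled code; a deoptimized or
    // still-unresolved call site has nothing to fix up.
    CodeBlob* cb = CodeCache::find_blob(caller_pc);
    if (cb == NULL || !cb->is_compiled()) {
      return;
    }
    // Only now can the compiled entry point be computed, since it depends on
    // whether the caller was compiled by C1.
    address entry_point = moop->from_compiled_entry_no_trampoline(cb->is_compiled_by_c1());
    if (entry_point == moop->get_c2i_entry()) {
      // The callee still enters through the c2i adapter ("to interpreter"
      // stub), so leave the call site alone.
      return;
    }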