src/share/vm/code/nmethod.cpp
*** old/src/share/vm/code/nmethod.cpp	Wed Jul 16 10:46:40 2014
--- new/src/share/vm/code/nmethod.cpp	Wed Jul 16 10:46:40 2014

*** 1716,1725 ****
--- 1716,1754 ----
      }
      ic->set_to_clean();
    }
  
+ /**
+  * Cleans a static- or IC-call to the interpreter stub if the stub refers to an unloaded method.
+  */
+ template <class CompiledICorStaticCall>
+ static void clean_call_to_interpreter_stub(CompiledICorStaticCall* ic, address stub, BoolObjectClosure* is_alive, nmethod* from) {
+   if (ic->is_call_to_interpreted()) {
+     // Check if the to-interpreter stub contains a reference to dead Method* metadata.
+     RelocIterator iter(from, stub, stub + CompiledStaticCall::to_interp_stub_size());
+     while (iter.next()) {
+       if (iter.type() == relocInfo::metadata_type) {
+         Metadata* md = iter.metadata_reloc()->metadata_value();
+         // Check if class loader of holder Klass is alive
+         if (md != NULL && md->is_method() && !((Method*)md)->method_holder()->is_loader_alive(is_alive)) {
+           ic->set_to_clean();
+           return;
+         }
+       }
+     }
+   }
+ }
+ 
+ static void clean_call_to_interpreter_stub(CompiledIC* ic, BoolObjectClosure* is_alive, nmethod* from) {
+   clean_call_to_interpreter_stub(ic, ic->ic_destination(), is_alive, from);
+ }
+ 
+ static void clean_call_to_interpreter_stub(CompiledStaticCall* csc, BoolObjectClosure* is_alive, nmethod* from) {
+   clean_call_to_interpreter_stub(csc, csc->destination(), is_alive, from);
+ }
+ 
  // This is called at the end of the strong tracing/marking phase of a
  // GC to unload an nmethod if it contains otherwise unreachable
  // oops.
  void nmethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
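Note: the hunk above shares one stub-scanning routine between inline caches and compiled static calls by pairing a templated helper with two thin non-template overloads, since the two call kinds expose the stub address through differently named accessors (ic_destination() vs. destination()). Below is a minimal, self-contained sketch of that overload pattern, not HotSpot code; the types InlineCacheCall and StaticCall, their members, and the plain boolean standing in for the relocation scan are all illustrative assumptions.

    #include <cstdio>

    // Stand-ins for CompiledIC and CompiledStaticCall: same cleaning hook,
    // but the stub address is reached through differently named accessors.
    struct InlineCacheCall {
      const char* ic_destination() const { return "ic-to-interp-stub"; }
      void set_to_clean()                { std::puts("inline cache cleaned"); }
    };

    struct StaticCall {
      const char* destination() const { return "static-to-interp-stub"; }
      void set_to_clean()             { std::puts("static call cleaned"); }
    };

    // Shared logic lives in one template, analogous to the templated
    // clean_call_to_interpreter_stub() above (the real code scans the stub's
    // relocations; here a boolean stands in for that check).
    template <class CallType>
    static void clean_if_stub_is_dead(CallType* call, const char* stub, bool refers_to_dead_method) {
      std::printf("scanning %s\n", stub);
      if (refers_to_dead_method) {
        call->set_to_clean();
      }
    }

    // Thin overloads hide the accessor difference, mirroring the two wrappers in the patch.
    static void clean_if_stub_is_dead(InlineCacheCall* ic, bool dead) {
      clean_if_stub_is_dead(ic, ic->ic_destination(), dead);
    }

    static void clean_if_stub_is_dead(StaticCall* sc, bool dead) {
      clean_if_stub_is_dead(sc, sc->destination(), dead);
    }

    int main() {
      InlineCacheCall ic;
      StaticCall sc;
      clean_if_stub_is_dead(&ic, true);   // scanning ic-to-interp-stub, then "inline cache cleaned"
      clean_if_stub_is_dead(&sc, false);  // scanning static-to-interp-stub, left untouched
      return 0;
    }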
*** 1751,1769 ****
--- 1780,1809 ----
    // Exception cache
    clean_exception_cache(is_alive);
  
    // If class unloading occurred we first iterate over all inline caches and
!   // clear ICs where the cached oop is referring to an unloaded klass or method.
    // The remaining live cached oops will be traversed in the relocInfo::oop_type
    // iteration below.
!   // clear ICs where the cached oop or the to-interpreter stub (if in use) is
+   // referring to an unloaded klass or method. The to-interpreter stubs of
+   // compiled static calls are checked as well. The remaining live cached oops
+   // will be traversed in the relocInfo::oop_type iteration below.
    if (unloading_occurred) {
      RelocIterator iter(this, low_boundary);
      while(iter.next()) {
!       if (iter.type() == relocInfo::virtual_call_type) {
          CompiledIC *ic = CompiledIC_at(&iter);
!         clean_ic_if_metadata_is_dead(ic, is_alive);
!       switch (iter.type()) {
+         case relocInfo::virtual_call_type: {
!           clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
+           break;
+         }
+         case relocInfo::opt_virtual_call_type: {
+           clean_call_to_interpreter_stub(CompiledIC_at(&iter), is_alive, this);
+           break;
+         }
+         case relocInfo::static_call_type: {
+           clean_call_to_interpreter_stub(compiledStaticCall_at(iter.reloc()), is_alive, this);
+           break;
+         }
        }
      }
    }
  
    // Compiled code
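Note: for the new opt_virtual_call_type and static_call_type cases, the condition being applied is the one from clean_call_to_interpreter_stub() in the first hunk: walk the metadata referenced by the call's to-interpreter stub and clean the call as soon as one entry is a Method* whose holder's class loader is no longer alive. A simplified, self-contained sketch of that predicate follows; it is not HotSpot code, and StubMetadata with its fields is an illustrative stand-in for the metadata relocations in the stub.

    #include <cstdio>
    #include <vector>

    // Stand-in for one metadata relocation found inside a to-interpreter stub.
    struct StubMetadata {
      bool is_method;            // the entry refers to a Method*
      bool holder_loader_alive;  // stands in for method_holder()->is_loader_alive(is_alive)
    };

    // Returns true if the stub references at least one method whose class loader is dead,
    // i.e. the situation in which the patch calls set_to_clean() on the call site.
    static bool stub_refers_to_dead_method(const std::vector<StubMetadata>& stub_relocs) {
      for (const StubMetadata& md : stub_relocs) {
        if (md.is_method && !md.holder_loader_alive) {
          return true;  // one dead Method* is enough to force a clean
        }
      }
      return false;
    }

    int main() {
      std::vector<StubMetadata> stub = {
        { true, true  },   // live method: no action
        { true, false },   // method of an unloaded class: forces cleaning
      };
      if (stub_refers_to_dead_method(stub)) {
        std::puts("call would be set_to_clean()");
      }
      return 0;
    }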
*** 1878,1891 ****
--- 1918,1937 ----
          postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
          break;
  
        case relocInfo::opt_virtual_call_type:
+         if (unloading_occurred) {
+           clean_call_to_interpreter_stub(CompiledIC_at(&iter), is_alive, this);
+         }
          postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
          break;
  
        case relocInfo::static_call_type:
+         if (unloading_occurred) {
+           clean_call_to_interpreter_stub(compiledStaticCall_at(iter.reloc()), is_alive, this);
+         }
          postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
          break;
  
        case relocInfo::oop_type:
          if (!is_unloaded) {
