Concurrent class unloading
420 if (sciter.type() == relocInfo::static_stub_type &&
421 sciter.static_stub_reloc()->static_call() == static_call_addr) {
422 sciter.static_stub_reloc()->clear_inline_cache();
423 }
424 }
425 }
426 }
427 #endif
428 }
429
430 // Clean the inline cache / static call 'ic' in 'from' (which is itself not
430 // unloaded) when its resolved target 'addr' points into an nmethod that must
430 // no longer be called: not in use, unloading, superseded, or (clean_all) any.
431 template <class CompiledICorStaticCall>
432 static void clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, CompiledMethod* from,
433 bool clean_all) {
434 // OK to look up references to zombies here — find_blob_unsafe tolerates dead blobs.
435 CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
436 CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL; // NULL when addr is not inside a compiled method
437 if (nm != NULL) {
438 // Clean inline caches pointing to zombie or not_entrant methods; code() != nm means nm is no longer the method's current code.
439 if (clean_all || !nm->is_in_use() || nm->is_unloading() || (nm->method()->code() != nm)) {
440 ic->set_to_clean(from->is_alive()); // NOTE(review): pre-concurrent-unloading form — no is_unloading() guard on 'from'
441 assert(ic->is_clean(), "nmethod " PTR_FORMAT "not clean %s", p2i(from), from->method()->name_and_sig_as_C_string());
442 }
443 }
444 }
445
446 static void clean_if_nmethod_is_unloaded(CompiledIC *ic, CompiledMethod* from,
447 bool clean_all) {
448 clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), from, clean_all); // inline cache: target address is ic_destination()
449 }
450
451 static void clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from,
452 bool clean_all) {
453 clean_if_nmethod_is_unloaded(csc, csc->destination(), from, clean_all); // static call: target address is destination()
454 }
455
456 // Cleans caches in nmethods that point to either classes that are unloaded
457 // or nmethods that are unloaded.
458 //
459 // Can be called either in parallel by G1 currently or after all
|
420 if (sciter.type() == relocInfo::static_stub_type &&
421 sciter.static_stub_reloc()->static_call() == static_call_addr) {
422 sciter.static_stub_reloc()->clear_inline_cache();
423 }
424 }
425 }
426 }
427 #endif
428 }
429
430 // Clean the inline cache / static call 'ic' in 'from' (which is itself not
430 // unloaded) when its resolved target 'addr' points into an nmethod that must
430 // no longer be called: not in use, unloading, superseded, or (clean_all) any.
431 template <class CompiledICorStaticCall>
432 static void clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, CompiledMethod* from,
433 bool clean_all) {
434 // OK to look up references to zombies here — find_blob_unsafe tolerates dead blobs.
435 CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
436 CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL; // NULL when addr is not inside a compiled method
437 if (nm != NULL) {
438 // Clean inline caches pointing to zombie or not_entrant methods; code() != nm means nm is no longer the method's current code.
439 if (clean_all || !nm->is_in_use() || nm->is_unloading() || (nm->method()->code() != nm)) {
440 ic->set_to_clean(from->is_alive() && !from->is_unloading()); // concurrent unloading: 'from' may still be alive yet already unloading — don't repair ICs in it
441 assert(ic->is_clean(), "nmethod " PTR_FORMAT "not clean %s", p2i(from), from->method()->name_and_sig_as_C_string());
442 }
443 }
444 }
445
446 static void clean_if_nmethod_is_unloaded(CompiledIC *ic, CompiledMethod* from,
447 bool clean_all) {
448 clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), from, clean_all); // inline cache: target address is ic_destination()
449 }
450
451 static void clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from,
452 bool clean_all) {
453 clean_if_nmethod_is_unloaded(csc, csc->destination(), from, clean_all); // static call: target address is destination()
454 }
455
456 // Cleans caches in nmethods that point to either classes that are unloaded
457 // or nmethods that are unloaded.
458 //
459 // Can be called either in parallel by G1 currently or after all
|