--- old/src/share/vm/code/codeCache.cpp	2017-07-25 14:10:54.721617073 +0200
+++ new/src/share/vm/code/codeCache.cpp	2017-07-25 14:10:54.593617077 +0200
@@ -732,6 +732,13 @@
   debug_only(verify_perm_nmethods(NULL));
 }
 
+void CodeCache::register_scavenge_root_nmethod(nmethod* nm) {
+  assert_locked_or_safepoint(CodeCache_lock);
+  if (!nm->on_scavenge_root_list() && nm->detect_scavenge_root_oops()) {
+    add_scavenge_root_nmethod(nm);
+  }
+}
+
 void CodeCache::add_scavenge_root_nmethod(nmethod* nm) {
   assert_locked_or_safepoint(CodeCache_lock);
 
--- old/src/share/vm/code/codeCache.hpp	2017-07-25 14:10:55.349617051 +0200
+++ new/src/share/vm/code/codeCache.hpp	2017-07-25 14:10:55.241617054 +0200
@@ -181,6 +181,9 @@
   static void scavenge_root_nmethods_do(CodeBlobToOopClosure* f);
   static nmethod* scavenge_root_nmethods() { return _scavenge_root_nmethods; }
 
+  // register_scavenge_root_nmethod() conditionally adds the nmethod to the list
+  // if it is not already on the list and has a scavengeable root
+  static void register_scavenge_root_nmethod(nmethod* nm);
   static void add_scavenge_root_nmethod(nmethod* nm);
   static void drop_scavenge_root_nmethod(nmethod* nm);
 
--- old/src/share/vm/gc/g1/g1CollectedHeap.cpp	2017-07-25 14:10:55.977617029 +0200
+++ new/src/share/vm/gc/g1/g1CollectedHeap.cpp	2017-07-25 14:10:55.857617033 +0200
@@ -2458,10 +2458,6 @@
   return _cmThread->request_concurrent_phase(phase);
 }
 
-void G1CollectedHeap::verify_nmethod_roots(nmethod* nmethod) {
-
-}
-
 class PrintRegionClosure: public HeapRegionClosure {
   outputStream* _st;
 public:
--- old/src/share/vm/gc/g1/g1CollectedHeap.hpp	2017-07-25 14:10:56.741617002 +0200
+++ new/src/share/vm/gc/g1/g1CollectedHeap.hpp	2017-07-25 14:10:56.613617007 +0200
@@ -1389,6 +1389,8 @@
   // Unregister the given nmethod from the G1 heap.
   virtual void unregister_nmethod(nmethod* nm);
 
+  virtual void verify_nmethod_roots(nmethod* nmethod) {}
+
   // Free up superfluous code root memory.
   void purge_code_root_memory();
 
@@ -1436,8 +1438,6 @@
   virtual const char* const* concurrent_phases() const;
   virtual bool request_concurrent_phase(const char* phase);
 
-  void verify_nmethod_roots(nmethod* nmethod);
-
   // The methods below are here for convenience and dispatch the
   // appropriate method depending on value of the given VerifyOption
   // parameter. The values for that parameter, and their meanings,
--- old/src/share/vm/gc/parallel/parallelScavengeHeap.cpp	2017-07-25 14:10:57.421616978 +0200
+++ new/src/share/vm/gc/parallel/parallelScavengeHeap.cpp	2017-07-25 14:10:57.277616983 +0200
@@ -23,6 +23,7 @@
  */
 
 #include "precompiled.hpp"
+#include "code/codeCache.hpp"
 #include "gc/parallel/adjoiningGenerations.hpp"
 #include "gc/parallel/adjoiningVirtualSpaces.hpp"
 #include "gc/parallel/cardTableExtension.hpp"
@@ -671,3 +672,11 @@
   }
 }
 #endif
+
+void ParallelScavengeHeap::register_nmethod(nmethod* nm) {
+  CodeCache::register_scavenge_root_nmethod(nm);
+}
+
+void ParallelScavengeHeap::verify_nmethod_roots(nmethod* nmethod) {
+  nmethod->verify_scavenge_root_oops();
+}
--- old/src/share/vm/gc/parallel/parallelScavengeHeap.hpp	2017-07-25 14:10:57.993616959 +0200
+++ new/src/share/vm/gc/parallel/parallelScavengeHeap.hpp	2017-07-25 14:10:57.885616962 +0200
@@ -135,6 +135,8 @@
   // generational collectors that means during a collection of
   // the young gen.
   virtual bool is_scavengable(const void* addr);
+  virtual void register_nmethod(nmethod* nm);
+  virtual void verify_nmethod_roots(nmethod* nmethod);
 
   size_t max_capacity() const;
 
--- old/src/share/vm/gc/shared/collectedHeap.cpp	2017-07-25 14:10:58.717616933 +0200
+++ new/src/share/vm/gc/shared/collectedHeap.cpp	2017-07-25 14:10:58.589616938 +0200
@@ -137,9 +137,6 @@
 
 void CollectedHeap::register_nmethod(nmethod* nm) {
   assert_locked_or_safepoint(CodeCache_lock);
-  if (!nm->on_scavenge_root_list() && nm->detect_scavenge_root_oops()) {
-    CodeCache::add_scavenge_root_nmethod(nm);
-  }
 }
 
 void CollectedHeap::unregister_nmethod(nmethod* nm) {
--- old/src/share/vm/gc/shared/collectedHeap.hpp	2017-07-25 14:10:59.385616910 +0200
+++ new/src/share/vm/gc/shared/collectedHeap.hpp	2017-07-25 14:10:59.273616914 +0200
@@ -393,8 +393,6 @@
 
   virtual size_t max_tlab_size() const;
 
-  virtual void verify_nmethod_roots(nmethod* nmethod);
-
   // An estimate of the maximum allocation that could be performed
   // for thread-local allocation buffers without triggering any
   // collection or expansion activity.
@@ -573,6 +571,7 @@
   // Override with specific mechanism for each specialized heap type.
   virtual void register_nmethod(nmethod* nm);
   virtual void unregister_nmethod(nmethod* nm);
+  virtual void verify_nmethod_roots(nmethod* nmethod);
 
   void trace_heap_before_gc(const GCTracer* gc_tracer);
   void trace_heap_after_gc(const GCTracer* gc_tracer);
--- old/src/share/vm/gc/shared/genCollectedHeap.cpp	2017-07-25 14:10:59.953616890 +0200
+++ new/src/share/vm/gc/shared/genCollectedHeap.cpp	2017-07-25 14:10:59.853616894 +0200
@@ -1315,3 +1315,11 @@
   }
 #endif
 }
+
+void GenCollectedHeap::register_nmethod(nmethod* nm) {
+  CodeCache::register_scavenge_root_nmethod(nm);
+}
+
+void GenCollectedHeap::verify_nmethod_roots(nmethod* nmethod) {
+  nmethod->verify_scavenge_root_oops();
+}
--- old/src/share/vm/gc/shared/genCollectedHeap.hpp	2017-07-25 14:11:00.445616873 +0200
+++ new/src/share/vm/gc/shared/genCollectedHeap.hpp	2017-07-25 14:11:00.333616877 +0200
@@ -227,6 +227,8 @@
   virtual bool is_scavengable(const void* addr) {
     return is_in_young((oop)addr);
   }
+  virtual void register_nmethod(nmethod* nm);
+  virtual void verify_nmethod_roots(nmethod* nmethod);
 
   // Iteration functions.
   void oop_iterate_no_header(OopClosure* cl);
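
Below is a minimal, self-contained sketch (not part of the patch) of the call flow this change sets up: the "already on the list / has a scavengeable root" check moves out of the shared CollectedHeap::register_nmethod() into CodeCache::register_scavenge_root_nmethod(), and the scavenging heaps (GenCollectedHeap, ParallelScavengeHeap) delegate to it, while G1 keeps an empty verify_nmethod_roots(). All types here are simplified stand-ins, not the real HotSpot classes.

// Simplified model of the patched call flow; stand-in types only.
#include <cstdio>

struct nmethod {
  bool on_list = false;
  bool has_scavengeable_root = true;
  bool on_scavenge_root_list() const { return on_list; }
  bool detect_scavenge_root_oops() const { return has_scavengeable_root; }
  void verify_scavenge_root_oops() { std::printf("verifying scavengeable roots\n"); }
};

struct CodeCache {
  static void add_scavenge_root_nmethod(nmethod* nm) {
    nm->on_list = true;
    std::printf("added nmethod to scavenge root list\n");
  }
  // Conditionally add: only if not already on the list and a scavengeable
  // root was actually found (mirrors the new CodeCache entry point).
  static void register_scavenge_root_nmethod(nmethod* nm) {
    if (!nm->on_scavenge_root_list() && nm->detect_scavenge_root_oops()) {
      add_scavenge_root_nmethod(nm);
    }
  }
};

// A heap that scavenges the young generation simply delegates to CodeCache;
// a region-based heap like G1 tracks code roots per region instead and
// leaves verify_nmethod_roots() empty.
struct GenCollectedHeap {
  void register_nmethod(nmethod* nm) { CodeCache::register_scavenge_root_nmethod(nm); }
  void verify_nmethod_roots(nmethod* nm) { nm->verify_scavenge_root_oops(); }
};

int main() {
  nmethod nm;
  GenCollectedHeap heap;
  heap.register_nmethod(&nm);    // first call: adds the nmethod
  heap.register_nmethod(&nm);    // second call: no-op, already on the list
  heap.verify_nmethod_roots(&nm);
  return 0;
}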