--- old/src/share/vm/gc/cms/cmsOopClosures.hpp 2015-06-22 11:04:41.610668465 +0200
+++ new/src/share/vm/gc/cms/cmsOopClosures.hpp 2015-06-22 11:04:41.466667751 +0200
@@ -66,7 +66,8 @@
   virtual void do_klass(Klass* k);
   void do_klass_nv(Klass* k);
 
-  virtual void do_class_loader_data(ClassLoaderData* cld);
+  virtual void do_cld(ClassLoaderData* cld) { do_cld_nv(cld); }
+  void do_cld_nv(ClassLoaderData* cld);
 };
 
 class MarkRefsIntoClosure: public MetadataAwareOopsInGenClosure {
--- old/src/share/vm/gc/cms/cmsOopClosures.inline.hpp 2015-06-22 11:04:41.846669636 +0200
+++ new/src/share/vm/gc/cms/cmsOopClosures.inline.hpp 2015-06-22 11:04:41.702668922 +0200
@@ -50,11 +50,11 @@
 
 inline void MetadataAwareOopsInGenClosure::do_klass_nv(Klass* k) {
   ClassLoaderData* cld = k->class_loader_data();
-  do_class_loader_data(cld);
+  do_cld_nv(cld);
 }
 inline void MetadataAwareOopsInGenClosure::do_klass(Klass* k) { do_klass_nv(k); }
 
-inline void MetadataAwareOopsInGenClosure::do_class_loader_data(ClassLoaderData* cld) {
+inline void MetadataAwareOopsInGenClosure::do_cld_nv(ClassLoaderData* cld) {
   assert(_klass_closure._oop_closure == this, "Must be");
 
   bool claim = true;  // Must claim the class loader data before processing.
--- old/src/share/vm/gc/cms/concurrentMarkSweepGeneration.cpp 2015-06-22 11:04:42.062670707 +0200
+++ new/src/share/vm/gc/cms/concurrentMarkSweepGeneration.cpp 2015-06-22 11:04:41.926670032 +0200
@@ -4631,7 +4631,7 @@
     ResourceMark rm;
     GrowableArray<ClassLoaderData*>* array = ClassLoaderDataGraph::new_clds();
     for (int i = 0; i < array->length(); i++) {
-      par_mrias_cl.do_class_loader_data(array->at(i));
+      par_mrias_cl.do_cld_nv(array->at(i));
     }
 
     // We don't need to keep track of new CLDs anymore.
@@ -5207,7 +5207,7 @@
     ResourceMark rm;
     GrowableArray<ClassLoaderData*>* array = ClassLoaderDataGraph::new_clds();
     for (int i = 0; i < array->length(); i++) {
-      mrias_cl.do_class_loader_data(array->at(i));
+      mrias_cl.do_cld_nv(array->at(i));
    }
 
     // We don't need to keep track of new CLDs anymore.
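The hunks above replace the single virtual do_class_loader_data() entry point with a do_cld()/do_cld_nv() pair, where the virtual function simply forwards to the non-virtual one. The standalone C++ sketch below is not part of the patch and uses simplified stand-in types (OopClosure, MarkingClosure, a bare ClassLoaderData struct); it only illustrates why such pairs exist: code templated on the concrete closure type can call the _nv variant directly and have it inlined, while code that only sees the base class still goes through the virtual call.

// Standalone sketch (not HotSpot code; all names are simplified stand-ins).
#include <cstdio>

struct ClassLoaderData { int id; };

struct OopClosure {
  // Virtual entry point: forwards to the non-virtual one by default.
  virtual void do_cld(ClassLoaderData* cld) { do_cld_nv(cld); }
  // Non-virtual entry point: the "real" implementation lives here.
  void do_cld_nv(ClassLoaderData* cld) { std::printf("base cld %d\n", cld->id); }
  virtual ~OopClosure() {}
};

struct MarkingClosure : public OopClosure {
  virtual void do_cld(ClassLoaderData* cld) { do_cld_nv(cld); }  // keep both in sync
  void do_cld_nv(ClassLoaderData* cld) { std::printf("marking cld %d\n", cld->id); }
};

// Type-specialized iteration: the closure type is a template parameter, so the
// call below is resolved at compile time (no vtable lookup).
template <class ClosureType>
void iterate_clds(ClosureType* cl, ClassLoaderData* clds, int n) {
  for (int i = 0; i < n; i++) {
    cl->do_cld_nv(&clds[i]);
  }
}

int main() {
  ClassLoaderData clds[2] = { {0}, {1} };
  MarkingClosure cl;
  iterate_clds(&cl, clds, 2);   // devirtualized path
  OopClosure* base = &cl;
  base->do_cld(&clds[0]);       // virtual path still works
  return 0;
}

The comment added to iterator.hpp later in this patch states the resulting contract: the virtual and the _nv versions need to be updated together, or else the devirtualization will break.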
--- old/src/share/vm/gc/g1/g1MarkSweep.cpp 2015-06-22 11:04:42.402672393 +0200
+++ new/src/share/vm/gc/g1/g1MarkSweep.cpp 2015-06-22 11:04:42.286671818 +0200
@@ -74,7 +74,7 @@
   assert(rp != NULL, "should be non-NULL");
   assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Precondition");
 
-  GenMarkSweep::_ref_processor = rp;
+  GenMarkSweep::set_ref_processor(rp);
   rp->setup_policy(clear_all_softrefs);
 
   // When collecting the permanent generation Method*s may be moving,
@@ -108,7 +108,7 @@
   JvmtiExport::gc_epilogue();
 
   // refs processing: clean slate
-  GenMarkSweep::_ref_processor = NULL;
+  GenMarkSweep::set_ref_processor(NULL);
 }
--- old/src/share/vm/gc/parallel/psMarkSweep.cpp 2015-06-22 11:04:42.618673464 +0200
+++ new/src/share/vm/gc/parallel/psMarkSweep.cpp 2015-06-22 11:04:42.486672809 +0200
@@ -60,7 +60,7 @@
 
 void PSMarkSweep::initialize() {
   MemRegion mr = ParallelScavengeHeap::heap()->reserved_region();
-  _ref_processor = new ReferenceProcessor(mr);     // a vanilla ref proc
+  set_ref_processor(new ReferenceProcessor(mr));   // a vanilla ref proc
   _counters = new CollectorCounters("PSMarkSweep", 1);
 }
--- old/src/share/vm/gc/serial/genMarkSweep.cpp 2015-06-22 11:04:42.898674852 +0200
+++ new/src/share/vm/gc/serial/genMarkSweep.cpp 2015-06-22 11:04:42.718673960 +0200
@@ -67,7 +67,7 @@
   // hook up weak ref data so it can be used during Mark-Sweep
   assert(ref_processor() == NULL, "no stomping");
   assert(rp != NULL, "should be non-NULL");
-  _ref_processor = rp;
+  set_ref_processor(rp);
   rp->setup_policy(clear_all_softrefs);
 
   GCTraceTime t1(GCCauseString("Full GC", gch->gc_cause()), PrintGC && !PrintGCDetails, true, NULL, _gc_tracer->gc_id());
@@ -136,7 +136,7 @@
   }
 
   // refs processing: clean slate
-  _ref_processor = NULL;
+  set_ref_processor(NULL);
 
   // Update heap occupancy information which is used as
   // input to soft ref clearing policy at the next gc.
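All three collectors above now assign the reference processor through set_ref_processor() instead of writing the _ref_processor field directly. The setter is defined later in this patch (markSweep.cpp) and also forwards the pointer into mark_and_push_closure. The sketch below is a simplified, self-contained model of that idea, not the HotSpot sources; ReferenceProcessor and the closure are reduced to stubs.

// Standalone sketch (simplified stand-ins, not HotSpot code).
#include <cstddef>
#include <cstdio>

struct ReferenceProcessor { /* discovery state would live here */ };

struct MarkAndPushClosure {
  ReferenceProcessor* _ref_processor;
  MarkAndPushClosure() : _ref_processor(NULL) {}
  void set_ref_processor(ReferenceProcessor* rp) { _ref_processor = rp; }
};

class MarkSweep {
  static ReferenceProcessor* _ref_processor;
  static MarkAndPushClosure  mark_and_push_closure;
 public:
  static ReferenceProcessor* ref_processor() { return _ref_processor; }
  // Single choke point: keeps the static field and the closure's copy in sync,
  // so callers cannot update one and forget the other.
  static void set_ref_processor(ReferenceProcessor* rp) {
    _ref_processor = rp;
    mark_and_push_closure.set_ref_processor(rp);
  }
};

ReferenceProcessor* MarkSweep::_ref_processor = NULL;
MarkAndPushClosure  MarkSweep::mark_and_push_closure;

int main() {
  ReferenceProcessor rp;
  MarkSweep::set_ref_processor(&rp);   // hook up weak ref data before the collection
  std::printf("hooked up: %d\n", MarkSweep::ref_processor() == &rp);
  MarkSweep::set_ref_processor(NULL);  // refs processing: clean slate afterwards
  return 0;
}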
--- old/src/share/vm/gc/serial/markSweep.cpp 2015-06-22 11:04:43.142676062 +0200
+++ new/src/share/vm/gc/serial/markSweep.cpp 2015-06-22 11:04:42.986675289 +0200
@@ -28,6 +28,8 @@
 #include "gc/shared/collectedHeap.inline.hpp"
 #include "gc/shared/gcTimer.hpp"
 #include "gc/shared/gcTrace.hpp"
+#include "gc/shared/specialized_oop_closures.hpp"
+#include "memory/iterator.inline.hpp"
 #include "oops/instanceKlass.inline.hpp"
 #include "oops/instanceMirrorKlass.inline.hpp"
 #include "oops/methodData.hpp"
@@ -53,173 +55,33 @@
 void MarkSweep::FollowRootClosure::do_oop(oop* p)       { follow_root(p); }
 void MarkSweep::FollowRootClosure::do_oop(narrowOop* p) { follow_root(p); }
 
-MarkSweep::MarkAndPushClosure MarkSweep::mark_and_push_closure;
+MarkAndPushClosure            MarkSweep::mark_and_push_closure;
 CLDToOopClosure               MarkSweep::follow_cld_closure(&mark_and_push_closure);
 CLDToOopClosure               MarkSweep::adjust_cld_closure(&adjust_pointer_closure);
 
 template <typename T>
-void MarkSweep::MarkAndPushClosure::do_oop_nv(T* p)      { mark_and_push(p); }
-void MarkSweep::MarkAndPushClosure::do_oop(oop* p)       { do_oop_nv(p); }
-void MarkSweep::MarkAndPushClosure::do_oop(narrowOop* p) { do_oop_nv(p); }
-
-void MarkSweep::follow_class_loader(ClassLoaderData* cld) {
-  MarkSweep::follow_cld_closure.do_cld(cld);
-}
-
-void InstanceKlass::oop_ms_follow_contents(oop obj) {
-  assert(obj != NULL, "can't follow the content of NULL object");
-  MarkSweep::follow_klass(this);
-
-  oop_oop_iterate_oop_maps<true>(obj, &MarkSweep::mark_and_push_closure);
-}
-
-void InstanceMirrorKlass::oop_ms_follow_contents(oop obj) {
-  InstanceKlass::oop_ms_follow_contents(obj);
-
-  // Follow the klass field in the mirror
-  Klass* klass = java_lang_Class::as_Klass(obj);
-  if (klass != NULL) {
-    // An anonymous class doesn't have its own class loader, so the call
-    // to follow_klass will mark and push its java mirror instead of the
-    // class loader. When handling the java mirror for an anonymous class
-    // we need to make sure its class loader data is claimed, this is done
-    // by calling follow_class_loader explicitly. For non-anonymous classes
-    // the call to follow_class_loader is made when the class loader itself
-    // is handled.
-    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
-      MarkSweep::follow_class_loader(klass->class_loader_data());
-    } else {
-      MarkSweep::follow_klass(klass);
-    }
-  } else {
-    // If klass is NULL then this a mirror for a primitive type.
-    // We don't have to follow them, since they are handled as strong
-    // roots in Universe::oops_do.
-    assert(java_lang_Class::is_primitive(obj), "Sanity check");
-  }
-
-  oop_oop_iterate_statics<true>(obj, &MarkSweep::mark_and_push_closure);
-}
-
-void InstanceClassLoaderKlass::oop_ms_follow_contents(oop obj) {
-  InstanceKlass::oop_ms_follow_contents(obj);
-
-  ClassLoaderData * const loader_data = java_lang_ClassLoader::loader_data(obj);
-
-  // We must NULL check here, since the class loader
-  // can be found before the loader data has been set up.
-  if(loader_data != NULL) {
-    MarkSweep::follow_class_loader(loader_data);
-  }
-}
-
-template <class T>
-static void oop_ms_follow_contents_specialized(InstanceRefKlass* klass, oop obj) {
-  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
-  T heap_oop = oopDesc::load_heap_oop(referent_addr);
-  debug_only(
-    if(TraceReferenceGC && PrintGCDetails) {
-      gclog_or_tty->print_cr("InstanceRefKlass::oop_ms_follow_contents_specialized " PTR_FORMAT, p2i(obj));
-    }
-  )
-  if (!oopDesc::is_null(heap_oop)) {
-    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
-    if (!referent->is_gc_marked() &&
-        MarkSweep::ref_processor()->discover_reference(obj, klass->reference_type())) {
-      // reference was discovered, referent will be traversed later
-      klass->InstanceKlass::oop_ms_follow_contents(obj);
-      debug_only(
-        if(TraceReferenceGC && PrintGCDetails) {
-          gclog_or_tty->print_cr("       Non NULL enqueued " PTR_FORMAT, p2i(obj));
-        }
-      )
-      return;
-    } else {
-      // treat referent as normal oop
-      debug_only(
-        if(TraceReferenceGC && PrintGCDetails) {
-          gclog_or_tty->print_cr("       Non NULL normal " PTR_FORMAT, p2i(obj));
-        }
-      )
-      MarkSweep::mark_and_push(referent_addr);
-    }
-  }
-  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
-  // Treat discovered as normal oop, if ref is not "active",
-  // i.e. if next is non-NULL.
-  T next_oop = oopDesc::load_heap_oop(next_addr);
-  if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
-    T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
-    debug_only(
-      if(TraceReferenceGC && PrintGCDetails) {
-        gclog_or_tty->print_cr("   Process discovered as normal "
-                               PTR_FORMAT, p2i(discovered_addr));
-      }
-    )
-    MarkSweep::mark_and_push(discovered_addr);
-  }
-  // treat next as normal oop.  next is a link in the reference queue.
-  debug_only(
-    if(TraceReferenceGC && PrintGCDetails) {
-      gclog_or_tty->print_cr("   Process next as normal " PTR_FORMAT, p2i(next_addr));
-    }
-  )
-  MarkSweep::mark_and_push(next_addr);
-  klass->InstanceKlass::oop_ms_follow_contents(obj);
-}
-
-void InstanceRefKlass::oop_ms_follow_contents(oop obj) {
-  if (UseCompressedOops) {
-    oop_ms_follow_contents_specialized<narrowOop>(this, obj);
-  } else {
-    oop_ms_follow_contents_specialized<oop>(this, obj);
-  }
-}
+template <typename T>
+void MarkAndPushClosure::do_oop_nv(T* p)                 { MarkSweep::mark_and_push(p); }
+void MarkAndPushClosure::do_oop(oop* p)                  { do_oop_nv(p); }
+void MarkAndPushClosure::do_oop(narrowOop* p)            { do_oop_nv(p); }
+bool MarkAndPushClosure::do_metadata()                   { return do_metadata_nv(); }
+bool MarkAndPushClosure::do_metadata_nv()                { return true; }
+void MarkAndPushClosure::do_klass(Klass* k)              { do_klass_nv(k); }
+void MarkAndPushClosure::do_klass_nv(Klass* k)           { MarkSweep::follow_klass(k); }
+void MarkAndPushClosure::do_cld(ClassLoaderData* cld)    { do_cld_nv(cld); }
+void MarkAndPushClosure::do_cld_nv(ClassLoaderData* cld) { MarkSweep::follow_cld(cld); }
 
-template <class T>
-static void oop_ms_follow_contents_specialized(oop obj, int index) {
-  objArrayOop a = objArrayOop(obj);
-  const size_t len = size_t(a->length());
-  const size_t beg_index = size_t(index);
+void MarkSweep::follow_array(objArrayOop array, int index) {
+  const int len = array->length();
+  const int beg_index = index;
   assert(beg_index < len || len == 0, "index too large");
 
-  const size_t stride = MIN2(len - beg_index, ObjArrayMarkingStride);
-  const size_t end_index = beg_index + stride;
-  T* const base = (T*)a->base();
-  T* const beg = base + beg_index;
-  T* const end = base + end_index;
-
-  // Push the non-NULL elements of the next stride on the marking stack.
-  for (T* e = beg; e < end; e++) {
-    MarkSweep::mark_and_push(e);
-  }
-
-  if (end_index < len) {
-    MarkSweep::push_objarray(a, end_index); // Push the continuation.
-  }
-}
+  const int stride = MIN2(len - beg_index, (int) ObjArrayMarkingStride);
+  const int end_index = beg_index + stride;
 
-void ObjArrayKlass::oop_ms_follow_contents(oop obj) {
-  assert (obj->is_array(), "obj must be array");
-  MarkSweep::follow_klass(this);
-  if (UseCompressedOops) {
-    oop_ms_follow_contents_specialized<narrowOop>(obj, 0);
-  } else {
-    oop_ms_follow_contents_specialized<oop>(obj, 0);
-  }
-}
+  array->oop_iterate_range(&mark_and_push_closure, beg_index, end_index);
 
-void TypeArrayKlass::oop_ms_follow_contents(oop obj) {
-  assert(obj->is_typeArray(),"must be a type array");
-  // Performance tweak: We skip iterating over the klass pointer since we
-  // know that Universe::TypeArrayKlass never moves.
-}
-
-void MarkSweep::follow_array(objArrayOop array, int index) {
-  if (UseCompressedOops) {
-    oop_ms_follow_contents_specialized<narrowOop>(array, index);
-  } else {
-    oop_ms_follow_contents_specialized<oop>(array, index);
+  if (end_index < len) {
+    MarkSweep::push_objarray(array, end_index); // Push the continuation.
   }
 }
@@ -266,6 +128,11 @@
   }
 }
 
+void MarkSweep::set_ref_processor(ReferenceProcessor* rp) {
+  _ref_processor = rp;
+  mark_and_push_closure.set_ref_processor(_ref_processor);
+}
+
 MarkSweep::AdjustPointerClosure MarkSweep::adjust_pointer_closure;
 
 template <typename T>
@@ -405,3 +272,6 @@
   // know that Universe::TypeArrayKlass never moves.
   return t->object_size();
 }
+
+// Generate MS specialized oop_oop_iterate functions.
+SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_MS(ALL_KLASS_OOP_OOP_ITERATE_DEFN)
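The new MarkSweep::follow_array() above no longer walks a whole object array at once: it visits at most ObjArrayMarkingStride elements via oop_iterate_range() and, if anything is left, pushes a continuation with push_objarray() so the remainder is handled by a later pop from the marking stack. The following self-contained sketch models only that chunking scheme, using plain integers and an explicit std::stack; it is illustrative, and none of its names come from HotSpot.

// Standalone sketch of the stride/continuation scheme (not HotSpot code).
#include <algorithm>
#include <cstdio>
#include <stack>
#include <utility>
#include <vector>

typedef std::pair<const std::vector<int>*, int> Continuation;  // (array, next index)

static void follow_array(const std::vector<int>& array, int index,
                         std::stack<Continuation>& work_stack, int stride_limit) {
  const int len = (int)array.size();
  const int beg_index = index;
  const int stride = std::min(len - beg_index, stride_limit);
  const int end_index = beg_index + stride;

  // "Visit" one stride of elements (the real code applies the marking closure).
  for (int i = beg_index; i < end_index; i++) {
    std::printf("visit element %d\n", array[i]);
  }

  if (end_index < len) {
    work_stack.push(Continuation(&array, end_index));  // push the continuation
  }
}

int main() {
  std::vector<int> big(10);
  for (int i = 0; i < 10; i++) big[i] = i;

  std::stack<Continuation> work_stack;
  follow_array(big, 0, work_stack, 4);                  // first stride: elements 0..3
  while (!work_stack.empty()) {
    Continuation c = work_stack.top();
    work_stack.pop();
    follow_array(*c.first, c.second, work_stack, 4);    // continue where we left off
  }
  return 0;
}

Bounding the work done per stack entry keeps a single huge array from monopolizing one unit of marking work, which is the same reason the original oop_ms_follow_contents_specialized code used ObjArrayMarkingStride.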
--- old/src/share/vm/gc/serial/markSweep.hpp 2015-06-22 11:04:43.394677312 +0200
+++ new/src/share/vm/gc/serial/markSweep.hpp 2015-06-22 11:04:43.250676598 +0200
@@ -49,6 +49,7 @@
 
 // declared at end
 class PreservedMark;
+class MarkAndPushClosure;
 
 class MarkSweep : AllStatic {
   //
@@ -60,13 +61,6 @@
     virtual void do_oop(narrowOop* p);
   };
 
-  class MarkAndPushClosure: public ExtendedOopClosure {
-   public:
-    template <typename T> void do_oop_nv(T* p);
-    virtual void do_oop(oop* p);
-    virtual void do_oop(narrowOop* p);
-  };
-
   class FollowStackClosure: public VoidClosure {
    public:
     virtual void do_void();
@@ -146,6 +140,7 @@
 
   // Reference Processing
   static ReferenceProcessor* const ref_processor() { return _ref_processor; }
+  static void set_ref_processor(ReferenceProcessor* rp);
 
   // Archive Object handling
   static inline bool is_archive_object(oop object);
@@ -171,7 +166,7 @@
 
   static void follow_klass(Klass* klass);
 
-  static void follow_class_loader(ClassLoaderData* cld);
+  static void follow_cld(ClassLoaderData* cld);
 
   static int adjust_pointers(oop obj);
 
@@ -183,6 +178,24 @@
   template <class T> static inline void adjust_pointer(T* p);
 };
 
+class MarkAndPushClosure: public ExtendedOopClosure {
+public:
+  template <typename T> void do_oop_nv(T* p);
+  virtual void do_oop(oop* p);
+  virtual void do_oop(narrowOop* p);
+
+  virtual bool do_metadata();
+  bool do_metadata_nv();
+
+  virtual void do_klass(Klass* k);
+  void do_klass_nv(Klass* k);
+
+  virtual void do_cld(ClassLoaderData* cld);
+  void do_cld_nv(ClassLoaderData* cld);
+
+  void set_ref_processor(ReferenceProcessor* rp) { _ref_processor = rp; }
+};
+
 class PreservedMark VALUE_OBJ_CLASS_SPEC {
 private:
   oop _obj;
--- old/src/share/vm/gc/serial/markSweep.inline.hpp 2015-06-22 11:04:43.650678581 +0200
+++ new/src/share/vm/gc/serial/markSweep.inline.hpp 2015-06-22 11:04:43.486677768 +0200
@@ -72,10 +72,14 @@
   MarkSweep::mark_and_push(&op);
 }
 
+inline void MarkSweep::follow_cld(ClassLoaderData* cld) {
+  MarkSweep::follow_cld_closure.do_cld(cld);
+}
+
 inline void MarkSweep::follow_object(oop obj) {
   assert(obj->is_gc_marked(), "should be marked");
 
-  obj->ms_follow_contents();
+  obj->oop_iterate(&mark_and_push_closure);
 }
 
 template <class T> inline void MarkSweep::follow_root(T* p) {
--- old/src/share/vm/gc/shared/specialized_oop_closures.hpp 2015-06-22 11:04:43.894679791 +0200
+++ new/src/share/vm/gc/shared/specialized_oop_closures.hpp 2015-06-22 11:04:43.746679057 +0200
@@ -45,6 +45,8 @@
 // ParNew
 class ParScanWithBarrierClosure;
 class ParScanWithoutBarrierClosure;
+// MarkSweep
+class MarkAndPushClosure;
 // CMS
 class MarkRefsIntoAndScanClosure;
 class Par_MarkRefsIntoAndScanClosure;
@@ -87,6 +89,9 @@
   SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_S(f)   \
   SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_P(f)
 
+#define SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_MS(f) \
+  f(MarkAndPushClosure,_nv)
+
 #if INCLUDE_ALL_GCS
 #define SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_CMS(f) \
   f(MarkRefsIntoAndScanClosure,_nv)                 \
@@ -101,10 +106,12 @@
 
 #if INCLUDE_ALL_GCS
 #define SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_2(f) \
+  SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_MS(f)      \
   SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_CMS(f)     \
   SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_G1(f)
 #else  // INCLUDE_ALL_GCS
-#define SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_2(f)
+#define SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_2(f) \
+  SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_MS(f)
 #endif // INCLUDE_ALL_GCS
--- old/src/share/vm/memory/iterator.hpp 2015-06-22 11:04:44.118680902 +0200
+++ new/src/share/vm/memory/iterator.hpp 2015-06-22 11:04:43.978680208 +0200
@@ -61,7 +61,7 @@
   //
   // 1) do_klass on the header klass pointer.
   // 2) do_klass on the klass pointer in the mirrors.
-  // 3) do_class_loader_data on the class loader data in class loaders.
+  // 3) do_cld on the class loader data in class loaders.
   //
   // The virtual (without suffix) and the non-virtual (with _nv suffix) need
   // to be updated together, or else the devirtualization will break.
@@ -77,7 +77,8 @@
   virtual void do_klass(Klass* k) { do_klass_nv(k); }
   void do_klass_nv(Klass* k)      { ShouldNotReachHere(); }
 
-  virtual void do_class_loader_data(ClassLoaderData* cld) { ShouldNotReachHere(); }
+  virtual void do_cld(ClassLoaderData* cld) { do_cld_nv(cld); }
+  void do_cld_nv(ClassLoaderData* cld)      { ShouldNotReachHere(); }
 
   // True iff this closure may be safely applied more than once to an oop
   // location without an intervening "major reset" (like the end of a GC).
@@ -186,7 +187,8 @@
   virtual void do_klass(Klass* k);
   void do_klass_nv(Klass* k);
 
-  virtual void do_class_loader_data(ClassLoaderData* cld);
+  virtual void do_cld(ClassLoaderData* cld) { do_cld_nv(cld); }
+  void do_cld_nv(ClassLoaderData* cld);
 };
 
 // ObjectClosure is used for iterating through an object space
@@ -370,6 +372,7 @@
 public:
  template <class OopClosureType, typename T> static void do_oop(OopClosureType* closure, T* p);
  template <class OopClosureType> static void do_klass(OopClosureType* closure, Klass* k);
+ template <class OopClosureType> static void do_cld(OopClosureType* closure, ClassLoaderData* cld);
  template <class OopClosureType> static bool do_metadata(OopClosureType* closure);
 };
 
@@ -378,6 +381,7 @@
 public:
  template <class OopClosureType, typename T> static void do_oop(OopClosureType* closure, T* p);
  template <class OopClosureType> static void do_klass(OopClosureType* closure, Klass* k);
+ template <class OopClosureType> static void do_cld(OopClosureType* closure, ClassLoaderData* cld);
  template <class OopClosureType> static bool do_metadata(OopClosureType* closure);
 };
--- old/src/share/vm/memory/iterator.inline.hpp 2015-06-22 11:04:44.366682132 +0200
+++ new/src/share/vm/memory/iterator.inline.hpp 2015-06-22 11:04:44.218681398 +0200
@@ -36,7 +36,7 @@
 #include "oops/typeArrayKlass.inline.hpp"
 #include "utilities/debug.hpp"
 
-inline void MetadataAwareOopClosure::do_class_loader_data(ClassLoaderData* cld) {
+inline void MetadataAwareOopClosure::do_cld_nv(ClassLoaderData* cld) {
   assert(_klass_closure._oop_closure == this, "Must be");
 
   bool claim = true;  // Must claim the class loader data before processing.
@@ -45,10 +45,12 @@
 
 inline void MetadataAwareOopClosure::do_klass_nv(Klass* k) {
   ClassLoaderData* cld = k->class_loader_data();
-  do_class_loader_data(cld);
+  do_cld_nv(cld);
 }
 
-inline void MetadataAwareOopClosure::do_klass(Klass* k) { do_klass_nv(k); }
+inline void MetadataAwareOopClosure::do_klass(Klass* k) {
+  do_klass_nv(k);
+}
 
 #ifdef ASSERT
 // This verification is applied to all visited oops.
@@ -78,6 +80,10 @@
   closure->do_klass_nv(k);
 }
 template <class OopClosureType>
+void Devirtualizer<true>::do_cld(OopClosureType* closure, ClassLoaderData* cld) {
+  closure->do_cld_nv(cld);
+}
+template <class OopClosureType>
 inline bool Devirtualizer<true>::do_metadata(OopClosureType* closure) {
   // Make sure the non-virtual and the virtual versions match.
   assert(closure->do_metadata_nv() == closure->do_metadata(), "Inconsistency in do_metadata");
@@ -96,6 +102,10 @@
   closure->do_klass(k);
 }
 template <class OopClosureType>
+void Devirtualizer<false>::do_cld(OopClosureType* closure, ClassLoaderData* cld) {
+  closure->do_cld(cld);
+}
+template <class OopClosureType>
 bool Devirtualizer<false>::do_metadata(OopClosureType* closure) {
   return closure->do_metadata();
 }
--- old/src/share/vm/oops/instanceClassLoaderKlass.hpp 2015-06-22 11:04:44.578683183 +0200
+++ new/src/share/vm/oops/instanceClassLoaderKlass.hpp 2015-06-22 11:04:44.446682528 +0200
@@ -51,7 +51,6 @@
   // GC specific object visitors
   //
   // Mark Sweep
-  void oop_ms_follow_contents(oop obj);
   int oop_ms_adjust_pointers(oop obj);
 #if INCLUDE_ALL_GCS
   // Parallel Scavenge
--- old/src/share/vm/oops/instanceClassLoaderKlass.inline.hpp 2015-06-22 11:04:44.822684393 +0200
+++ new/src/share/vm/oops/instanceClassLoaderKlass.inline.hpp 2015-06-22 11:04:44.670683639 +0200
@@ -26,6 +26,7 @@
 #define SHARE_VM_OOPS_INSTANCECLASSLOADERKLASS_INLINE_HPP
 
 #include "classfile/javaClasses.hpp"
+#include "memory/iterator.inline.hpp"
 #include "oops/instanceClassLoaderKlass.hpp"
 #include "oops/instanceKlass.inline.hpp"
 #include "oops/oop.inline.hpp"
@@ -41,7 +42,7 @@
   ClassLoaderData* cld = java_lang_ClassLoader::loader_data(obj);
   // cld can be null if we have a non-registered class loader.
   if (cld != NULL) {
-    closure->do_class_loader_data(cld);
+    Devirtualizer<nv>::do_cld(closure, cld);
   }
 }
 
@@ -70,7 +71,7 @@
   ClassLoaderData* cld = java_lang_ClassLoader::loader_data(obj);
   // cld can be null if we have a non-registered class loader.
   if (cld != NULL) {
-    closure->do_class_loader_data(cld);
+    Devirtualizer<nv>::do_cld(closure, cld);
   }
 }
--- old/src/share/vm/oops/instanceKlass.hpp 2015-06-22 11:04:45.074685643 +0200
+++ new/src/share/vm/oops/instanceKlass.hpp 2015-06-22 11:04:44.922684889 +0200
@@ -998,7 +998,6 @@
   // GC specific object visitors
   //
   // Mark Sweep
-  void oop_ms_follow_contents(oop obj);
   int oop_ms_adjust_pointers(oop obj);
 #if INCLUDE_ALL_GCS
   // Parallel Scavenge
--- old/src/share/vm/oops/instanceMirrorKlass.hpp 2015-06-22 11:04:45.298686753 +0200
+++ new/src/share/vm/oops/instanceMirrorKlass.hpp 2015-06-22 11:04:45.166686099 +0200
@@ -91,7 +91,6 @@
   // GC specific object visitors
   //
   // Mark Sweep
-  void oop_ms_follow_contents(oop obj);
   int oop_ms_adjust_pointers(oop obj);
 #if INCLUDE_ALL_GCS
   // Parallel Scavenge
--- old/src/share/vm/oops/instanceMirrorKlass.inline.hpp 2015-06-22 11:04:45.494687725 +0200
+++ new/src/share/vm/oops/instanceMirrorKlass.inline.hpp 2015-06-22 11:04:45.378687150 +0200
@@ -60,7 +60,16 @@
   Klass* klass = java_lang_Class::as_Klass(obj);
   // We'll get NULL for primitive mirrors.
   if (klass != NULL) {
-    Devirtualizer<nv>::do_klass(closure, klass);
+    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
+      // An anonymous class doesn't have its own class loader, so when handling
+      // the java mirror for an anonymous class we need to make sure its class
+      // loader data is claimed; this is done by calling do_cld explicitly.
+      // For non-anonymous classes the call to do_cld is made when the class
+      // loader itself is handled.
+      Devirtualizer<nv>::do_cld(closure, klass->class_loader_data());
+    } else {
+      Devirtualizer<nv>::do_klass(closure, klass);
+    }
   }
 }
--- old/src/share/vm/oops/instanceRefKlass.hpp 2015-06-22 11:04:45.742688955 +0200
+++ new/src/share/vm/oops/instanceRefKlass.hpp 2015-06-22 11:04:45.582688162 +0200
@@ -67,7 +67,6 @@
   // GC specific object visitors
   //
   // Mark Sweep
-  void oop_ms_follow_contents(oop obj);
   int oop_ms_adjust_pointers(oop obj);
 #if INCLUDE_ALL_GCS
   // Parallel Scavenge
--- old/src/share/vm/oops/klass.hpp 2015-06-22 11:04:45.986690165 +0200
+++ new/src/share/vm/oops/klass.hpp 2015-06-22 11:04:45.842689451 +0200
@@ -571,7 +571,6 @@
   // GC specific object visitors
   //
   // Mark Sweep
-  virtual void oop_ms_follow_contents(oop obj) = 0;
   virtual int oop_ms_adjust_pointers(oop obj) = 0;
 #if INCLUDE_ALL_GCS
   // Parallel Scavenge
--- old/src/share/vm/oops/objArrayKlass.hpp 2015-06-22 11:04:46.194691196 +0200
+++ new/src/share/vm/oops/objArrayKlass.hpp 2015-06-22 11:04:46.074690601 +0200
@@ -105,7 +105,6 @@
   // GC specific object visitors
   //
   // Mark Sweep
-  void oop_ms_follow_contents(oop obj);
   int oop_ms_adjust_pointers(oop obj);
 #if INCLUDE_ALL_GCS
   // Parallel Scavenge
--- old/src/share/vm/oops/oop.hpp 2015-06-22 11:04:46.390692168 +0200
+++ new/src/share/vm/oops/oop.hpp 2015-06-22 11:04:46.274691593 +0200
@@ -331,7 +331,6 @@
   // Garbage Collection support
 
   // Mark Sweep
-  void ms_follow_contents();
   // Adjust all pointers in this object to point at it's forwarded location and
   // return the size of this oop.  This is used by the MarkSweep collector.
   int ms_adjust_pointers();
--- old/src/share/vm/oops/oop.inline.hpp 2015-06-22 11:04:46.622693319 +0200
+++ new/src/share/vm/oops/oop.inline.hpp 2015-06-22 11:04:46.474692585 +0200
@@ -699,10 +699,6 @@
   }
 }
 
-inline void oopDesc::ms_follow_contents() {
-  klass()->oop_ms_follow_contents(this);
-}
-
 inline int oopDesc::ms_adjust_pointers() {
   debug_only(int check_size = size());
   int s = klass()->oop_ms_adjust_pointers(this);
--- old/src/share/vm/oops/typeArrayKlass.hpp 2015-06-22 11:04:46.858694489 +0200
+++ new/src/share/vm/oops/typeArrayKlass.hpp 2015-06-22 11:04:46.718693795 +0200
@@ -75,7 +75,6 @@
   // GC specific object visitors
   //
   // Mark Sweep
-  void oop_ms_follow_contents(oop obj);
   int oop_ms_adjust_pointers(oop obj);
 #if INCLUDE_ALL_GCS
   // Parallel Scavenge
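The iterator.inline.hpp hunks earlier in this patch add a do_cld() entry point to both Devirtualizer specializations: the nv = true form calls the closure's non-virtual do_cld_nv(), while the nv = false form makes an ordinary virtual call to do_cld(). The self-contained sketch below mirrors that structure with simplified stand-in types to show how the same templated iteration code ends up with either a statically bound or a virtual call depending on the nv parameter; it is an illustration, not the HotSpot sources.

// Standalone sketch of the Devirtualizer dispatch pattern (simplified names).
#include <cstdio>

struct ClassLoaderData { int id; };

struct OopClosure {
  virtual void do_cld(ClassLoaderData* cld) { do_cld_nv(cld); }
  void do_cld_nv(ClassLoaderData* cld) { std::printf("generic cld %d\n", cld->id); }
  virtual ~OopClosure() {}
};

struct MarkingClosure : public OopClosure {
  virtual void do_cld(ClassLoaderData* cld) { do_cld_nv(cld); }
  void do_cld_nv(ClassLoaderData* cld) { std::printf("marking cld %d\n", cld->id); }
};

template <bool nv> class Devirtualizer;

// Dispatches to the non-virtual function.
template <> class Devirtualizer<true> {
 public:
  template <class OopClosureType>
  static void do_cld(OopClosureType* closure, ClassLoaderData* cld) {
    closure->do_cld_nv(cld);
  }
};

// Dispatches to the virtual function.
template <> class Devirtualizer<false> {
 public:
  template <class OopClosureType>
  static void do_cld(OopClosureType* closure, ClassLoaderData* cld) {
    closure->do_cld(cld);
  }
};

// The same iteration template serves both cases.
template <bool nv, class OopClosureType>
void iterate_mirror_cld(OopClosureType* closure, ClassLoaderData* cld) {
  Devirtualizer<nv>::do_cld(closure, cld);
}

int main() {
  ClassLoaderData cld = { 7 };
  MarkingClosure mk;
  iterate_mirror_cld<true>(&mk, &cld);    // statically bound to MarkingClosure::do_cld_nv
  OopClosure* base = &mk;
  iterate_mirror_cld<false>(base, &cld);  // virtual dispatch through do_cld
  return 0;
}

This is the mechanism the patch relies on when instanceClassLoaderKlass.inline.hpp and instanceMirrorKlass.inline.hpp replace direct closure->do_class_loader_data(cld) calls with Devirtualizer-mediated do_cld calls.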