diff --git a/src/hotspot/share/classfile/classLoaderData.cpp b/src/hotspot/share/classfile/classLoaderData.cpp
index bc61cce..134f165 100644
--- a/src/hotspot/share/classfile/classLoaderData.cpp
+++ b/src/hotspot/share/classfile/classLoaderData.cpp
@@ -1117,14 +1117,14 @@ ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_unsafe_anonymo
 }
 
 void ClassLoaderDataGraph::cld_do(CLDClosure* cl) {
-  assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
+  assert_locked_or_safepoint_weak(ClassLoaderDataGraph_lock);
   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->_next) {
     cl->do_cld(cld);
   }
 }
 
 void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) {
-  assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
+  assert_locked_or_safepoint_weak(ClassLoaderDataGraph_lock);
   // Only walk the head until any clds not purged from prior unloading
   // (CMS doesn't purge right away).
   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
@@ -1134,7 +1134,7 @@ void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) {
 }
 
 void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) {
-  assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
+  assert_locked_or_safepoint_weak(ClassLoaderDataGraph_lock);
   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->_next) {
     CLDClosure* closure = cld->keep_alive() ? strong : weak;
     if (closure != NULL) {
@@ -1144,7 +1144,7 @@ void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) {
 }
 
 void ClassLoaderDataGraph::always_strong_cld_do(CLDClosure* cl) {
-  assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
+  assert_locked_or_safepoint_weak(ClassLoaderDataGraph_lock);
   if (ClassUnloading) {
     roots_cld_do(cl, NULL);
   } else {
diff --git a/src/hotspot/share/gc/z/zRootsIterator.cpp b/src/hotspot/share/gc/z/zRootsIterator.cpp
index f29cfdd..317c68d 100644
--- a/src/hotspot/share/gc/z/zRootsIterator.cpp
+++ b/src/hotspot/share/gc/z/zRootsIterator.cpp
@@ -143,7 +143,6 @@ ZRootsIterator::ZRootsIterator() :
   assert(SafepointSynchronize::is_at_safepoint(), "Should be at safepoint");
   ZStatTimer timer(ZSubPhasePauseRootsSetup);
   Threads::change_thread_claim_parity();
-  ClassLoaderDataGraph::clear_claimed_marks();
   COMPILER2_PRESENT(DerivedPointerTable::clear());
   CodeCache::gc_prologue();
   ZNMethodTable::gc_prologue();
@@ -240,10 +239,13 @@ ZConcurrentRootsIterator::ZConcurrentRootsIterator() :
     _jni_handles(this),
     _class_loader_data_graph(this) {
   ZStatTimer timer(ZSubPhaseConcurrentRootsSetup);
+  ClassLoaderDataGraph_lock->lock();
+  ClassLoaderDataGraph::clear_claimed_marks();
 }
 
 ZConcurrentRootsIterator::~ZConcurrentRootsIterator() {
   ZStatTimer timer(ZSubPhaseConcurrentRootsTeardown);
+  ClassLoaderDataGraph_lock->unlock();
 }
 
 void ZConcurrentRootsIterator::do_jni_handles(OopClosure* cl) {
diff --git a/src/hotspot/share/runtime/mutexLocker.cpp b/src/hotspot/share/runtime/mutexLocker.cpp
index 88dff46..fcfe143 100644
--- a/src/hotspot/share/runtime/mutexLocker.cpp
+++ b/src/hotspot/share/runtime/mutexLocker.cpp
@@ -167,6 +167,16 @@ void assert_locked_or_safepoint(const Monitor * lock) {
   fatal("must own lock %s", lock->name());
 }
 
+// a weaker assertion than the above
+void assert_locked_or_safepoint_weak(const Monitor * lock) {
+  if (IgnoreLockingAssertions) return;
+  assert(lock != NULL, "Need non-NULL lock");
+  if (lock->is_locked()) return;
+  if (SafepointSynchronize::is_at_safepoint()) return;
+  if (!Universe::is_fully_initialized()) return;
+  fatal("must own lock %s", lock->name());
+}
+
 // a stronger assertion than the above
 void assert_lock_strong(const Monitor * lock) {
   if (IgnoreLockingAssertions) return;
diff --git a/src/hotspot/share/runtime/mutexLocker.hpp b/src/hotspot/share/runtime/mutexLocker.hpp
index 312800a..c95192a 100644
--- a/src/hotspot/share/runtime/mutexLocker.hpp
+++ b/src/hotspot/share/runtime/mutexLocker.hpp
@@ -199,9 +199,11 @@ class MutexLocker: StackObj {
 // for debugging: check that we're already owning this lock (or are at a safepoint)
 #ifdef ASSERT
 void assert_locked_or_safepoint(const Monitor * lock);
+void assert_locked_or_safepoint_weak(const Monitor * lock);
 void assert_lock_strong(const Monitor * lock);
 #else
 #define assert_locked_or_safepoint(lock)
+#define assert_locked_or_safepoint_weak(lock)
 #define assert_lock_strong(lock)
 #endif
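
Note on the new assertion, with a sketch: the existing assert_locked_or_safepoint() requires that the calling thread itself holds the lock, while the assert_locked_or_safepoint_weak() added in mutexLocker.cpp above only requires that the lock is held by some thread (lock->is_locked()). That weaker check is what lets GC worker threads walk the class loader data graph while the thread that constructed ZConcurrentRootsIterator holds ClassLoaderDataGraph_lock for the duration of the concurrent roots iteration. The example below is a standalone toy illustration of that distinction, not HotSpot code; ToyMonitor, graph_lock, and the assertion helpers are hypothetical names.

// Toy sketch (not HotSpot code): illustrates "owned by self" vs. "locked by
// somebody", the difference between the strong and weak assertions above.
#include <atomic>
#include <cassert>
#include <thread>
#include <vector>

class ToyMonitor {
  std::atomic<std::thread::id> _owner{std::thread::id()};
 public:
  void lock()   { _owner = std::this_thread::get_id(); }   // real code would block on contention
  void unlock() { _owner = std::thread::id(); }
  bool is_locked() const     { return _owner.load() != std::thread::id(); }
  bool owned_by_self() const { return _owner.load() == std::this_thread::get_id(); }
};

ToyMonitor graph_lock;

// Strong flavor: the current thread must own the lock.
void assert_locked_strong() { assert(graph_lock.owned_by_self()); }

// Weak flavor: it is enough that some thread holds the lock.
void assert_locked_weak()   { assert(graph_lock.is_locked()); }

int main() {
  graph_lock.lock();                    // coordinator takes the lock, as the iterator ctor does
  std::vector<std::thread> workers;
  for (int i = 0; i < 4; i++) {
    workers.emplace_back([] {
      assert_locked_weak();             // passes: the coordinator holds the lock
      // assert_locked_strong();        // would fail: this worker does not own the lock
    });
  }
  for (std::thread& t : workers) {
    t.join();
  }
  graph_lock.unlock();                  // as the iterator dtor does
  return 0;
}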