< prev index next >

src/hotspot/share/oops/instanceKlass.cpp

Print this page




// Returns true if nm is registered in this klass' dependency context.
// Delegates to DependencyContext; compiled only in non-product builds
// (the enclosing #ifndef PRODUCT opens above this excerpt).
2183 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
2184   return dependencies().is_dependent_nmethod(nm);
2185 }
2186 #endif //PRODUCT
2187 
// Clear weak references held by this InstanceKlass to metadata whose
// class loaders have been unloaded. Called during class unloading.
2188 void InstanceKlass::clean_weak_instanceklass_links() {
2189   clean_implementors_list();  // drop the cached implementor if its loader is dead
2190   clean_method_data();        // scrub MethodData (see clean_method_data() below)
2191 }
2192 
// For an interface: if the single cached implementor klass belongs to a
// dead class loader, atomically NULL out the implementor slot. Runs
// concurrently with code that inserts new implementors, hence the
// load_acquire + CAS retry loop instead of a plain store.
2193 void InstanceKlass::clean_implementors_list() {
2194   assert(is_loader_alive(), "this klass should be live");
2195   if (is_interface()) {
2196     assert (ClassUnloading, "only called for ClassUnloading");
2197     for (;;) {
2198       // Use load_acquire due to competing with inserts
2199       Klass* impl = Atomic::load_acquire(adr_implementor());
2200       if (impl != NULL && !impl->is_loader_alive()) {
2201         // NULL this field, might be an unloaded klass or NULL
2202         Klass* volatile* klass = adr_implementor();
2203         if (Atomic::cmpxchg((Klass*)NULL, klass, impl) == impl) {  // old Atomic API: (exchange_value, dest, compare_value)
2204           // Successfully unlinking implementor.
2205           if (log_is_enabled(Trace, class, unload)) {
2206             ResourceMark rm;
2207             log_trace(class, unload)("unlinking class (implementor): %s", impl->external_name());
2208           }
2209           return;
2210         }
2211         // CAS lost to a concurrent update of the slot: retry with a fresh load.
2212       } else {
2213         return;  // slot is NULL or the implementor is still alive -- nothing to do
2214       }
2215     }
2216   }
2217 }
2217 
2218 void InstanceKlass::clean_method_data() {
2219   for (int m = 0; m < methods()->length(); m++) {
2220     MethodData* mdo = methods()->at(m)->method_data();
2221     if (mdo != NULL) {
2222       MutexLocker ml(SafepointSynchronize::is_at_safepoint() ? NULL : mdo->extra_data_lock());
2223       mdo->clean_method_data(/*always_clean*/false);




// Returns true if nm is registered in this klass' dependency context.
// Delegates to DependencyContext; compiled only in non-product builds
// (the enclosing #ifndef PRODUCT opens above this excerpt).
2183 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
2184   return dependencies().is_dependent_nmethod(nm);
2185 }
2186 #endif //PRODUCT
2187 
// Clear weak references held by this InstanceKlass to metadata whose
// class loaders have been unloaded. Called during class unloading.
2188 void InstanceKlass::clean_weak_instanceklass_links() {
2189   clean_implementors_list();  // drop the cached implementor if its loader is dead
2190   clean_method_data();        // scrub MethodData (see clean_method_data() below)
2191 }
2192 
// For an interface: if the single cached implementor klass belongs to a
// dead class loader, atomically NULL out the implementor slot. Runs
// concurrently with code that inserts new implementors, hence the
// load_acquire + CAS retry loop instead of a plain store.
2193 void InstanceKlass::clean_implementors_list() {
2194   assert(is_loader_alive(), "this klass should be live");
2195   if (is_interface()) {
2196     assert (ClassUnloading, "only called for ClassUnloading");
2197     for (;;) {
2198       // Use load_acquire due to competing with inserts
2199       Klass* impl = Atomic::load_acquire(adr_implementor());
2200       if (impl != NULL && !impl->is_loader_alive()) {
2201         // NULL this field, might be an unloaded klass or NULL
2202         Klass* volatile* klass = adr_implementor();
2203         if (Atomic::cmpxchg(klass, impl, (Klass*)NULL) == impl) {  // new Atomic API: (dest, compare_value, exchange_value)
2204           // Successfully unlinking implementor.
2205           if (log_is_enabled(Trace, class, unload)) {
2206             ResourceMark rm;
2207             log_trace(class, unload)("unlinking class (implementor): %s", impl->external_name());
2208           }
2209           return;
2210         }
2211         // CAS lost to a concurrent update of the slot: retry with a fresh load.
2212       } else {
2213         return;  // slot is NULL or the implementor is still alive -- nothing to do
2214       }
2215     }
2216   }
2217 }
2217 
2218 void InstanceKlass::clean_method_data() {
2219   for (int m = 0; m < methods()->length(); m++) {
2220     MethodData* mdo = methods()->at(m)->method_data();
2221     if (mdo != NULL) {
2222       MutexLocker ml(SafepointSynchronize::is_at_safepoint() ? NULL : mdo->extra_data_lock());
2223       mdo->clean_method_data(/*always_clean*/false);


< prev index next >