
src/hotspot/share/oops/klass.cpp

Old version (before change):

 345                                                        Array<InstanceKlass*>* transitive_interfaces) {
 346   assert(num_extra_slots == 0, "override for complex klasses");
 347   assert(transitive_interfaces == NULL, "sanity");
 348   set_secondary_supers(Universe::the_empty_klass_array());
 349   return NULL;
 350 }
 351 
 352 
 353 // superklass links
 354 InstanceKlass* Klass::superklass() const {
 355   assert(super() == NULL || super()->is_instance_klass(), "must be instance klass");
 356   return _super == NULL ? NULL : InstanceKlass::cast(_super);
 357 }
 358 
 359 // subklass links.  Used by the compiler (and vtable initialization)
 360 // May be cleaned concurrently, so must use the Compile_lock.
 361 // The log parameter is for clean_weak_klass_links to report unlinked classes.
 362 Klass* Klass::subklass(bool log) const {
 363   // Need load_acquire on the _subklass, because it races with inserts that
 364   // publish freshly initialized data.
 365   for (Klass* chain = OrderAccess::load_acquire(&_subklass);
 366        chain != NULL;
 367        // Do not need load_acquire on _next_sibling, because inserts never
 368        // create _next_sibling edges to dead data.
 369        chain = Atomic::load(&chain->_next_sibling))
 370   {
 371     if (chain->is_loader_alive()) {
 372       return chain;
 373     } else if (log) {
 374       if (log_is_enabled(Trace, class, unload)) {
 375         ResourceMark rm;
 376         log_trace(class, unload)("unlinking class (subclass): %s", chain->external_name());
 377       }
 378     }
 379   }
 380   return NULL;
 381 }
 382 
 383 Klass* Klass::next_sibling(bool log) const {
 384   // Do not need load_acquire on _next_sibling, because inserts never
 385   // create _next_sibling edges to dead data.
 386   for (Klass* chain = Atomic::load(&_next_sibling);
 387        chain != NULL;
 388        chain = Atomic::load(&chain->_next_sibling)) {
 389     // Only return an alive klass; there may be stale klasses
 390     // in this chain if it was cleaned concurrently.
 391     if (chain->is_loader_alive()) {
 392       return chain;
 393     } else if (log) {
 394       if (log_is_enabled(Trace, class, unload)) {
 395         ResourceMark rm;
 396         log_trace(class, unload)("unlinking class (sibling): %s", chain->external_name());
 397       }
 398     }
 399   }
 400   return NULL;
 401 }
 402 
 403 void Klass::set_subklass(Klass* s) {
 404   assert(s != this, "sanity check");
 405   OrderAccess::release_store(&_subklass, s);
 406 }
 407 
 408 void Klass::set_next_sibling(Klass* s) {
 409   assert(s != this, "sanity check");
 410   // Does not need release semantics. If used by cleanup, it will link to
 411   // already safely published data, and if used by inserts, will be published
 412   // safely using cmpxchg.
 413   Atomic::store(s, &_next_sibling);
 414 }
 415 
 416 void Klass::append_to_sibling_list() {
 417   assert_locked_or_safepoint(Compile_lock);
 418   debug_only(verify();)
 419   // add ourselves to superklass' subklass list
 420   InstanceKlass* super = superklass();
 421   if (super == NULL) return;        // special case: class Object
 422   assert((!super->is_interface()    // interfaces cannot be supers
 423           && (super->superklass() == NULL || !is_interface())),
 424          "an interface can only be a subklass of Object");
 425 
 426   // Make sure there is no stale subklass head
 427   super->clean_subklass();
 428 
 429   for (;;) {
 430     Klass* prev_first_subklass = OrderAccess::load_acquire(&_super->_subklass);
 431     if (prev_first_subklass != NULL) {
 432       // set our sibling to be the superklass' previous first subklass
 433       assert(prev_first_subklass->is_loader_alive(), "must not attach dead klasses");
 434       set_next_sibling(prev_first_subklass);
 435     }
 436     // Note that prev_first_subklass is always alive, meaning no _next_sibling links
 437     // are ever created to dead klasses. This is an important invariant of the lock-free
 438     // cleaning protocol; it allows us to safely unlink dead klasses from the sibling list.
 439     if (Atomic::cmpxchg(this, &super->_subklass, prev_first_subklass) == prev_first_subklass) {
 440       return;
 441     }
 442   }
 443   debug_only(verify();)
 444 }
 445 
 446 void Klass::clean_subklass() {
 447   for (;;) {
 448     // Need load_acquire, due to contending with concurrent inserts
 449     Klass* subklass = OrderAccess::load_acquire(&_subklass);
 450     if (subklass == NULL || subklass->is_loader_alive()) {
 451       return;
 452     }
 453     // Try to fix _subklass until it points at something not dead.
 454     Atomic::cmpxchg(subklass->next_sibling(), &_subklass, subklass);
 455   }
 456 }
 457 
 458 void Klass::clean_weak_klass_links(bool unloading_occurred, bool clean_alive_klasses) {
 459   if (!ClassUnloading || !unloading_occurred) {
 460     return;
 461   }
 462 
 463   Klass* root = SystemDictionary::Object_klass();
 464   Stack<Klass*, mtGC> stack;
 465 
 466   stack.push(root);
 467   while (!stack.is_empty()) {
 468     Klass* current = stack.pop();
 469 

New version (after change; the OrderAccess::load_acquire and OrderAccess::release_store
calls are replaced with their Atomic equivalents, with unchanged ordering semantics):

 345                                                        Array<InstanceKlass*>* transitive_interfaces) {
 346   assert(num_extra_slots == 0, "override for complex klasses");
 347   assert(transitive_interfaces == NULL, "sanity");
 348   set_secondary_supers(Universe::the_empty_klass_array());
 349   return NULL;
 350 }
 351 
 352 
 353 // superklass links
 354 InstanceKlass* Klass::superklass() const {
 355   assert(super() == NULL || super()->is_instance_klass(), "must be instance klass");
 356   return _super == NULL ? NULL : InstanceKlass::cast(_super);
 357 }
 358 
 359 // subklass links.  Used by the compiler (and vtable initialization)
 360 // May be cleaned concurrently, so must use the Compile_lock.
 361 // The log parameter is for clean_weak_klass_links to report unlinked classes.
 362 Klass* Klass::subklass(bool log) const {
 363   // Need load_acquire on the _subklass, because it races with inserts that
 364   // publish freshly initialized data.
 365   for (Klass* chain = Atomic::load_acquire(&_subklass);
 366        chain != NULL;
 367        // Do not need load_acquire on _next_sibling, because inserts never
 368        // create _next_sibling edges to dead data.
 369        chain = Atomic::load(&chain->_next_sibling))
 370   {
 371     if (chain->is_loader_alive()) {
 372       return chain;
 373     } else if (log) {
 374       if (log_is_enabled(Trace, class, unload)) {
 375         ResourceMark rm;
 376         log_trace(class, unload)("unlinking class (subclass): %s", chain->external_name());
 377       }
 378     }
 379   }
 380   return NULL;
 381 }
 382 
 383 Klass* Klass::next_sibling(bool log) const {
 384   // Do not need load_acquire on _next_sibling, because inserts never
 385   // create _next_sibling edges to dead data.
 386   for (Klass* chain = Atomic::load(&_next_sibling);
 387        chain != NULL;
 388        chain = Atomic::load(&chain->_next_sibling)) {
 389     // Only return an alive klass; there may be stale klasses
 390     // in this chain if it was cleaned concurrently.
 391     if (chain->is_loader_alive()) {
 392       return chain;
 393     } else if (log) {
 394       if (log_is_enabled(Trace, class, unload)) {
 395         ResourceMark rm;
 396         log_trace(class, unload)("unlinking class (sibling): %s", chain->external_name());
 397       }
 398     }
 399   }
 400   return NULL;
 401 }
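
A minimal stand-alone sketch of the ordering argument in the two traversals above, using
std::atomic in place of HotSpot's Atomic wrappers (the Node type and its alive flag are
illustrative stand-ins for Klass and is_loader_alive(), not JDK types):

#include <atomic>

struct Node {
  bool alive = true;                 // stand-in for is_loader_alive()
  std::atomic<Node*> next{nullptr};  // stand-in for _next_sibling
};

// Mirrors subklass(): the acquire load of the head (_subklass) pairs with
// the release in the insert path, so a freshly published node's fields are
// visible before they are read here.
Node* first_alive(std::atomic<Node*>& head) {
  for (Node* n = head.load(std::memory_order_acquire);
       n != nullptr;
       // Plain (relaxed) loads suffice for next: a node's next field is
       // written before the node is release-published via the head, and
       // inserts never point next at dead nodes.
       n = n->next.load(std::memory_order_relaxed)) {
    if (n->alive) {
      return n;
    }
  }
  return nullptr;
}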
 402 
 403 void Klass::set_subklass(Klass* s) {
 404   assert(s != this, "sanity check");
 405   Atomic::release_store(&_subklass, s);
 406 }
 407 
 408 void Klass::set_next_sibling(Klass* s) {
 409   assert(s != this, "sanity check");
 410   // Does not need release semantics. If used by cleanup, it will link to
 411   // already safely published data, and if used by inserts, will be published
 412   // safely using cmpxchg.
 413   Atomic::store(s, &_next_sibling);
 414 }
 415 
 416 void Klass::append_to_sibling_list() {
 417   assert_locked_or_safepoint(Compile_lock);
 418   debug_only(verify();)
 419   // add ourselves to superklass' subklass list
 420   InstanceKlass* super = superklass();
 421   if (super == NULL) return;        // special case: class Object
 422   assert((!super->is_interface()    // interfaces cannot be supers
 423           && (super->superklass() == NULL || !is_interface())),
 424          "an interface can only be a subklass of Object");
 425 
 426   // Make sure there is no stale subklass head
 427   super->clean_subklass();
 428 
 429   for (;;) {
 430     Klass* prev_first_subklass = Atomic::load_acquire(&_super->_subklass);
 431     if (prev_first_subklass != NULL) {
 432       // set our sibling to be the superklass' previous first subklass
 433       assert(prev_first_subklass->is_loader_alive(), "must not attach dead klasses");
 434       set_next_sibling(prev_first_subklass);
 435     }
 436     // Note that prev_first_subklass is always alive, meaning no _next_sibling links
 437     // are ever created to dead klasses. This is an important invariant of the lock-free
 438     // cleaning protocol; it allows us to safely unlink dead klasses from the sibling list.
 439     if (Atomic::cmpxchg(this, &super->_subklass, prev_first_subklass) == prev_first_subklass) {
 440       return;
 441     }
 442   }
 443   debug_only(verify();)
 444 }
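
The insert path can be modeled the same way. This is a sketch reusing the Node type from
the earlier snippet, not the JDK code: it shows the CAS publication and the invariant that
the observed head has been cleaned (is alive) before the new node links to it:

// Mirrors append_to_sibling_list(): push n at the head with a CAS. The
// caller is assumed to have cleaned the head first (see the cleaning sketch
// below), so n->next is never made to point at a dead node.
void push(std::atomic<Node*>& head, Node* n) {
  for (;;) {
    Node* first = head.load(std::memory_order_acquire);
    // A plain store is enough: next is published together with n by the
    // release CAS below, and it only points at already-published data.
    n->next.store(first, std::memory_order_relaxed);
    // Release on success pairs with the acquire loads in the traversals,
    // publishing n's initialized fields along with the new head.
    if (head.compare_exchange_weak(first, n,
                                   std::memory_order_release,
                                   std::memory_order_relaxed)) {
      return;
    }
  }
}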
 445 
 446 void Klass::clean_subklass() {
 447   for (;;) {
 448     // Need load_acquire, due to contending with concurrent inserts
 449     Klass* subklass = Atomic::load_acquire(&_subklass);
 450     if (subklass == NULL || subklass->is_loader_alive()) {
 451       return;
 452     }
 453     // Try to fix _subklass until it points at something not dead.
 454     Atomic::cmpxchg(subklass->next_sibling(), &_subklass, subklass);
 455   }
 456 }
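
And the cleaning side, again as an illustrative sketch: only the head pointer is CASed
past dead nodes, and the next links of dead nodes are left untouched, which is safe
precisely because of the insert-side invariant. For simplicity this version steps one
node at a time, where clean_subklass() skips directly to the next alive sibling:

// Mirrors clean_subklass(): advance the head until it is null or points at
// an alive node, racing safely with concurrent push() calls.
void clean_head(std::atomic<Node*>& head) {
  for (;;) {
    // Acquire, because this contends with concurrent inserts.
    Node* n = head.load(std::memory_order_acquire);
    if (n == nullptr || n->alive) {
      return;
    }
    Node* next = n->next.load(std::memory_order_relaxed);
    // If the CAS fails, an insert won the race; retry with the new head.
    head.compare_exchange_weak(n, next,
                               std::memory_order_release,
                               std::memory_order_relaxed);
  }
}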
 457 
 458 void Klass::clean_weak_klass_links(bool unloading_occurred, bool clean_alive_klasses) {
 459   if (!ClassUnloading || !unloading_occurred) {
 460     return;
 461   }
 462 
 463   Klass* root = SystemDictionary::Object_klass();
 464   Stack<Klass*, mtGC> stack;
 465 
 466   stack.push(root);
 467   while (!stack.is_empty()) {
 468     Klass* current = stack.pop();
 469 
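The listing is cut off here. For orientation only, a hedged sketch of the kind of
per-node pruning step that fits this depth-first walk, using the accessors defined
above; this is an illustration of the pattern, not the file's actual continuation:

// ... inside the while loop, one plausible pruning step per node:
Klass* sub = current->subklass(true);   // first alive subklass; log the dead ones
current->set_subklass(sub);             // unlink any dead head
if (sub != NULL) {
  stack.push(sub);
}
Klass* sibling = current->next_sibling(true);
current->set_next_sibling(sibling);     // unlink dead siblings at the front
if (sibling != NULL) {
  stack.push(sibling);
}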

