1  /*
   2  * Copyright (c) 2012, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 // A ClassLoaderData identifies the full set of class types that a class
  26 // loader's name resolution strategy produces for a given configuration of the
  27 // class loader.
  28 // Class types in the ClassLoaderData may be defined from class file binaries
  29 // provided by the class loader, or by other class loaders it interacts with
  30 // according to its name resolution strategy.
  31 //
  32 // Class loaders that implement a deterministic name resolution strategy
  33 // (including with respect to their delegation behavior), such as the boot, the
  34 // platform, and the system loaders of the JDK's built-in class loader
  35 // hierarchy, always produce the same linkset for a given configuration.
  36 //
  37 // ClassLoaderData carries information related to a linkset (e.g.,
  38 // metaspace holding its klass definitions).
  39 // The System Dictionary and related data structures (e.g., placeholder table,
  40 // loader constraints table) as well as the runtime representation of classes
  41 // only reference ClassLoaderData.
  42 //
  43 // Each instance of java.lang.ClassLoader holds a pointer to the ClassLoaderData
  44 // that represents the loader's "linking domain" in the JVM.
  45 //
  46 // The bootstrap loader (represented by NULL) also has a ClassLoaderData,
  47 // the singleton returned by the_null_class_loader_data().
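     //
     // Illustrative note (not part of the original comment): VM code typically
     // reaches a ClassLoaderData either through a Klass, via
     // k->class_loader_data(), or by walking the ClassLoaderDataGraph; the
     // bootstrap loader's data is always available through
     // ClassLoaderData::the_null_class_loader_data().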
  48 
  49 #include "precompiled.hpp"
  50 #include "classfile/classLoaderData.hpp"
  51 #include "classfile/classLoaderData.inline.hpp"
  52 #include "classfile/dictionary.hpp"
  53 #include "classfile/javaClasses.hpp"
  54 #include "classfile/metadataOnStackMark.hpp"
  55 #include "classfile/moduleEntry.hpp"
  56 #include "classfile/packageEntry.hpp"
  57 #include "classfile/symbolTable.hpp"
  58 #include "classfile/systemDictionary.hpp"
  59 #include "logging/log.hpp"
  60 #include "logging/logStream.hpp"
  61 #include "memory/allocation.inline.hpp"
  62 #include "memory/metadataFactory.hpp"
  63 #include "memory/metaspaceShared.hpp"
  64 #include "memory/resourceArea.hpp"
  65 #include "memory/universe.hpp"
  66 #include "oops/access.inline.hpp"
  67 #include "oops/oop.inline.hpp"
  68 #include "oops/oopHandle.inline.hpp"
  69 #include "oops/weakHandle.inline.hpp"
  70 #include "runtime/atomic.hpp"
  71 #include "runtime/handles.inline.hpp"
  72 #include "runtime/mutex.hpp"
  73 #include "runtime/orderAccess.hpp"
  74 #include "runtime/safepoint.hpp"
  75 #include "runtime/safepointVerifiers.hpp"
  76 #include "utilities/growableArray.hpp"
  77 #include "utilities/macros.hpp"
  78 #include "utilities/ostream.hpp"
  79 #include "utilities/ticks.hpp"
  80 #if INCLUDE_JFR
  81 #include "jfr/jfr.hpp"
  82 #include "jfr/jfrEvents.hpp"
  83 #endif
  84 
  85 volatile size_t ClassLoaderDataGraph::_num_array_classes = 0;
  86 volatile size_t ClassLoaderDataGraph::_num_instance_classes = 0;
  87 
  88 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
  89 
  90 void ClassLoaderData::init_null_class_loader_data() {
  91   assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  92   assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");
  93 
  94   _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  95   ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  96   assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
  97 
  98   LogTarget(Trace, class, loader, data) lt;
  99   if (lt.is_enabled()) {
 100     ResourceMark rm;
 101     LogStream ls(lt);
 102     ls.print("create ");
 103     _the_null_class_loader_data->print_value_on(&ls);
 104     ls.cr();
 105   }
 106 }
 107 
 108 // Obtain and set the class loader's name within the ClassLoaderData so
 109 // it will be available for error messages, logging, JFR, etc.  The name
 110 // and klass are available after the class_loader oop is no longer alive,
 111 // during unloading.
 112 void ClassLoaderData::initialize_name(Handle class_loader) {
 113   Thread* THREAD = Thread::current();
 114   ResourceMark rm(THREAD);
 115 
 116   // Obtain the class loader's name.  If the class loader's name was not
 117   // explicitly set during construction, the CLD's _name field will be null.
 118   oop cl_name = java_lang_ClassLoader::name(class_loader());
 119   if (cl_name != NULL) {
 120     const char* cl_instance_name = java_lang_String::as_utf8_string(cl_name);
 121 
 122     if (cl_instance_name != NULL && cl_instance_name[0] != '\0') {
 123       // Can't throw InternalError and SymbolTable doesn't throw OOM anymore.
 124       _name = SymbolTable::new_symbol(cl_instance_name, CATCH);
 125     }
 126   }
 127 
 128   // Obtain the class loader's name and identity hash.  If the class loader's
 129   // name was not explicitly set during construction, the class loader's name and id
 130   // will be set to the qualified class name of the class loader along with its
 131   // identity hash.
 132   // If for some reason the ClassLoader's constructor has not been run, instead of
 133   // leaving the _name_and_id field null, fall back to the external qualified class
 134   // name.  Thus CLD's _name_and_id field should never have a null value.
 135   oop cl_name_and_id = java_lang_ClassLoader::nameAndId(class_loader());
 136   const char* cl_instance_name_and_id =
 137                   (cl_name_and_id == NULL) ? _class_loader_klass->external_name() :
 138                                              java_lang_String::as_utf8_string(cl_name_and_id);
 139   assert(cl_instance_name_and_id != NULL && cl_instance_name_and_id[0] != '\0', "class loader has no name and id");
 140   // Can't throw InternalError and SymbolTable doesn't throw OOM anymore.
 141   _name_and_id = SymbolTable::new_symbol(cl_instance_name_and_id, CATCH);
 142 }
 143 
 144 ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_unsafe_anonymous) :
 145   _metaspace(NULL),
 146   _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
 147                             Monitor::_safepoint_check_never)),
 148   _unloading(false), _is_unsafe_anonymous(is_unsafe_anonymous),
 149   _modified_oops(true), _accumulated_modified_oops(false),
 150   // An unsafe anonymous class loader data doesn't have anything to keep
 151   // it from being unloaded during parsing of the unsafe anonymous class.
 152   // The null-class-loader should always be kept alive.
 153   _keep_alive((is_unsafe_anonymous || h_class_loader.is_null()) ? 1 : 0),
 154   _claimed(0),
 155   _handles(),
 156   _klasses(NULL), _packages(NULL), _modules(NULL), _unnamed_module(NULL), _dictionary(NULL),
 157   _jmethod_ids(NULL),
 158   _deallocate_list(NULL),
 159   _next(NULL),
 160   _class_loader_klass(NULL), _name(NULL), _name_and_id(NULL) {
 161 
 162   if (!h_class_loader.is_null()) {
 163     _class_loader = _handles.add(h_class_loader());
 164     _class_loader_klass = h_class_loader->klass();
 165   }
 166 
 167   if (!is_unsafe_anonymous) {
 168     // The holder is initialized later for unsafe anonymous classes, and before calling anything
 169     // that calls class_loader().
 170     initialize_holder(h_class_loader);
 171 
 172     // A ClassLoaderData created solely for an unsafe anonymous class should never have a
 173     // ModuleEntryTable or PackageEntryTable created for it. The defining package
 174     // and module for an unsafe anonymous class will be found in its host class.
 175     _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
 176     if (h_class_loader.is_null()) {
 177       // Create unnamed module for boot loader
 178       _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
 179     } else {
 180       // Create unnamed module for all other loaders
 181       _unnamed_module = ModuleEntry::create_unnamed_module(this);
 182     }
 183     _dictionary = create_dictionary();
 184   }
 185 
 186   NOT_PRODUCT(_dependency_count = 0); // number of class loader dependencies
 187 
 188   JFR_ONLY(INIT_ID(this);)
 189 }
 190 
 191 ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
 192   Chunk* c = _head;
 193   while (c != NULL) {
 194     Chunk* next = c->_next;
 195     delete c;
 196     c = next;
 197   }
 198 }
 199 
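     // Add a handle to the list. Writers are serialized by the caller (e.g.
     // ClassLoaderData::add_handle() holds the metaspace lock), while oops_do()
     // may read the list concurrently without a lock, hence the release_store
     // publication of new chunks and of the updated size.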
 200 oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
 201   if (_head == NULL || _head->_size == Chunk::CAPACITY) {
 202     Chunk* next = new Chunk(_head);
 203     OrderAccess::release_store(&_head, next);
 204   }
 205   oop* handle = &_head->_data[_head->_size];
 206   *handle = o;
 207   OrderAccess::release_store(&_head->_size, _head->_size + 1);
 208   return handle;
 209 }
 210 
 211 int ClassLoaderData::ChunkedHandleList::count() const {
 212   int count = 0;
 213   Chunk* chunk = _head;
 214   while (chunk != NULL) {
 215     count += chunk->_size;
 216     chunk = chunk->_next;
 217   }
 218   return count;
 219 }
 220 
 221 inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
 222   for (juint i = 0; i < size; i++) {
 223     if (c->_data[i] != NULL) {
 224       f->do_oop(&c->_data[i]);
 225     }
 226   }
 227 }
 228 
 229 void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
 230   Chunk* head = OrderAccess::load_acquire(&_head);
 231   if (head != NULL) {
 232     // Must be careful when reading size of head
 233     oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
 234     for (Chunk* c = head->_next; c != NULL; c = c->_next) {
 235       oops_do_chunk(f, c, c->_size);
 236     }
 237   }
 238 }
 239 
 240 class VerifyContainsOopClosure : public OopClosure {
 241   oop  _target;
 242   bool _found;
 243 
 244  public:
 245   VerifyContainsOopClosure(oop target) : _target(target), _found(false) {}
 246 
 247   void do_oop(oop* p) {
 248     if (p != NULL && oopDesc::equals(RawAccess<>::oop_load(p), _target)) {
 249       _found = true;
 250     }
 251   }
 252 
 253   void do_oop(narrowOop* p) {
 254     // The ChunkedHandleList should not contain any narrowOop
 255     ShouldNotReachHere();
 256   }
 257 
 258   bool found() const {
 259     return _found;
 260   }
 261 };
 262 
 263 bool ClassLoaderData::ChunkedHandleList::contains(oop p) {
 264   VerifyContainsOopClosure cl(p);
 265   oops_do(&cl);
 266   return cl.found();
 267 }
 268 
 269 #ifndef PRODUCT
 270 bool ClassLoaderData::ChunkedHandleList::owner_of(oop* oop_handle) {
 271   Chunk* chunk = _head;
 272   while (chunk != NULL) {
 273     if (&(chunk->_data[0]) <= oop_handle && oop_handle < &(chunk->_data[chunk->_size])) {
 274       return true;
 275     }
 276     chunk = chunk->_next;
 277   }
 278   return false;
 279 }
 280 #endif // PRODUCT
 281 
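     // Claim this ClassLoaderData for the current traversal of the graph.
     // Returns true only for the first caller, so each CLD's oops are
     // processed at most once per traversal; clear_claimed_marks() resets it.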
 282 bool ClassLoaderData::claim() {
 283   if (_claimed == 1) {
 284     return false;
 285   }
 286 
 287   return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0;
 288 }
 289 
 290 // Unsafe anonymous classes have their own ClassLoaderData that is marked to be kept alive
 291 // while the class is being parsed, and if the class appears on the module fixup list.
 292 // Because no other class shares the unsafe anonymous class' name or ClassLoaderData,
 293 // no other non-GC thread has knowledge of the unsafe anonymous class while
 294 // it is being defined; therefore _keep_alive is not volatile or atomic.
 295 void ClassLoaderData::inc_keep_alive() {
 296   if (is_unsafe_anonymous()) {
 297     assert(_keep_alive >= 0, "Invalid keep alive increment count");
 298     _keep_alive++;
 299   }
 300 }
 301 
 302 void ClassLoaderData::dec_keep_alive() {
 303   if (is_unsafe_anonymous()) {
 304     assert(_keep_alive > 0, "Invalid keep alive decrement count");
 305     _keep_alive--;
 306   }
 307 }
 308 
 309 void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) {
 310   if (must_claim && !claim()) {
 311     return;
 312   }
 313 
 314   // Only clear modified_oops after the ClassLoaderData is claimed.
 315   if (clear_mod_oops) {
 316     clear_modified_oops();
 317   }
 318 
 319   _handles.oops_do(f);
 320 }
 321 
 322 void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
 323   // Lock-free access requires load_acquire
 324   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 325     klass_closure->do_klass(k);
 326     assert(k != k->next_link(), "no loops!");
 327   }
 328 }
 329 
 330 void ClassLoaderData::classes_do(void f(Klass * const)) {
 331   // Lock-free access requires load_acquire
 332   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 333     f(k);
 334     assert(k != k->next_link(), "no loops!");
 335   }
 336 }
 337 
 338 void ClassLoaderData::methods_do(void f(Method*)) {
 339   // Lock-free access requires load_acquire
 340   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 341     if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
 342       InstanceKlass::cast(k)->methods_do(f);
 343     }
 344   }
 345 }
 346 
 347 void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
 348   // Lock-free access requires load_acquire
 349   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 350     // Do not filter ArrayKlass oops here...
 351     if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
 352       klass_closure->do_klass(k);
 353     }
 354   }
 355 }
 356 
 357 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 358   // Lock-free access requires load_acquire
 359   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 360     if (k->is_instance_klass()) {
 361       f(InstanceKlass::cast(k));
 362     }
 363     assert(k != k->next_link(), "no loops!");
 364   }
 365 }
 366 
 367 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 368   assert_locked_or_safepoint(Module_lock);
 369   if (_unnamed_module != NULL) {
 370     f(_unnamed_module);
 371   }
 372   if (_modules != NULL) {
 373     for (int i = 0; i < _modules->table_size(); i++) {
 374       for (ModuleEntry* entry = _modules->bucket(i);
 375            entry != NULL;
 376            entry = entry->next()) {
 377         f(entry);
 378       }
 379     }
 380   }
 381 }
 382 
 383 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 384   assert_locked_or_safepoint(Module_lock);
 385   if (_packages != NULL) {
 386     for (int i = 0; i < _packages->table_size(); i++) {
 387       for (PackageEntry* entry = _packages->bucket(i);
 388            entry != NULL;
 389            entry = entry->next()) {
 390         f(entry);
 391       }
 392     }
 393   }
 394 }
 395 
 396 void ClassLoaderData::record_dependency(const Klass* k) {
 397   assert(k != NULL, "invariant");
 398 
 399   ClassLoaderData * const from_cld = this;
 400   ClassLoaderData * const to_cld = k->class_loader_data();
 401 
 402   // Do not need to record dependency if the dependency is to a class whose
 403   // class loader data is never freed.  (i.e. the dependency's class loader
 404   // is one of the three builtin class loaders and the dependency is not
 405   // unsafe anonymous.)
 406   if (to_cld->is_permanent_class_loader_data()) {
 407     return;
 408   }
 409 
 410   oop to;
 411   if (to_cld->is_unsafe_anonymous()) {
 412     // Just return if an unsafe anonymous class is attempting to record a dependency
 413     // to itself.  (Note that every unsafe anonymous class has its own unique class
 414     // loader data.)
 415     if (to_cld == from_cld) {
 416       return;
 417     }
 418     // Unsafe anonymous class dependencies are through the mirror.
 419     to = k->java_mirror();
 420   } else {
 421     to = to_cld->class_loader();
 422     oop from = from_cld->class_loader();
 423 
 424     // Just return if this dependency is to a class with the same or a parent
 425     // class_loader.
 426     if (oopDesc::equals(from, to) || java_lang_ClassLoader::isAncestor(from, to)) {
 427       return; // this class loader is in the parent list, no need to add it.
 428     }
 429   }
 430 
 431   // It's a dependency we won't find through GC, add it.
 432   if (!_handles.contains(to)) {
 433     NOT_PRODUCT(Atomic::inc(&_dependency_count));
 434     LogTarget(Trace, class, loader, data) lt;
 435     if (lt.is_enabled()) {
 436       ResourceMark rm;
 437       LogStream ls(lt);
 438       ls.print("adding dependency from ");
 439       print_value_on(&ls);
 440       ls.print(" to ");
 441       to_cld->print_value_on(&ls);
 442       ls.cr();
 443     }
 444     Handle dependency(Thread::current(), to);
 445     add_handle(dependency);
 446     // Added a potentially young gen oop to the ClassLoaderData
 447     record_modified_oops();
 448   }
 449 }
 450 
 451 
 452 void ClassLoaderDataGraph::clear_claimed_marks() {
 453   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
 454     cld->clear_claimed();
 455   }
 456 }
 457 
 458 void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
 459   {
 460     MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 461     Klass* old_value = _klasses;
 462     k->set_next_link(old_value);
 463     // Link the new item into the list, making sure the linked class is stable
 464     // since the list can be walked without a lock
 465     OrderAccess::release_store(&_klasses, k);
 466     if (k->is_array_klass()) {
 467       ClassLoaderDataGraph::inc_array_classes(1);
 468     } else {
 469       ClassLoaderDataGraph::inc_instance_classes(1);
 470     }
 471   }
 472 
 473   if (publicize) {
 474     LogTarget(Trace, class, loader, data) lt;
 475     if (lt.is_enabled()) {
 476       ResourceMark rm;
 477       LogStream ls(lt);
 478       ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), k->external_name());
 479       print_value_on(&ls);
 480       ls.cr();
 481     }
 482   }
 483 }
 484 
 485 // Class iterator used by the compiler.  It gets some number of classes at
 486 // a safepoint to decay invocation counters on the methods.
 487 class ClassLoaderDataGraphKlassIteratorStatic {
 488   ClassLoaderData* _current_loader_data;
 489   Klass*           _current_class_entry;
 490  public:
 491 
 492   ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {}
 493 
 494   InstanceKlass* try_get_next_class() {
 495     assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 496     size_t max_classes = ClassLoaderDataGraph::num_instance_classes();
 497     assert(max_classes > 0, "should not be called with no instance classes");
 498     for (size_t i = 0; i < max_classes; ) {
 499 
 500       if (_current_class_entry != NULL) {
 501         Klass* k = _current_class_entry;
 502         _current_class_entry = _current_class_entry->next_link();
 503 
 504         if (k->is_instance_klass()) {
 505           InstanceKlass* ik = InstanceKlass::cast(k);
 506           i++;  // count all instance classes found
 507           // Classes that are not yet loaded are counted in max_classes,
 508           // but only loaded classes are returned.
 509           if (ik->is_loaded()) {
 510             return ik;
 511           }
 512         }
 513       } else {
 514         // Go to next CLD
 515         if (_current_loader_data != NULL) {
 516           _current_loader_data = _current_loader_data->next();
 517         }
 518         // Start at the beginning
 519         if (_current_loader_data == NULL) {
 520           _current_loader_data = ClassLoaderDataGraph::_head;
 521         }
 522 
 523         _current_class_entry = _current_loader_data->klasses();
 524       }
 525     }
 526     // Should never be reached unless all instance classes have failed or are not fully loaded.
 527     // Caller handles NULL.
 528     return NULL;
 529   }
 530 
 531   // If the current class for the static iterator is a class being unloaded or
 532   // deallocated, adjust the current class.
 533   void adjust_saved_class(ClassLoaderData* cld) {
 534     if (_current_loader_data == cld) {
 535       _current_loader_data = cld->next();
 536       if (_current_loader_data != NULL) {
 537         _current_class_entry = _current_loader_data->klasses();
 538       }  // else try_get_next_class will start at the head
 539     }
 540   }
 541 
 542   void adjust_saved_class(Klass* klass) {
 543     if (_current_class_entry == klass) {
 544       _current_class_entry = klass->next_link();
 545     }
 546   }
 547 };
 548 
 549 static ClassLoaderDataGraphKlassIteratorStatic static_klass_iterator;
 550 
 551 InstanceKlass* ClassLoaderDataGraph::try_get_next_class() {
 552   return static_klass_iterator.try_get_next_class();
 553 }
 554 
 555 
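     // The holder is a WeakHandle to the class loader oop (or, for an unsafe
     // anonymous class, to its java mirror). It lets is_alive() observe the
     // loader's reachability without the CLD itself keeping the loader alive.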
 556 void ClassLoaderData::initialize_holder(Handle loader_or_mirror) {
 557   if (loader_or_mirror() != NULL) {
 558     assert(_holder.is_null(), "never replace holders");
 559     _holder = WeakHandle<vm_class_loader_data>::create(loader_or_mirror);
 560   }
 561 }
 562 
 563 // Remove a klass from the _klasses list: either a scratch_class during redefinition,
 564 // or a partially parsed class in the case of an error.
 565 void ClassLoaderData::remove_class(Klass* scratch_class) {
 566   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 567 
 568   // Adjust global class iterator.
 569   static_klass_iterator.adjust_saved_class(scratch_class);
 570 
 571   Klass* prev = NULL;
 572   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
 573     if (k == scratch_class) {
 574       if (prev == NULL) {
 575         _klasses = k->next_link();
 576       } else {
 577         Klass* next = k->next_link();
 578         prev->set_next_link(next);
 579       }
 580 
 581       if (k->is_array_klass()) {
 582         ClassLoaderDataGraph::dec_array_classes(1);
 583       } else {
 584         ClassLoaderDataGraph::dec_instance_classes(1);
 585       }
 586 
 587       return;
 588     }
 589     prev = k;
 590     assert(k != k->next_link(), "no loops!");
 591   }
 592   ShouldNotReachHere();   // should have found this class!!
 593 }
 594 
 595 void ClassLoaderData::unload() {
 596   _unloading = true;
 597 
 598   LogTarget(Trace, class, loader, data) lt;
 599   if (lt.is_enabled()) {
 600     ResourceMark rm;
 601     LogStream ls(lt);
 602     ls.print("unload");
 603     print_value_on(&ls);
 604     ls.cr();
 605   }
 606 
 607   // Some items on the _deallocate_list need to free their C heap structures
 608   // if they are not already on the _klasses list.
 609   free_deallocate_list_C_heap_structures();
 610 
 611   // Tell serviceability tools these classes are unloading
 612   // after erroneous classes are released.
 613   classes_do(InstanceKlass::notify_unload_class);
 614 
 615   // Clean up global class iterator for compiler
 616   static_klass_iterator.adjust_saved_class(this);
 617 }
 618 
 619 ModuleEntryTable* ClassLoaderData::modules() {
 620   // Lazily create the module entry table at first request.
 621   // Lock-free access requires load_acquire.
 622   ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
 623   if (modules == NULL) {
 624     MutexLocker m1(Module_lock);
 625     // Check if _modules got allocated while we were waiting for this lock.
 626     if ((modules = _modules) == NULL) {
 627       modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
 628 
 629       {
 630         MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 631         // Ensure _modules is stable, since it is examined without a lock
 632         OrderAccess::release_store(&_modules, modules);
 633       }
 634     }
 635   }
 636   return modules;
 637 }
 638 
 639 const int _boot_loader_dictionary_size    = 1009;
 640 const int _default_loader_dictionary_size = 107;
 641 
 642 Dictionary* ClassLoaderData::create_dictionary() {
 643   assert(!is_unsafe_anonymous(), "unsafe anonymous class loader data do not have a dictionary");
 644   int size;
 645   bool resizable = false;
 646   if (_the_null_class_loader_data == NULL) {
 647     size = _boot_loader_dictionary_size;
 648     resizable = true;
 649   } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
 650     size = 1;  // there's only one class in a reflection delegating class loader and no initiated classes
 651   } else if (is_system_class_loader_data()) {
 652     size = _boot_loader_dictionary_size;
 653     resizable = true;
 654   } else {
 655     size = _default_loader_dictionary_size;
 656     resizable = true;
 657   }
 658   if (!DynamicallyResizeSystemDictionaries || DumpSharedSpaces) {
 659     resizable = false;
 660   }
 661   return new Dictionary(this, size, resizable);
 662 }
 663 
 664 // Tell the GC to keep the holder of this class loader data alive, so the CLD is not unloaded while iterating the ClassLoaderDataGraph
 665 oop ClassLoaderData::holder_phantom() const {
 666   // A klass that was previously considered dead can be looked up in the
 667   // CLD/SD, and its _java_mirror or _class_loader can be stored in a root
 668   // or a reachable object making it alive again. The SATB part of G1 needs
 669   // to get notified about this potential resurrection, otherwise the marking
 670   // might not find the object.
 671   if (!_holder.is_null()) {  // NULL for the bootstrap loader (NULL class_loader)
 672     return _holder.resolve();
 673   } else {
 674     return NULL;
 675   }
 676 }
 677 
 678 // Unloading support
 679 bool ClassLoaderData::is_alive() const {
 680   bool alive = keep_alive()         // null class loader and incomplete unsafe anonymous klasses.
 681       || (_holder.peek() != NULL);  // and not cleaned by the GC weak handle processing.
 682 
 683   return alive;
 684 }
 685 
 686 class ReleaseKlassClosure: public KlassClosure {
 687 private:
 688   size_t  _instance_class_released;
 689   size_t  _array_class_released;
 690 public:
 691   ReleaseKlassClosure() : _instance_class_released(0), _array_class_released(0) { }
 692 
 693   size_t instance_class_released() const { return _instance_class_released; }
 694   size_t array_class_released()    const { return _array_class_released;    }
 695 
 696   void do_klass(Klass* k) {
 697     if (k->is_array_klass()) {
 698       _array_class_released ++;
 699     } else {
 700       assert(k->is_instance_klass(), "Must be");
 701       _instance_class_released ++;
 702       InstanceKlass::release_C_heap_structures(InstanceKlass::cast(k));
 703     }
 704   }
 705 };
 706 
 707 ClassLoaderData::~ClassLoaderData() {
 708   // Release C heap structures for all the classes.
 709   ReleaseKlassClosure cl;
 710   classes_do(&cl);
 711 
 712   ClassLoaderDataGraph::dec_array_classes(cl.array_class_released());
 713   ClassLoaderDataGraph::dec_instance_classes(cl.instance_class_released());
 714 
 715   // Release the WeakHandle
 716   _holder.release();
 717 
 718   // Release C heap allocated hashtable for all the packages.
 719   if (_packages != NULL) {
 720     // Destroy the table itself
 721     delete _packages;
 722     _packages = NULL;
 723   }
 724 
 725   // Release C heap allocated hashtable for all the modules.
 726   if (_modules != NULL) {
 727     // Destroy the table itself
 728     delete _modules;
 729     _modules = NULL;
 730   }
 731 
 732   // Release C heap allocated hashtable for the dictionary
 733   if (_dictionary != NULL) {
 734     // Destroy the table itself
 735     delete _dictionary;
 736     _dictionary = NULL;
 737   }
 738 
 739   if (_unnamed_module != NULL) {
 740     _unnamed_module->delete_unnamed_module();
 741     _unnamed_module = NULL;
 742   }
 743 
 744   // release the metaspace
 745   ClassLoaderMetaspace *m = _metaspace;
 746   if (m != NULL) {
 747     _metaspace = NULL;
 748     delete m;
 749   }
 750   // Clear all the JNI handles for methods.
 751   // The handles aren't deallocated and will look like a leak, but that's
 752   // necessary: we can't really get rid of jmethodIDs because we don't
 753   // know when native code will stop using them.  The spec says that
 754   // they're "invalid" but existing programs likely rely on their being
 755   // NULL after class unloading.
 756   if (_jmethod_ids != NULL) {
 757     Method::clear_jmethod_ids(this);
 758   }
 759   // Delete lock
 760   delete _metaspace_lock;
 761 
 762   // Delete free list
 763   if (_deallocate_list != NULL) {
 764     delete _deallocate_list;
 765   }
 766 }
 767 
 768 // Returns true if this class loader data is for the app class loader
 769 // or a user-defined system class loader.  (Note that the class loader
 770 // data may be unsafe anonymous.)
 771 bool ClassLoaderData::is_system_class_loader_data() const {
 772   return SystemDictionary::is_system_class_loader(class_loader());
 773 }
 774 
 775 // Returns true if this class loader data is for the platform class loader.
 776 // (Note that the class loader data may be unsafe anonymous.)
 777 bool ClassLoaderData::is_platform_class_loader_data() const {
 778   return SystemDictionary::is_platform_class_loader(class_loader());
 779 }
 780 
 781 // Returns true if the class loader for this class loader data is one of
 782 // the 3 builtin (boot, application/system, or platform) class loaders,
 783 // including a user-defined system class loader.  Note that if the class
 784 // loader data is for an unsafe anonymous class then it may get freed by a GC
 785 // even if its class loader is one of these loaders.
 786 bool ClassLoaderData::is_builtin_class_loader_data() const {
 787   return (is_boot_class_loader_data() ||
 788           SystemDictionary::is_system_class_loader(class_loader()) ||
 789           SystemDictionary::is_platform_class_loader(class_loader()));
 790 }
 791 
 792 // Returns true if this class loader data is a class loader data
 793 // that is not ever freed by a GC.  It must be the CLD for one of the builtin
 794 // class loaders and not the CLD for an unsafe anonymous class.
 795 bool ClassLoaderData::is_permanent_class_loader_data() const {
 796   return is_builtin_class_loader_data() && !is_unsafe_anonymous();
 797 }
 798 
 799 ClassLoaderMetaspace* ClassLoaderData::metaspace_non_null() {
 800   // If the metaspace has not been allocated, create a new one.  Might want
 801   // to create a smaller arena for Reflection class loaders also.
 802   // The reason for the delayed allocation is that some class loaders exist
 803   // simply to delegate, with no metadata of their own.
 804   // Lock-free access requires load_acquire.
 805   ClassLoaderMetaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
 806   if (metaspace == NULL) {
 807     MutexLockerEx ml(_metaspace_lock,  Mutex::_no_safepoint_check_flag);
 808     // Check if _metaspace got allocated while we were waiting for this lock.
 809     if ((metaspace = _metaspace) == NULL) {
 810       if (this == the_null_class_loader_data()) {
 811         assert (class_loader() == NULL, "Must be");
 812         metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
 813       } else if (is_unsafe_anonymous()) {
 814         metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::UnsafeAnonymousMetaspaceType);
 815       } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
 816         metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
 817       } else {
 818         metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
 819       }
 820       // Ensure _metaspace is stable, since it is examined without a lock
 821       OrderAccess::release_store(&_metaspace, metaspace);
 822     }
 823   }
 824   return metaspace;
 825 }
 826 
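     // Pin an oop in this CLD's handle area so it is kept alive and visited by
     // oops_do(). record_modified_oops() notes that this CLD may now reference
     // a young object, which generational collectors check during scanning.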
 827 OopHandle ClassLoaderData::add_handle(Handle h) {
 828   MutexLockerEx ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
 829   record_modified_oops();
 830   return OopHandle(_handles.add(h()));
 831 }
 832 
 833 void ClassLoaderData::remove_handle(OopHandle h) {
 834   assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
 835   oop* ptr = h.ptr_raw();
 836   if (ptr != NULL) {
 837     assert(_handles.owner_of(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
 838     NativeAccess<>::oop_store(ptr, oop(NULL));
 839   }
 840 }
 841 
 842 void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
 843   MutexLockerEx ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
 844   if (dest.resolve() != NULL) {
 845     return;
 846   } else {
 847     dest = _handles.add(h());
 848   }
 849 }
 850 
 851 // Add this metadata pointer to be freed when it's safe.  Freeing happens only
 852 // at a safepoint, which checks whether any handles still point to this metadata.
 853 void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
 854   // Metadata in shared region isn't deleted.
 855   if (!m->is_shared()) {
 856     MutexLockerEx ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
 857     if (_deallocate_list == NULL) {
 858       _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
 859     }
 860     _deallocate_list->append_if_missing(m);
 861     log_debug(class, loader, data)("deallocate added for %s", m->print_value_string());
 862     ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 863   }
 864 }
 865 
 866 // Deallocate free metadata on the free list.  How useful the PermGen was!
 867 void ClassLoaderData::free_deallocate_list() {
 868   // Don't need lock, at safepoint
 869   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 870   assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
 871   if (_deallocate_list == NULL) {
 872     return;
 873   }
 874   // Go backwards because this removes entries that are freed.
 875   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 876     Metadata* m = _deallocate_list->at(i);
 877     if (!m->on_stack()) {
 878       _deallocate_list->remove_at(i);
 879       // There are only three types of metadata that we deallocate directly.
 880       // Cast them so they can be used by the template function.
 881       if (m->is_method()) {
 882         MetadataFactory::free_metadata(this, (Method*)m);
 883       } else if (m->is_constantPool()) {
 884         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 885       } else if (m->is_klass()) {
 886         MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 887       } else {
 888         ShouldNotReachHere();
 889       }
 890     } else {
 891       // Metadata is alive.
 892       // If scratch_class is on stack then it shouldn't be on this list!
 893       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 894              "scratch classes on this list should be dead");
 895       // Also should assert that other metadata on the list was found in handles.
 896       // Some cleaning remains.
 897       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 898     }
 899   }
 900 }
 901 
 902 void ClassLoaderDataGraph::clean_deallocate_lists(bool walk_previous_versions) {
 903   uint loaders_processed = 0;
 904   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
 905     // is_alive check will be necessary for concurrent class unloading.
 906     if (cld->is_alive()) {
 907       // clean metaspace
 908       if (walk_previous_versions) {
 909         cld->classes_do(InstanceKlass::purge_previous_versions);
 910       }
 911       cld->free_deallocate_list();
 912       loaders_processed++;
 913     }
 914   }
 915   log_debug(class, loader, data)("clean_deallocate_lists: loaders processed %u %s",
 916                                  loaders_processed, walk_previous_versions ? "walk_previous_versions" : "");
 917 }
 918 
 919 void ClassLoaderDataGraph::walk_metadata_and_clean_metaspaces() {
 920   assert(SafepointSynchronize::is_at_safepoint(), "must only be called at safepoint");
 921 
 922   _should_clean_deallocate_lists = false; // assume everything gets cleaned
 923 
 924   // Mark metadata seen on the stack so we can delete unreferenced entries.
 925   // Walk all metadata, including the expensive code cache walk, only for class redefinition.
 926   // The MetadataOnStackMark walk during redefinition saves previous versions if it finds old methods
 927   // on the stack or in the code cache, so we only have to repeat the full walk if
 928   // they were found at that time.
 929   // TODO: have redefinition clean old methods out of the code cache.  They still exist in some places.
 930   bool walk_all_metadata = InstanceKlass::has_previous_versions_and_reset();
 931 
 932   MetadataOnStackMark md_on_stack(walk_all_metadata);
 933   clean_deallocate_lists(walk_all_metadata);
 934 }
 935 
 936 // This is distinct from free_deallocate_list.  For class loader data that are
 937 // unloading, this frees the C heap memory for items on the list, and unlinks
 938 // scratch or error classes so that unloading events aren't triggered for these
 939 // classes. The metadata is removed with the unloading metaspace.
 940 // There isn't C heap memory allocated for methods, so nothing is done for them.
 941 void ClassLoaderData::free_deallocate_list_C_heap_structures() {
 942   // Don't need lock, at safepoint
 943   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 944   assert(is_unloading(), "only called for ClassLoaderData that are unloading");
 945   if (_deallocate_list == NULL) {
 946     return;
 947   }
 948   // Go backwards because this removes entries that are freed.
 949   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 950     Metadata* m = _deallocate_list->at(i);
 951     _deallocate_list->remove_at(i);
 952     if (m->is_constantPool()) {
 953       ((ConstantPool*)m)->release_C_heap_structures();
 954     } else if (m->is_klass()) {
 955       InstanceKlass* ik = (InstanceKlass*)m;
 956       // also releases ik->constants() C heap memory
 957       InstanceKlass::release_C_heap_structures(ik);
 958       // Remove the class so unloading events aren't triggered for
 959       // this class (scratch or error class) in do_unloading().
 960       remove_class(ik);
 961     }
 962   }
 963 }
 964 
 965 // These CLDs are created to contain unsafe anonymous classes used for JSR 292
 966 ClassLoaderData* ClassLoaderData::unsafe_anonymous_class_loader_data(Handle loader) {
 967   // Add a new class loader data to the graph.
 968   return ClassLoaderDataGraph::add(loader, true);
 969 }
 970 
 971 // Caller needs ResourceMark
 972 // If the class loader's _name has not been explicitly set, the class loader's
 973 // qualified class name is returned.
 974 const char* ClassLoaderData::loader_name() const {
 975    if (_class_loader_klass == NULL) {
 976      return BOOTSTRAP_LOADER_NAME;
 977    } else if (_name != NULL) {
 978      return _name->as_C_string();
 979    } else {
 980      return _class_loader_klass->external_name();
 981    }
 982 }
 983 
 984 // Caller needs ResourceMark
 985 // Format of the _name_and_id is as follows:
 986 //   If the defining loader has a name explicitly set then '<loader-name>' @<id>
 987 //   If the defining loader has no name then <qualified-class-name> @<id>
 988 //   If built-in loader, then omit '@<id>' as there is only one instance.
 989 const char* ClassLoaderData::loader_name_and_id() const {
 990   if (_class_loader_klass == NULL) {
 991     return "'" BOOTSTRAP_LOADER_NAME "'";
 992   } else if (_name_and_id != NULL) {
 993     return _name_and_id->as_C_string();
 994   } else {
 995     // May be called in a race before _name_and_id is initialized.
 996     return _class_loader_klass->external_name();
 997   }
 998 }
 999 
1000 void ClassLoaderData::print_value_on(outputStream* out) const {
1001   if (!is_unloading() && class_loader() != NULL) {
1002     out->print("loader data: " INTPTR_FORMAT " for instance ", p2i(this));
1003     class_loader()->print_value_on(out);  // includes loader_name_and_id() and address of class loader instance
1004   } else {
1005     // loader data: 0xsomeaddr of 'bootstrap'
1006     out->print("loader data: " INTPTR_FORMAT " of %s", p2i(this), loader_name_and_id());
1007   }
1008   if (is_unsafe_anonymous()) {
1009     out->print(" unsafe anonymous");
1010   }
1011 }
1012 
1013 #ifndef PRODUCT
1014 void ClassLoaderData::print_on(outputStream* out) const {
1015   out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: %s {",
1016               p2i(this), p2i(_class_loader.ptr_raw()), loader_name_and_id());
1017   if (is_unsafe_anonymous()) out->print(" unsafe anonymous");
1018   if (claimed()) out->print(" claimed");
1019   if (is_unloading()) out->print(" unloading");
1020   out->print(" metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));
1021 
1022   if (_jmethod_ids != NULL) {
1023     Method::print_jmethod_ids(this, out);
1024   }
1025   out->print(" handles count %d", _handles.count());
1026   out->print(" dependencies %d", _dependency_count);
1027   out->print_cr("}");
1028 }
1029 #endif // PRODUCT
1030 
1031 void ClassLoaderData::verify() {
1032   assert_locked_or_safepoint(_metaspace_lock);
1033   oop cl = class_loader();
1034 
1035   guarantee(this == class_loader_data(cl) || is_unsafe_anonymous(), "Must be the same");
1036   guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_unsafe_anonymous(), "must be");
1037 
1038   // Verify the integrity of the allocated space.
1039   if (metaspace_or_null() != NULL) {
1040     metaspace_or_null()->verify();
1041   }
1042 
1043   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
1044     guarantee(k->class_loader_data() == this, "Must be the same");
1045     k->verify();
1046     assert(k != k->next_link(), "no loops!");
1047   }
1048 }
1049 
1050 bool ClassLoaderData::contains_klass(Klass* klass) {
1051   // Lock-free access requires load_acquire
1052   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
1053     if (k == klass) return true;
1054   }
1055   return false;
1056 }
1057 
1058 
1059 // GC root: head of the list of class loader data created.
1060 ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
1061 ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
1062 ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
1063 ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;
1064 
1065 bool ClassLoaderDataGraph::_should_purge = false;
1066 bool ClassLoaderDataGraph::_should_clean_deallocate_lists = false;
1067 bool ClassLoaderDataGraph::_safepoint_cleanup_needed = false;
1068 bool ClassLoaderDataGraph::_metaspace_oom = false;
1069 
1070 // Add a new class loader data node to the list.  Assign the newly created
1071 // ClassLoaderData into the java/lang/ClassLoader object as a hidden field
1072 ClassLoaderData* ClassLoaderDataGraph::add_to_graph(Handle loader, bool is_unsafe_anonymous) {
1073   NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
1074                                      // ClassLoaderData in the graph since the CLD
1075                                      // contains oops in _handles that must be walked.
1076 
1077   ClassLoaderData* cld = new ClassLoaderData(loader, is_unsafe_anonymous);
1078 
1079   if (!is_unsafe_anonymous) {
1080     // First, atomically set it.
1081     ClassLoaderData* old = java_lang_ClassLoader::cmpxchg_loader_data(cld, loader(), NULL);
1082     if (old != NULL) {
1083       delete cld;
1084       // Returns the data.
1085       return old;
1086     }
1087   }
1088 
1089   // We won the race, and therefore the task of adding the data to the list of
1090   // class loader data.
1091   ClassLoaderData** list_head = &_head;
1092   ClassLoaderData* next = _head;
1093 
1094   do {
1095     cld->set_next(next);
1096     ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
1097     if (exchanged == next) {
1098       LogTarget(Trace, class, loader, data) lt;
1099       if (lt.is_enabled()) {
1100         ResourceMark rm;
1101         LogStream ls(lt);
1102         ls.print("create ");
1103         cld->print_value_on(&ls);
1104         ls.cr();
1105       }
1106       return cld;
1107     }
1108     next = exchanged;
1109   } while (true);
1110 }
1111 
1112 ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_unsafe_anonymous) {
1113   ClassLoaderData* loader_data = add_to_graph(loader, is_unsafe_anonymous);
1114   // Initialize _name and _name_and_id after the loader data is added to the
1115   // CLDG because adding the Symbol for _name and _name_and_id might safepoint.
1116   if (loader.not_null()) {
1117     loader_data->initialize_name(loader);
1118   }
1119   return loader_data;
1120 }
1121 
1122 void ClassLoaderDataGraph::cld_do(CLDClosure* cl) {
1123   for (ClassLoaderData* cld = _head; cl != NULL && cld != NULL; cld = cld->next()) {
1124     cl->do_cld(cld);
1125   }
1126 }
1127 
1128 void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) {
1129   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1130   // Only walk the newly unloaded CLDs at the head of the list; stop at any CLDs
1131   // left over from a prior unloading (CMS doesn't purge right away).
1132   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1133     assert(cld->is_unloading(), "invariant");
1134     cl->do_cld(cld);
1135   }
1136 }
1137 
1138 void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) {
1139   for (ClassLoaderData* cld = _head;  cld != NULL; cld = cld->_next) {
1140     CLDClosure* closure = cld->keep_alive() ? strong : weak;
1141     if (closure != NULL) {
1142       closure->do_cld(cld);
1143     }
1144   }
1145 }
1146 
1147 void ClassLoaderDataGraph::always_strong_cld_do(CLDClosure* cl) {
1148   if (ClassUnloading) {
1149     roots_cld_do(cl, NULL);
1150   } else {
1151     cld_do(cl);
1152   }
1153 }
1154 
1155 void ClassLoaderDataGraph::classes_do(KlassClosure* klass_closure) {
1156   Thread* thread = Thread::current();
1157   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1158     Handle holder(thread, cld->holder_phantom());
1159     cld->classes_do(klass_closure);
1160   }
1161 }
1162 
1163 void ClassLoaderDataGraph::classes_do(void f(Klass* const)) {
1164   Thread* thread = Thread::current();
1165   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1166     Handle holder(thread, cld->holder_phantom());
1167     cld->classes_do(f);
1168   }
1169 }
1170 
1171 void ClassLoaderDataGraph::methods_do(void f(Method*)) {
1172   Thread* thread = Thread::current();
1173   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1174     Handle holder(thread, cld->holder_phantom());
1175     cld->methods_do(f);
1176   }
1177 }
1178 
1179 void ClassLoaderDataGraph::modules_do(void f(ModuleEntry*)) {
1180   assert_locked_or_safepoint(Module_lock);
1181   Thread* thread = Thread::current();
1182   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1183     Handle holder(thread, cld->holder_phantom());
1184     cld->modules_do(f);
1185   }
1186 }
1187 
1188 void ClassLoaderDataGraph::modules_unloading_do(void f(ModuleEntry*)) {
1189   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1190   // Only walk the newly unloaded CLDs at the head of the list; stop at any CLDs
1191   // left over from a prior unloading (CMS doesn't purge right away).
1192   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1193     assert(cld->is_unloading(), "invariant");
1194     cld->modules_do(f);
1195   }
1196 }
1197 
1198 void ClassLoaderDataGraph::packages_do(void f(PackageEntry*)) {
1199   assert_locked_or_safepoint(Module_lock);
1200   Thread* thread = Thread::current();
1201   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1202     Handle holder(thread, cld->holder_phantom());
1203     cld->packages_do(f);
1204   }
1205 }
1206 
1207 void ClassLoaderDataGraph::packages_unloading_do(void f(PackageEntry*)) {
1208   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1209   // Only walk the newly unloaded CLDs at the head of the list; stop at any CLDs
1210   // left over from a prior unloading (CMS doesn't purge right away).
1211   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1212     assert(cld->is_unloading(), "invariant");
1213     cld->packages_do(f);
1214   }
1215 }
1216 
1217 void ClassLoaderDataGraph::loaded_classes_do(KlassClosure* klass_closure) {
1218   Thread* thread = Thread::current();
1219   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1220     Handle holder(thread, cld->holder_phantom());
1221     cld->loaded_classes_do(klass_closure);
1222   }
1223 }
1224 
1225 void ClassLoaderDataGraph::classes_unloading_do(void f(Klass* const)) {
1226   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1227   // Only walk the newly unloaded CLDs at the head of the list; stop at any CLDs
1228   // left over from a prior unloading (CMS doesn't purge right away).
1229   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1230     assert(cld->is_unloading(), "invariant");
1231     cld->classes_do(f);
1232   }
1233 }
1234 
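     // Convenience iteration over all CLDs that have a dictionary; CLDs for
     // unsafe anonymous classes never create one and are skipped.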
1235 #define FOR_ALL_DICTIONARY(X) for (ClassLoaderData* X = _head; X != NULL; X = X->next()) \
1236                                 if (X->dictionary() != NULL)
1237 
1238 // Walk classes in the loaded class dictionaries in various forms.
1239 // Only walks the classes defined in this class loader.
1240 void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*)) {
1241   Thread* thread = Thread::current();
1242   FOR_ALL_DICTIONARY(cld) {
1243     Handle holder(thread, cld->holder_phantom());
1244     cld->dictionary()->classes_do(f);
1245   }
1246 }
1247 
1248 // Only walks the classes defined in this class loader.
1249 void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*, TRAPS), TRAPS) {
1250   Thread* thread = Thread::current();
1251   FOR_ALL_DICTIONARY(cld) {
1252     Handle holder(thread, cld->holder_phantom());
1253     cld->dictionary()->classes_do(f, CHECK);
1254   }
1255 }
1256 
1257 void ClassLoaderDataGraph::verify_dictionary() {
1258   FOR_ALL_DICTIONARY(cld) {
1259     cld->dictionary()->verify();
1260   }
1261 }
1262 
1263 void ClassLoaderDataGraph::print_dictionary(outputStream* st) {
1264   FOR_ALL_DICTIONARY(cld) {
1265     st->print("Dictionary for ");
1266     cld->print_value_on(st);
1267     st->cr();
1268     cld->dictionary()->print_on(st);
1269     st->cr();
1270   }
1271 }
1272 
1273 void ClassLoaderDataGraph::print_dictionary_statistics(outputStream* st) {
1274   FOR_ALL_DICTIONARY(cld) {
1275     ResourceMark rm;
1276     stringStream tempst;
1277     tempst.print("System Dictionary for %s class loader", cld->loader_name_and_id());
1278     cld->dictionary()->print_table_statistics(st, tempst.as_string());
1279   }
1280 }
1281 
1282 GrowableArray<ClassLoaderData*>* ClassLoaderDataGraph::new_clds() {
1283   assert(_head == NULL || _saved_head != NULL, "remember_new_clds(true) not called?");
1284 
1285   GrowableArray<ClassLoaderData*>* array = new GrowableArray<ClassLoaderData*>();
1286 
1287   // The CLDs in [_head, _saved_head] were all added during last call to remember_new_clds(true);
1288   ClassLoaderData* curr = _head;
1289   while (curr != _saved_head) {
1290     if (!curr->claimed()) {
1291       array->push(curr);
1292       LogTarget(Debug, class, loader, data) lt;
1293       if (lt.is_enabled()) {
1294         LogStream ls(lt);
1295         ls.print("found new CLD: ");
1296         curr->print_value_on(&ls);
1297         ls.cr();
1298       }
1299     }
1300 
1301     curr = curr->_next;
1302   }
1303 
1304   return array;
1305 }
1306 
1307 #ifndef PRODUCT
1308 bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) {
1309   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1310     if (loader_data == data) {
1311       return true;
1312     }
1313   }
1314 
1315   return false;
1316 }
1317 #endif // PRODUCT
1318 
1319 #if INCLUDE_JFR
1320 static Ticks class_unload_time;
1321 static void post_class_unload_event(Klass* const k) {
1322   assert(k != NULL, "invariant");
1323   EventClassUnload event(UNTIMED);
1324   event.set_endtime(class_unload_time);
1325   event.set_unloadedClass(k);
1326   event.set_definingClassLoader(k->class_loader_data());
1327   event.commit();
1328 }
1329 
1330 static void post_class_unload_events() {
1331   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1332   if (Jfr::is_enabled()) {
1333     if (EventClassUnload::is_enabled()) {
1334       class_unload_time = Ticks::now();
1335       ClassLoaderDataGraph::classes_unloading_do(&post_class_unload_event);
1336     }
1337     Jfr::on_unloading_classes();
1338   }
1339 }
1340 #endif // INCLUDE_JFR
1341 
1342 // Move class loader data from main list to the unloaded list for unloading
1343 // and deallocation later.
1344 bool ClassLoaderDataGraph::do_unloading(bool do_cleaning) {
1345 
1346   // Indicate whether safepoint cleanup is needed.
1347   _safepoint_cleanup_needed |= do_cleaning;
1348 
1349   ClassLoaderData* data = _head;
1350   ClassLoaderData* prev = NULL;
1351   bool seen_dead_loader = false;
1352   uint loaders_processed = 0;
1353   uint loaders_removed = 0;
1354 
1355   // Save the previous _unloading pointer for CMS, which may add to the unloading list
1356   // before purging; we don't want to rewalk the previously unloaded class loader data.
1357   _saved_unloading = _unloading;
1358 
1359   data = _head;
1360   while (data != NULL) {
1361     if (data->is_alive()) {
1362       prev = data;
1363       data = data->next();
1364       loaders_processed++;
1365       continue;
1366     }
1367     seen_dead_loader = true;
1368     loaders_removed++;
1369     ClassLoaderData* dead = data;
1370     dead->unload();
1371     data = data->next();
1372     // Remove from loader list.
1373     // This class loader data will no longer be found
1374     // in the ClassLoaderDataGraph.
1375     if (prev != NULL) {
1376       prev->set_next(data);
1377     } else {
1378       assert(dead == _head, "sanity check");
1379       _head = data;
1380     }
1381     dead->set_next(_unloading);
1382     _unloading = dead;
1383   }
1384 
1385   if (seen_dead_loader) {
1386     JFR_ONLY(post_class_unload_events();)
1387   }
1388 
1389   log_debug(class, loader, data)("do_unloading: loaders processed %u, loaders removed %u", loaders_processed, loaders_removed);
1390 
1391   return seen_dead_loader;
1392 }
1393 
1394 // There's at least one dead class loader.  Purge references from healthy modules'
1395 // reads lists and package export lists to modules belonging to dead loaders.
1396 void ClassLoaderDataGraph::clean_module_and_package_info() {
1397   ClassLoaderData* data = _head;
1398   while (data != NULL) {
1399     // Remove entries in the dictionary of a live class loader that have
1400     // initiated loading of classes defined by a dead class loader.
1401     if (data->dictionary() != NULL) {
1402       data->dictionary()->do_unloading();
1403     }
1404     // Walk a ModuleEntry's reads, and a PackageEntry's exports
1405     // lists to determine if there are modules on those lists that are now
1406     // dead and should be removed.  A module's life cycle is equivalent
1407     // to its defining class loader's life cycle.  Since a module is
1408     // considered dead if its class loader is dead, these walks must
1409     // occur after each class loader's aliveness is determined.
1410     if (data->packages() != NULL) {
1411       data->packages()->purge_all_package_exports();
1412     }
1413     if (data->modules_defined()) {
1414       data->modules()->purge_all_module_reads();
1415     }
1416     data = data->next();
1417   }
1418 }
1419 
1420 void ClassLoaderDataGraph::purge() {
1421   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1422   ClassLoaderData* list = _unloading;
1423   _unloading = NULL;
1424   ClassLoaderData* next = list;
1425   bool classes_unloaded = false;
1426   while (next != NULL) {
1427     ClassLoaderData* purge_me = next;
1428     next = purge_me->next();
1429     delete purge_me;
1430     classes_unloaded = true;
1431   }
1432   if (classes_unloaded) {
1433     Metaspace::purge();
1434     set_metaspace_oom(false);
1435   }
1436 }
1437 
1438 int ClassLoaderDataGraph::resize_if_needed() {
1439   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1440   int resized = 0;
1441   if (Dictionary::does_any_dictionary_needs_resizing()) {
1442     FOR_ALL_DICTIONARY(cld) {
1443       if (cld->dictionary()->resize_if_needed()) {
1444         resized++;
1445       }
1446     }
1447   }
1448   return resized;
1449 }
1450 
1451 ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic()
1452     : _next_klass(NULL) {
1453   ClassLoaderData* cld = ClassLoaderDataGraph::_head;
1454   Klass* klass = NULL;
1455 
1456   // Find the first klass in the CLDG.
1457   while (cld != NULL) {
1458     assert_locked_or_safepoint(cld->metaspace_lock());
1459     klass = cld->_klasses;
1460     if (klass != NULL) {
1461       _next_klass = klass;
1462       return;
1463     }
1464     cld = cld->next();
1465   }
1466 }
1467 
1468 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) {
1469   Klass* next = klass->next_link();
1470   if (next != NULL) {
1471     return next;
1472   }
1473 
1474   // No more klasses in the current CLD. Time to find a new CLD.
1475   ClassLoaderData* cld = klass->class_loader_data();
1476   assert_locked_or_safepoint(cld->metaspace_lock());
1477   while (next == NULL) {
1478     cld = cld->next();
1479     if (cld == NULL) {
1480       break;
1481     }
1482     next = cld->_klasses;
1483   }
1484 
1485   return next;
1486 }
1487 
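     // Hand out the next klass to the caller. Multiple GC worker threads may
     // call next_klass() concurrently; the CAS on _next_klass ensures each
     // klass is handed out exactly once.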
1488 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
1489   Klass* head = _next_klass;
1490 
1491   while (head != NULL) {
1492     Klass* next = next_klass_in_cldg(head);
1493 
1494     Klass* old_head = Atomic::cmpxchg(next, &_next_klass, head);
1495 
1496     if (old_head == head) {
1497       return head; // Won the CAS.
1498     }
1499 
1500     head = old_head;
1501   }
1502 
1503   // Nothing more for the iterator to hand out.
1504   assert(head == NULL, "head is " PTR_FORMAT ", expected NULL", p2i(head));
1505   return NULL;
1506 }
1507 
1508 ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
1509   _data = ClassLoaderDataGraph::_head;
1510 }
1511 
1512 ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}
1513 
1514 #ifndef PRODUCT
1515 // callable from debugger
1516 extern "C" int print_loader_data_graph() {
1517   ResourceMark rm;
1518   ClassLoaderDataGraph::print_on(tty);
1519   return 0;
1520 }
1521 
1522 void ClassLoaderDataGraph::verify() {
1523   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1524     data->verify();
1525   }
1526 }
1527 
1528 void ClassLoaderDataGraph::print_on(outputStream * const out) {
1529   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1530     data->print_on(out);
1531   }
1532 }
1533 #endif // PRODUCT