/*
 * Copyright (c) 2012, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// A ClassLoaderData identifies the full set of class types that a class
// loader's name resolution strategy produces for a given configuration of the
// class loader.
// Class types in the ClassLoaderData may be defined from class file binaries
// provided by the class loader, or by other class loaders it interacts with
// according to its name resolution strategy.
//
// Class loaders that implement a deterministic name resolution strategy
// (including with respect to their delegation behavior), such as the boot, the
// platform, and the system loaders of the JDK's built-in class loader
// hierarchy, always produce the same linkset for a given configuration.
//
// ClassLoaderData carries information related to a linkset (e.g.,
// metaspace holding its klass definitions).
// The System Dictionary and related data structures (e.g., placeholder table,
// loader constraints table) as well as the runtime representation of classes
// only reference ClassLoaderData.
//
// Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData that
// represents the loader's "linking domain" in the JVM.
//
// The bootstrap loader (represented by NULL) also has a ClassLoaderData,
// the singleton returned by the_null_class_loader_data().
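//
// Illustrative sketch (an assumption for readers, not a statement of this
// file's API surface): given a java.lang.ClassLoader oop, its CLD is usually
// reached through the accessors in classLoaderData.inline.hpp, e.g.
// ClassLoaderData::class_loader_data(loader), while the bootstrap loader's
// CLD is the singleton ClassLoaderData::the_null_class_loader_data().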

#include "precompiled.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/classLoaderDataGraph.inline.hpp"
#include "classfile/dictionary.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/packageEntry.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/iterator.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/access.inline.hpp"
#include "oops/array.hpp"
#include "oops/oop.inline.hpp"
#include "oops/oopHandle.inline.hpp"
#include "oops/weakHandle.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/mutex.hpp"
#include "runtime/safepoint.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
#include "utilities/ostream.hpp"


#if INCLUDE_CDS_JAVA_HEAP
// Support for archiving full module graph in CDS

class ArchivedClassLoaderData {
  Array<PackageEntry*>* _packages;
  Array<ModuleEntry*>* _modules;

public:
  ArchivedClassLoaderData() : _packages(NULL), _modules(NULL) {}

  void allocate(ClassLoaderData* loader_data);
  void init_archived_entries(ClassLoaderData* loader_data);
  void init_archived_oops(ClassLoaderData* loader_data);

  void serialize(SerializeClosure* f) {
    f->do_ptr((void**)&_packages);
    f->do_ptr((void**)&_modules);
  }

  void load_archived_entries(ClassLoaderData* loader_data);
  void restore_archived_oops(ClassLoaderData* loader_data);
};

static ArchivedClassLoaderData _archived_boot_loader_data;
static ArchivedClassLoaderData _archived_platform_loader_data;
static ArchivedClassLoaderData _archived_system_loader_data;
static ModuleEntry* _archived_javabase_moduleEntry = NULL;

void ArchivedClassLoaderData::allocate(ClassLoaderData* loader_data) {
  assert(DumpSharedSpaces, "must be");

  // We can't create a hashtable at dump time because the hashcode depends on the
  // addresses of the Symbols, which may be relocated at run time due to ASLR.
  if (loader_data) {
    _packages = loader_data->packages()->allocate_archived_entries();
    _modules  = loader_data->modules() ->allocate_archived_entries();
  }
}

void ArchivedClassLoaderData::init_archived_entries(ClassLoaderData* loader_data) {
  assert(DumpSharedSpaces, "must be");
  if (loader_data) {
    loader_data->packages()->init_archived_entries(_packages);
    loader_data->modules() ->init_archived_entries(_modules);
  }
}

void ArchivedClassLoaderData::init_archived_oops(ClassLoaderData* loader_data) {
  assert(DumpSharedSpaces, "must be");
  if (loader_data) {
    loader_data->modules()->init_archived_oops(_modules);
  }
}

void ArchivedClassLoaderData::load_archived_entries(ClassLoaderData* loader_data) {
  assert(UseSharedSpaces, "must be");
  if (_modules) {
    loader_data->modules()->load_archived_entries(loader_data, _modules);
    loader_data->packages()->load_archived_entries(_packages);
  }
}

void ArchivedClassLoaderData::restore_archived_oops(ClassLoaderData* loader_data) {
  assert(UseSharedSpaces, "must be");
  if (_modules) {
    loader_data->modules()->restore_archived_oops(loader_data, _modules);
  }
}

// ------------------------------

void ClassLoaderData::allocate_archived_tables() {
  assert(DumpSharedSpaces, "must be");
  if (MetaspaceShared::use_full_module_graph()) {
    _archived_boot_loader_data.allocate    (_the_null_class_loader_data);
    _archived_platform_loader_data.allocate(class_loader_data_or_null(SystemDictionary::java_platform_loader()));
    _archived_system_loader_data.allocate  (class_loader_data_or_null(SystemDictionary::java_system_loader()));
  }
}

void ClassLoaderData::init_archived_tables() {
  assert(DumpSharedSpaces, "must be");
  if (MetaspaceShared::use_full_module_graph()) {
    _archived_boot_loader_data.init_archived_entries    (_the_null_class_loader_data);
    _archived_platform_loader_data.init_archived_entries(class_loader_data_or_null(SystemDictionary::java_platform_loader()));
    _archived_system_loader_data.init_archived_entries  (class_loader_data_or_null(SystemDictionary::java_system_loader()));
    _archived_javabase_moduleEntry = ModuleEntry::get_archived_entry(ModuleEntryTable::javabase_moduleEntry());
  }
}

void ClassLoaderData::init_archived_oops() {
  assert(DumpSharedSpaces, "must be");
  if (MetaspaceShared::use_full_module_graph()) {
    _archived_boot_loader_data.init_archived_oops    (_the_null_class_loader_data);
    _archived_platform_loader_data.init_archived_oops(class_loader_data_or_null(SystemDictionary::java_platform_loader()));
    _archived_system_loader_data.init_archived_oops  (class_loader_data_or_null(SystemDictionary::java_system_loader()));
  }
}

void ClassLoaderData::serialize(class SerializeClosure* f) {
  _archived_boot_loader_data.serialize(f);
  _archived_platform_loader_data.serialize(f);
  _archived_system_loader_data.serialize(f);
  f->do_ptr((void**)&_archived_javabase_moduleEntry);

  if (f->reading() && MetaspaceShared::use_full_module_graph()) {
    // Must be done before ClassLoader::create_javabase()
    _archived_boot_loader_data.load_archived_entries(_the_null_class_loader_data);
    ModuleEntryTable::set_javabase_moduleEntry(_archived_javabase_moduleEntry);
    log_info(cds)("use_full_module_graph = true; java.base = " INTPTR_FORMAT,
                  p2i(_archived_javabase_moduleEntry));
  }
}

void ClassLoaderData::restore_archived_oops_for_null_class_loader_data() {
  assert(UseSharedSpaces, "must be");
  if (MetaspaceShared::use_full_module_graph()) {
    _archived_boot_loader_data.restore_archived_oops(_the_null_class_loader_data);
  }
}

void ClassLoaderData::restore_java_platform_loader_from_archive() {
  assert(UseSharedSpaces, "must be");
  assert(MetaspaceShared::use_full_module_graph(), "must be");
  assert(class_loader() == SystemDictionary::java_platform_loader(), "must be");
  _archived_platform_loader_data.load_archived_entries(this);
  _archived_platform_loader_data.restore_archived_oops(this);
}

void ClassLoaderData::restore_java_system_loader_from_archive() {
  assert(UseSharedSpaces, "must be");
  assert(MetaspaceShared::use_full_module_graph(), "must be");
  assert(class_loader() == SystemDictionary::java_system_loader(), "must be");
  _archived_system_loader_data.load_archived_entries(this);
  _archived_system_loader_data.restore_archived_oops(this);
}

#endif // INCLUDE_CDS_JAVA_HEAP

ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;

void ClassLoaderData::init_null_class_loader_data() {
  assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");

  _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");

  LogTarget(Trace, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("create ");
    _the_null_class_loader_data->print_value_on(&ls);
    ls.cr();
  }
}
 238 
 239 // Obtain and set the class loader's name within the ClassLoaderData so
 240 // it will be available for error messages, logging, JFR, etc.  The name
 241 // and klass are available after the class_loader oop is no longer alive,
 242 // during unloading.
 243 void ClassLoaderData::initialize_name(Handle class_loader) {
 244   Thread* THREAD = Thread::current();
 245   ResourceMark rm(THREAD);
 246 
 247   // Obtain the class loader's name.  If the class loader's name was not
 248   // explicitly set during construction, the CLD's _name field will be null.
 249   oop cl_name = java_lang_ClassLoader::name(class_loader());
 250   if (cl_name != NULL) {
 251     const char* cl_instance_name = java_lang_String::as_utf8_string(cl_name);
 252 
 253     if (cl_instance_name != NULL && cl_instance_name[0] != '\0') {
 254       _name = SymbolTable::new_symbol(cl_instance_name);
 255     }
 256   }
 257 
 258   // Obtain the class loader's name and identity hash.  If the class loader's
 259   // name was not explicitly set during construction, the class loader's name and id
 260   // will be set to the qualified class name of the class loader along with its
 261   // identity hash.
 262   // If for some reason the ClassLoader's constructor has not been run, instead of
 263   // leaving the _name_and_id field null, fall back to the external qualified class
 264   // name.  Thus CLD's _name_and_id field should never have a null value.
 265   oop cl_name_and_id = java_lang_ClassLoader::nameAndId(class_loader());
 266   const char* cl_instance_name_and_id =
 267                   (cl_name_and_id == NULL) ? _class_loader_klass->external_name() :
 268                                              java_lang_String::as_utf8_string(cl_name_and_id);
 269   assert(cl_instance_name_and_id != NULL && cl_instance_name_and_id[0] != '\0', "class loader has no name and id");
 270   _name_and_id = SymbolTable::new_symbol(cl_instance_name_and_id);
 271 }

ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool has_class_mirror_holder) :
  _metaspace(NULL),
  _metaspace_lock(new Mutex(Mutex::leaf+1, "Metaspace allocation lock", true,
                            Mutex::_safepoint_check_never)),
  _unloading(false), _has_class_mirror_holder(has_class_mirror_holder),
  _modified_oops(true), _accumulated_modified_oops(false),
  // The loader data of a non-strong hidden class or unsafe anonymous class doesn't have
  // anything to keep it from being unloaded while the class is being parsed.
  // The null-class-loader should always be kept alive.
  _keep_alive((has_class_mirror_holder || h_class_loader.is_null()) ? 1 : 0),
  _claim(0),
  _handles(),
  _klasses(NULL), _packages(NULL), _modules(NULL), _unnamed_module(NULL), _dictionary(NULL),
  _jmethod_ids(NULL),
  _deallocate_list(NULL),
  _next(NULL),
  _class_loader_klass(NULL), _name(NULL), _name_and_id(NULL) {

  if (!h_class_loader.is_null()) {
    _class_loader = _handles.add(h_class_loader());
    _class_loader_klass = h_class_loader->klass();
    initialize_name(h_class_loader);
  }

  if (!has_class_mirror_holder) {
    // The holder is initialized later for non-strong hidden classes and unsafe anonymous classes,
    // and before calling anything that calls class_loader().
    initialize_holder(h_class_loader);

    // A ClassLoaderData created solely for a non-strong hidden class or unsafe anonymous class should
    // never have a ModuleEntryTable or PackageEntryTable created for it. The defining package
    // and module for an unsafe anonymous class will be found in its host class.
    _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
    if (h_class_loader.is_null()) {
      // Create unnamed module for boot loader
      _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
    } else {
      // Create unnamed module for all other loaders
      _unnamed_module = ModuleEntry::create_unnamed_module(this);
    }
    _dictionary = create_dictionary();
  }

  NOT_PRODUCT(_dependency_count = 0); // number of class loader dependencies

  JFR_ONLY(INIT_ID(this);)
}

ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
  Chunk* c = _head;
  while (c != NULL) {
    Chunk* next = c->_next;
    delete c;
    c = next;
  }
}

OopHandle ClassLoaderData::ChunkedHandleList::add(oop o) {
  if (_head == NULL || _head->_size == Chunk::CAPACITY) {
    Chunk* next = new Chunk(_head);
    Atomic::release_store(&_head, next);
  }
  oop* handle = &_head->_data[_head->_size];
  NativeAccess<IS_DEST_UNINITIALIZED>::oop_store(handle, o);
  Atomic::release_store(&_head->_size, _head->_size + 1);
  return OopHandle(handle);
}

int ClassLoaderData::ChunkedHandleList::count() const {
  int count = 0;
  Chunk* chunk = _head;
  while (chunk != NULL) {
    count += chunk->_size;
    chunk = chunk->_next;
  }
  return count;
}

inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
  for (juint i = 0; i < size; i++) {
    if (c->_data[i] != NULL) {
      f->do_oop(&c->_data[i]);
    }
  }
}

void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
  Chunk* head = Atomic::load_acquire(&_head);
  if (head != NULL) {
    // Must be careful when reading size of head
    oops_do_chunk(f, head, Atomic::load_acquire(&head->_size));
    for (Chunk* c = head->_next; c != NULL; c = c->_next) {
      oops_do_chunk(f, c, c->_size);
    }
  }
}
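
// Note on the lock-free protocol above (descriptive, added for clarity):
// add() publishes a new handle by storing the oop first and only then
// release-storing the incremented _size (and, for a fresh chunk, the new
// _head), so a reader that load_acquires _head and _head->_size is guaranteed
// to observe fully initialized slots; chunks behind _head no longer grow, so
// their _size can be read without an acquire barrier.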

class VerifyContainsOopClosure : public OopClosure {
  oop  _target;
  bool _found;

 public:
  VerifyContainsOopClosure(oop target) : _target(target), _found(false) {}

  void do_oop(oop* p) {
    if (p != NULL && NativeAccess<AS_NO_KEEPALIVE>::oop_load(p) == _target) {
      _found = true;
    }
  }

  void do_oop(narrowOop* p) {
    // The ChunkedHandleList should not contain any narrowOop
    ShouldNotReachHere();
  }

  bool found() const {
    return _found;
  }
};

bool ClassLoaderData::ChunkedHandleList::contains(oop p) {
  VerifyContainsOopClosure cl(p);
  oops_do(&cl);
  return cl.found();
}

#ifndef PRODUCT
bool ClassLoaderData::ChunkedHandleList::owner_of(oop* oop_handle) {
  Chunk* chunk = _head;
  while (chunk != NULL) {
    if (&(chunk->_data[0]) <= oop_handle && oop_handle < &(chunk->_data[chunk->_size])) {
      return true;
    }
    chunk = chunk->_next;
  }
  return false;
}
#endif // PRODUCT

void ClassLoaderData::clear_claim(int claim) {
  for (;;) {
    int old_claim = Atomic::load(&_claim);
    if ((old_claim & claim) == 0) {
      return;
    }
    int new_claim = old_claim & ~claim;
    if (Atomic::cmpxchg(&_claim, old_claim, new_claim) == old_claim) {
      return;
    }
  }
}

bool ClassLoaderData::try_claim(int claim) {
  for (;;) {
    int old_claim = Atomic::load(&_claim);
    if ((old_claim & claim) == claim) {
      return false;
    }
    int new_claim = old_claim | claim;
    if (Atomic::cmpxchg(&_claim, old_claim, new_claim) == old_claim) {
      return true;
    }
  }
}
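
// Illustrative sketch of how the claim bits are typically used (hedged; the
// exact call sites live in the GC and in ClassLoaderDataGraph, not here):
//
//   // A GC visitor tags each CLD it has processed so a CLD reachable from
//   // several roots is handed to the closure only once per phase:
//   //   cld->oops_do(closure, ClassLoaderData::_claim_strong);
//   // and the marks are wiped between phases, conceptually via
//   //   ClassLoaderDataGraph::clear_claimed_marks();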

// Non-strong hidden classes and unsafe anonymous classes have their own ClassLoaderData that
// is marked to be kept alive while the class is being parsed, and if the class appears on the
// module fixup list. Because no other class shares a hidden or unsafe anonymous class' name or
// ClassLoaderData, no other non-GC thread has knowledge of the class while it is being defined,
// so _keep_alive does not need to be volatile or atomic.
void ClassLoaderData::inc_keep_alive() {
  if (has_class_mirror_holder()) {
    assert(_keep_alive > 0, "Invalid keep alive increment count");
    _keep_alive++;
  }
}

void ClassLoaderData::dec_keep_alive() {
  if (has_class_mirror_holder()) {
    assert(_keep_alive > 0, "Invalid keep alive decrement count");
    _keep_alive--;
  }
}
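
// Illustrative usage (an assumption about call sites outside this file): the
// class file parser brackets the definition of a non-strong hidden or unsafe
// anonymous class with inc_keep_alive()/dec_keep_alive() on its dedicated CLD
// so the CLD cannot be unloaded before the class mirror/holder is in place.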

void ClassLoaderData::oops_do(OopClosure* f, int claim_value, bool clear_mod_oops) {
  if (claim_value != ClassLoaderData::_claim_none && !try_claim(claim_value)) {
    return;
  }

  // Only clear modified_oops after the ClassLoaderData is claimed.
  if (clear_mod_oops) {
    clear_modified_oops();
  }

  _handles.oops_do(f);
}

void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    klass_closure->do_klass(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::classes_do(void f(Klass * const)) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    f(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::methods_do(void f(Method*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
      InstanceKlass::cast(k)->methods_do(f);
    }
  }
}

void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    // Do not filter ArrayKlass oops here...
    if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
#ifdef ASSERT
      oop m = k->java_mirror();
      assert(m != NULL, "NULL mirror");
      assert(m->is_a(SystemDictionary::Class_klass()), "invalid mirror");
#endif
      klass_closure->do_klass(k);
    }
  }
}

void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass()) {
      f(InstanceKlass::cast(k));
    }
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_unnamed_module != NULL) {
    f(_unnamed_module);
  }
  if (_modules != NULL) {
    for (int i = 0; i < _modules->table_size(); i++) {
      for (ModuleEntry* entry = _modules->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

void ClassLoaderData::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_packages != NULL) {
    for (int i = 0; i < _packages->table_size(); i++) {
      for (PackageEntry* entry = _packages->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

void ClassLoaderData::record_dependency(const Klass* k) {
  assert(k != NULL, "invariant");

  ClassLoaderData * const from_cld = this;
  ClassLoaderData * const to_cld = k->class_loader_data();

  // Do not need to record dependency if the dependency is to a class whose
  // class loader data is never freed.  (i.e. the dependency's class loader
  // is one of the three builtin class loaders and the dependency's class
  // loader data has a ClassLoader holder, not a Class holder.)
  if (to_cld->is_permanent_class_loader_data()) {
    return;
  }

  oop to;
  if (to_cld->has_class_mirror_holder()) {
    // Just return if a non-strong hidden class or unsafe anonymous class is attempting to record a dependency
    // to itself.  (Note that every non-strong hidden class or unsafe anonymous class has its own unique class
    // loader data.)
    if (to_cld == from_cld) {
      return;
    }
    // Hidden and unsafe anonymous class dependencies are through the mirror.
    to = k->java_mirror();
  } else {
    to = to_cld->class_loader();
    oop from = from_cld->class_loader();

    // Just return if this dependency is to a class with the same or a parent
    // class_loader.
    if (from == to || java_lang_ClassLoader::isAncestor(from, to)) {
      return; // this class loader is in the parent list, no need to add it.
    }
  }

  // It's a dependency we won't find through GC, add it.
  if (!_handles.contains(to)) {
    NOT_PRODUCT(Atomic::inc(&_dependency_count));
    LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("adding dependency from ");
      print_value_on(&ls);
      ls.print(" to ");
      to_cld->print_value_on(&ls);
      ls.cr();
    }
    Handle dependency(Thread::current(), to);
    add_handle(dependency);
    // Added a potentially young gen oop to the ClassLoaderData
    record_modified_oops();
  }
}
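
// Worked example for record_dependency() (illustrative; the loader names are
// made up): if a class defined by the app loader links against a class defined
// by a child "plugin" loader, the plugin loader is not reachable through the
// app loader's parent chain, so the plugin ClassLoader oop (or the mirror, for
// a class-mirror-holder CLD) is stored in this CLD's _handles, keeping the
// dependency's ClassLoaderData alive at least as long as this one.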

void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
  {
    MutexLocker ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    Klass* old_value = _klasses;
    k->set_next_link(old_value);
    // Link the new item into the list, making sure the linked class is stable
    // since the list can be walked without a lock
    Atomic::release_store(&_klasses, k);
    if (k->is_array_klass()) {
      ClassLoaderDataGraph::inc_array_classes(1);
    } else {
      ClassLoaderDataGraph::inc_instance_classes(1);
    }
  }

  if (publicize) {
    LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), k->external_name());
      print_value_on(&ls);
      ls.cr();
    }
  }
}

void ClassLoaderData::initialize_holder(Handle loader_or_mirror) {
  if (loader_or_mirror() != NULL) {
    assert(_holder.is_null(), "never replace holders");
    _holder = WeakHandle(Universe::vm_weak(), loader_or_mirror);
  }
}

// Remove a klass from the _klasses list for scratch_class during redefinition
// or parsed class in the case of an error.
void ClassLoaderData::remove_class(Klass* scratch_class) {
  assert_locked_or_safepoint(ClassLoaderDataGraph_lock);

  // Adjust global class iterator.
  ClassLoaderDataGraph::adjust_saved_class(scratch_class);

  Klass* prev = NULL;
  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    if (k == scratch_class) {
      if (prev == NULL) {
        _klasses = k->next_link();
      } else {
        Klass* next = k->next_link();
        prev->set_next_link(next);
      }

      if (k->is_array_klass()) {
        ClassLoaderDataGraph::dec_array_classes(1);
      } else {
        ClassLoaderDataGraph::dec_instance_classes(1);
      }

      return;
    }
    prev = k;
    assert(k != k->next_link(), "no loops!");
  }
  ShouldNotReachHere();   // should have found this class!!
}

void ClassLoaderData::unload() {
  _unloading = true;

  LogTarget(Trace, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("unload");
    print_value_on(&ls);
    ls.cr();
  }

  // Some items on the _deallocate_list need to free their C heap structures
  // if they are not already on the _klasses list.
  free_deallocate_list_C_heap_structures();

  // Clean up class dependencies and tell serviceability tools
  // these classes are unloading.  Must be called
  // after erroneous classes are released.
  classes_do(InstanceKlass::unload_class);

  // Clean up global class iterator for compiler
  ClassLoaderDataGraph::adjust_saved_class(this);
}

ModuleEntryTable* ClassLoaderData::modules() {
  // Lazily create the module entry table at first request.
  // Lock-free access requires load_acquire.
  ModuleEntryTable* modules = Atomic::load_acquire(&_modules);
  if (modules == NULL) {
    MutexLocker m1(Module_lock);
    // Check if _modules got allocated while we were waiting for this lock.
    if ((modules = _modules) == NULL) {
      modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);

      {
        MutexLocker m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
        // Ensure _modules is stable, since it is examined without a lock
        Atomic::release_store(&_modules, modules);
      }
    }
  }
  return modules;
}
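
// Descriptive note on the lazy creation above: this is the double-checked
// locking pattern used throughout this file. Readers load_acquire the field;
// the writer constructs the table under Module_lock and publishes it with a
// release_store while holding the metaspace lock, so a lock-free reader either
// sees NULL or a fully constructed ModuleEntryTable, never a partial one.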

const int _boot_loader_dictionary_size    = 1009;
const int _default_loader_dictionary_size = 107;

Dictionary* ClassLoaderData::create_dictionary() {
  assert(!has_class_mirror_holder(), "class mirror holder cld does not have a dictionary");
  int size;
  bool resizable = false;
  if (_the_null_class_loader_data == NULL) {
    size = _boot_loader_dictionary_size;
    resizable = true;
  } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
    size = 1;  // there's only one class in a reflection delegating class loader and no initiated classes
  } else if (is_system_class_loader_data()) {
    size = _boot_loader_dictionary_size;
    resizable = true;
  } else {
    size = _default_loader_dictionary_size;
    resizable = true;
  }
  if (!DynamicallyResizeSystemDictionaries || DumpSharedSpaces) {
    resizable = false;
  }
  return new Dictionary(this, size, resizable);
}

// Tell the GC to keep this klass alive while iterating ClassLoaderDataGraph
oop ClassLoaderData::holder_phantom() const {
  // A klass that was previously considered dead can be looked up in the
  // CLD/SD, and its _java_mirror or _class_loader can be stored in a root
  // or a reachable object making it alive again. The SATB part of G1 needs
  // to get notified about this potential resurrection, otherwise the marking
  // might not find the object.
  if (!_holder.is_null()) {  // NULL class_loader
    return _holder.resolve();
  } else {
    return NULL;
  }
}

// Let the GC read the holder without keeping it alive.
oop ClassLoaderData::holder_no_keepalive() const {
  if (!_holder.is_null()) {  // NULL class_loader
    return _holder.peek();
  } else {
    return NULL;
  }
}
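
// Descriptive note: holder_phantom() resolves the WeakHandle and thereby keeps
// (or, for SATB collectors, resurrects) the holder, whereas
// holder_no_keepalive() merely peeks at it. is_alive() below relies on the
// peek so that a liveness check does not itself keep the loader alive.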

// Unloading support
bool ClassLoaderData::is_alive() const {
  bool alive = keep_alive()         // null class loader and incomplete non-strong hidden class or unsafe anonymous class.
      || (_holder.peek() != NULL);  // and not cleaned by the GC weak handle processing.

  return alive;
}

class ReleaseKlassClosure: public KlassClosure {
private:
  size_t  _instance_class_released;
  size_t  _array_class_released;
public:
  ReleaseKlassClosure() : _instance_class_released(0), _array_class_released(0) { }

  size_t instance_class_released() const { return _instance_class_released; }
  size_t array_class_released()    const { return _array_class_released;    }

  void do_klass(Klass* k) {
    if (k->is_array_klass()) {
      _array_class_released ++;
    } else {
      assert(k->is_instance_klass(), "Must be");
      _instance_class_released ++;
    }
    k->release_C_heap_structures();
  }
};

ClassLoaderData::~ClassLoaderData() {
  // Release C heap structures for all the classes.
  ReleaseKlassClosure cl;
  classes_do(&cl);

  ClassLoaderDataGraph::dec_array_classes(cl.array_class_released());
  ClassLoaderDataGraph::dec_instance_classes(cl.instance_class_released());

  // Release the WeakHandle
  _holder.release(Universe::vm_weak());

  // Release C heap allocated hashtable for all the packages.
  if (_packages != NULL) {
    // Destroy the table itself
    delete _packages;
    _packages = NULL;
  }

  // Release C heap allocated hashtable for all the modules.
  if (_modules != NULL) {
    // Destroy the table itself
    delete _modules;
    _modules = NULL;
  }

  // Release C heap allocated hashtable for the dictionary
  if (_dictionary != NULL) {
    // Destroy the table itself
    delete _dictionary;
    _dictionary = NULL;
  }

  if (_unnamed_module != NULL) {
    _unnamed_module->delete_unnamed_module();
    _unnamed_module = NULL;
  }

  // release the metaspace
  ClassLoaderMetaspace *m = _metaspace;
  if (m != NULL) {
    _metaspace = NULL;
    delete m;
  }
  // Clear all the JNI handles for methods
  // These aren't deallocated and will look like a leak, but that's necessary:
  // we can't get rid of jmethodIDs because we don't know when native code will
  // stop using them.  The spec says they're "invalid", but existing programs
  // likely rely on them being NULL after class unloading.
  if (_jmethod_ids != NULL) {
    Method::clear_jmethod_ids(this);
  }
  // Delete lock
  delete _metaspace_lock;

  // Delete free list
  if (_deallocate_list != NULL) {
    delete _deallocate_list;
  }

  // Decrement refcounts of Symbols if created.
  if (_name != NULL) {
    _name->decrement_refcount();
  }
  if (_name_and_id != NULL) {
    _name_and_id->decrement_refcount();
  }
}

// Returns true if this class loader data is for the app class loader
// or a user-defined system class loader.  (Note that the class loader
// data may have a Class holder.)
bool ClassLoaderData::is_system_class_loader_data() const {
  return SystemDictionary::is_system_class_loader(class_loader());
}

// Returns true if this class loader data is for the platform class loader.
// (Note that the class loader data may have a Class holder.)
bool ClassLoaderData::is_platform_class_loader_data() const {
  return SystemDictionary::is_platform_class_loader(class_loader());
}

// Returns true if the class loader for this class loader data is one of
// the 3 builtin (boot, application/system, or platform) class loaders,
// including a user-defined system class loader.  Note that if the class
// loader data is for a non-strong hidden class or unsafe anonymous class then it may
// get freed by a GC even if its class loader is one of these loaders.
bool ClassLoaderData::is_builtin_class_loader_data() const {
  return (is_boot_class_loader_data() ||
          SystemDictionary::is_system_class_loader(class_loader()) ||
          SystemDictionary::is_platform_class_loader(class_loader()));
}

// Returns true if this class loader data is a class loader data
// that is not ever freed by a GC.  It must be the CLD for one of the builtin
// class loaders and not the CLD for a non-strong hidden class or unsafe anonymous class.
bool ClassLoaderData::is_permanent_class_loader_data() const {
  return is_builtin_class_loader_data() && !has_class_mirror_holder();
}

ClassLoaderMetaspace* ClassLoaderData::metaspace_non_null() {
  // If the metaspace has not been allocated, create a new one.  Might want
  // to create smaller arena for Reflection class loaders also.
  // The reason for the delayed allocation is because some class loaders are
  // simply for delegating with no metadata of their own.
  // Lock-free access requires load_acquire.
  ClassLoaderMetaspace* metaspace = Atomic::load_acquire(&_metaspace);
  if (metaspace == NULL) {
    MutexLocker ml(_metaspace_lock,  Mutex::_no_safepoint_check_flag);
    // Check if _metaspace got allocated while we were waiting for this lock.
    if ((metaspace = _metaspace) == NULL) {
      if (this == the_null_class_loader_data()) {
        assert (class_loader() == NULL, "Must be");
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
      } else if (has_class_mirror_holder()) {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ClassMirrorHolderMetaspaceType);
      } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
      } else {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
      }
      // Ensure _metaspace is stable, since it is examined without a lock
      Atomic::release_store(&_metaspace, metaspace);
    }
  }
  return metaspace;
}

OopHandle ClassLoaderData::add_handle(Handle h) {
  MutexLocker ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
  record_modified_oops();
  return _handles.add(h());
}

void ClassLoaderData::remove_handle(OopHandle h) {
  assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
  oop* ptr = h.ptr_raw();
  if (ptr != NULL) {
    assert(_handles.owner_of(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
    NativeAccess<>::oop_store(ptr, oop(NULL));
  }
}

void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
  MutexLocker ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
  if (dest.resolve() != NULL) {
    return;
  } else {
    dest = _handles.add(h());
  }
}

// Add this metadata pointer to be freed when it's safe.  This is only during
// a safepoint which checks if handles point to this metadata field.
void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
  // Metadata in shared region isn't deleted.
  if (!m->is_shared()) {
    MutexLocker ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
    if (_deallocate_list == NULL) {
      _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, mtClass);
    }
    _deallocate_list->append_if_missing(m);
    log_debug(class, loader, data)("deallocate added for %s", m->print_value_string());
    ClassLoaderDataGraph::set_should_clean_deallocate_lists();
  }
}

// Deallocate free metadata on the free list.  How useful the PermGen was!
void ClassLoaderData::free_deallocate_list() {
  // This must be called at a safepoint because it depends on metadata walking at
  // safepoint cleanup time.
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        ShouldNotReachHere();
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
      // Some cleaning remains.
      ClassLoaderDataGraph::set_should_clean_deallocate_lists();
    }
  }
}

// This is distinct from free_deallocate_list.  For class loader data that are
// unloading, this frees the C heap memory for items on the list, and unlinks
// scratch or error classes so that unloading events aren't triggered for these
// classes. The metadata is removed with the unloading metaspace.
// There isn't C heap memory allocated for methods, so nothing is done for them.
void ClassLoaderData::free_deallocate_list_C_heap_structures() {
  assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
  assert(is_unloading(), "only called for ClassLoaderData that are unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    _deallocate_list->remove_at(i);
    if (m->is_constantPool()) {
      ((ConstantPool*)m)->release_C_heap_structures();
    } else if (m->is_klass()) {
      InstanceKlass* ik = (InstanceKlass*)m;
      // also releases ik->constants() C heap memory
      ik->release_C_heap_structures();
      // Remove the class so unloading events aren't triggered for
      // this class (scratch or error class) in do_unloading().
      remove_class(ik);
    }
  }
}

// Caller needs ResourceMark
// If the class loader's _name has not been explicitly set, the class loader's
// qualified class name is returned.
const char* ClassLoaderData::loader_name() const {
  if (_class_loader_klass == NULL) {
    return BOOTSTRAP_LOADER_NAME;
  } else if (_name != NULL) {
    return _name->as_C_string();
  } else {
    return _class_loader_klass->external_name();
  }
}

// Caller needs ResourceMark
// Format of the _name_and_id is as follows:
//   If the defining loader has a name explicitly set then '<loader-name>' @<id>
//   If the defining loader has no name then <qualified-class-name> @<id>
//   If built-in loader, then omit '@<id>' as there is only one instance.
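// Illustrative examples (assumed values, not taken from a running VM):
//   'MyLoader' @4aa298b7               - loader constructed with a name
//   com.example.PluginLoader @1b6d3586 - unnamed user-defined loader
//   'app'                              - built-in application loader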
const char* ClassLoaderData::loader_name_and_id() const {
  if (_class_loader_klass == NULL) {
    return "'" BOOTSTRAP_LOADER_NAME "'";
  } else if (_name_and_id != NULL) {
    return _name_and_id->as_C_string();
  } else {
    // May be called in a race before _name_and_id is initialized.
    return _class_loader_klass->external_name();
  }
}

void ClassLoaderData::print_value_on(outputStream* out) const {
  if (!is_unloading() && class_loader() != NULL) {
    out->print("loader data: " INTPTR_FORMAT " for instance ", p2i(this));
    class_loader()->print_value_on(out);  // includes loader_name_and_id() and address of class loader instance
  } else {
    // loader data: 0xsomeaddr of 'bootstrap'
    out->print("loader data: " INTPTR_FORMAT " of %s", p2i(this), loader_name_and_id());
  }
  if (_has_class_mirror_holder) {
    out->print(" has a class holder");
  }
}

void ClassLoaderData::print_value() const { print_value_on(tty); }

#ifndef PRODUCT
void ClassLoaderData::print_on(outputStream* out) const {
  out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: %s {",
              p2i(this), p2i(_class_loader.ptr_raw()), loader_name_and_id());
  if (has_class_mirror_holder()) out->print(" has a class holder");
  if (claimed()) out->print(" claimed");
  if (is_unloading()) out->print(" unloading");
  out->print(" metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));

  if (_jmethod_ids != NULL) {
    Method::print_jmethod_ids(this, out);
  }
  out->print(" handles count %d", _handles.count());
  out->print(" dependencies %d", _dependency_count);
  out->print_cr("}");
}
#endif // PRODUCT

void ClassLoaderData::print() const { print_on(tty); }

void ClassLoaderData::verify() {
  assert_locked_or_safepoint(_metaspace_lock);
  oop cl = class_loader();

  guarantee(this == class_loader_data(cl) || has_class_mirror_holder(), "Must be the same");
  guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || has_class_mirror_holder(), "must be");

  // Verify the integrity of the allocated space.
  if (metaspace_or_null() != NULL) {
    metaspace_or_null()->verify();
  }

  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    guarantee(k->class_loader_data() == this, "Must be the same");
    k->verify();
    assert(k != k->next_link(), "no loops!");
  }
}

bool ClassLoaderData::contains_klass(Klass* klass) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k == klass) return true;
  }
  return false;
}