/*
 * Copyright (c) 2012, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// A ClassLoaderData identifies the full set of class types that a class
// loader's name resolution strategy produces for a given configuration of the
// class loader.
// Class types in the ClassLoaderData may be defined from class file binaries
// provided by the class loader, or by other class loaders it interacts with
// according to its name resolution strategy.
//
// Class loaders that implement a deterministic name resolution strategy
// (including with respect to their delegation behavior), such as the boot, the
// platform, and the system loaders of the JDK's built-in class loader
// hierarchy, always produce the same linkset for a given configuration.
//
// ClassLoaderData carries information related to a linkset (e.g.,
// metaspace holding its klass definitions).
// The System Dictionary and related data structures (e.g., placeholder table,
// loader constraints table) as well as the runtime representation of classes
// only reference ClassLoaderData.
//
// Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData that
// represents the loader's "linking domain" in the JVM.
//
// The bootstrap loader (represented by NULL) also has a ClassLoaderData,
// the singleton returned by the_null_class_loader_data().

#include "precompiled.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/classLoaderDataGraph.inline.hpp"
#include "classfile/dictionary.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/packageEntry.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/access.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/oopHandle.inline.hpp"
#include "oops/weakHandle.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/mutex.hpp"
#include "runtime/safepoint.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
#include "utilities/ostream.hpp"

ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;

void ClassLoaderData::init_null_class_loader_data() {
  assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");

  _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");

  LogTarget(Trace, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("create ");
    _the_null_class_loader_data->print_value_on(&ls);
    ls.cr();
  }
}

// Obtain and set the class loader's name within the ClassLoaderData so
// it will be available for error messages, logging, JFR, etc.  The name
// and klass are available after the class_loader oop is no longer alive,
// during unloading.
void ClassLoaderData::initialize_name(Handle class_loader) {
  Thread* THREAD = Thread::current();
  ResourceMark rm(THREAD);

  // Obtain the class loader's name.  If the class loader's name was not
  // explicitly set during construction, the CLD's _name field will be null.
  oop cl_name = java_lang_ClassLoader::name(class_loader());
  if (cl_name != NULL) {
    const char* cl_instance_name = java_lang_String::as_utf8_string(cl_name);

    if (cl_instance_name != NULL && cl_instance_name[0] != '\0') {
      _name = SymbolTable::new_symbol(cl_instance_name);
    }
  }

  // Obtain the class loader's name and identity hash.  If the class loader's
  // name was not explicitly set during construction, the class loader's name and id
  // will be set to the qualified class name of the class loader along with its
  // identity hash.
  // If for some reason the ClassLoader's constructor has not been run, instead of
  // leaving the _name_and_id field null, fall back to the external qualified class
  // name.  Thus CLD's _name_and_id field should never have a null value.
  oop cl_name_and_id = java_lang_ClassLoader::nameAndId(class_loader());
  const char* cl_instance_name_and_id =
                  (cl_name_and_id == NULL) ? _class_loader_klass->external_name() :
                                             java_lang_String::as_utf8_string(cl_name_and_id);
  assert(cl_instance_name_and_id != NULL && cl_instance_name_and_id[0] != '\0', "class loader has no name and id");
  _name_and_id = SymbolTable::new_symbol(cl_instance_name_and_id);
}

ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool has_class_mirror_holder) :
  _metaspace(NULL),
  _metaspace_lock(new Mutex(Mutex::leaf+1, "Metaspace allocation lock", true,
                            Mutex::_safepoint_check_never)),
  _unloading(false), _has_class_mirror_holder(has_class_mirror_holder),
  _modified_oops(true), _accumulated_modified_oops(false),
  // The class loader data for a non-strong hidden class or unsafe anonymous class
  // doesn't have anything to keep it from being unloaded during parsing of that class.
  // The null-class-loader should always be kept alive.
  _keep_alive((has_class_mirror_holder || h_class_loader.is_null()) ? 1 : 0),
  _claim(0),
  _handles(),
  _klasses(NULL), _packages(NULL), _modules(NULL), _unnamed_module(NULL), _dictionary(NULL),
  _jmethod_ids(NULL),
  _deallocate_list(NULL),
  _next(NULL),
  _class_loader_klass(NULL), _name(NULL), _name_and_id(NULL) {

  if (!h_class_loader.is_null()) {
    _class_loader = _handles.add(h_class_loader());
    _class_loader_klass = h_class_loader->klass();
    initialize_name(h_class_loader);
  }

  if (!has_class_mirror_holder) {
    // The holder is initialized later for non-strong hidden classes and unsafe anonymous classes,
    // and before calling anything that calls class_loader().
    initialize_holder(h_class_loader);

    // A ClassLoaderData created solely for a non-strong hidden class or unsafe anonymous class should
    // never have a ModuleEntryTable or PackageEntryTable created for it. The defining package
    // and module for a non-strong hidden or unsafe anonymous class will be found in its host class.
    _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
    if (h_class_loader.is_null()) {
      // Create unnamed module for boot loader
      _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
    } else {
      // Create unnamed module for all other loaders
      _unnamed_module = ModuleEntry::create_unnamed_module(this);
    }
    _dictionary = create_dictionary();
  }

  NOT_PRODUCT(_dependency_count = 0); // number of class loader dependencies

  JFR_ONLY(INIT_ID(this);)
}

ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
  Chunk* c = _head;
  while (c != NULL) {
    Chunk* next = c->_next;
    delete c;
    c = next;
  }
}

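// Append an oop to the chunked handle list.  Writers are serialized by the caller
// (e.g. add_handle() holds the metaspace lock), but readers walk the list
// lock-free, so a new chunk and its updated size are published with release
// stores that pair with the load_acquire in oops_do().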
OopHandle ClassLoaderData::ChunkedHandleList::add(oop o) {
  if (_head == NULL || _head->_size == Chunk::CAPACITY) {
    Chunk* next = new Chunk(_head);
    Atomic::release_store(&_head, next);
  }
  oop* handle = &_head->_data[_head->_size];
  NativeAccess<IS_DEST_UNINITIALIZED>::oop_store(handle, o);
  Atomic::release_store(&_head->_size, _head->_size + 1);
  return OopHandle(handle);
}

int ClassLoaderData::ChunkedHandleList::count() const {
  int count = 0;
  Chunk* chunk = _head;
  while (chunk != NULL) {
    count += chunk->_size;
    chunk = chunk->_next;
  }
  return count;
}

inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
  for (juint i = 0; i < size; i++) {
    if (c->_data[i] != NULL) {
      f->do_oop(&c->_data[i]);
    }
  }
}

void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
  Chunk* head = Atomic::load_acquire(&_head);
  if (head != NULL) {
    // Must be careful when reading size of head
    oops_do_chunk(f, head, Atomic::load_acquire(&head->_size));
    for (Chunk* c = head->_next; c != NULL; c = c->_next) {
      oops_do_chunk(f, c, c->_size);
    }
  }
}

class VerifyContainsOopClosure : public OopClosure {
  oop  _target;
  bool _found;

 public:
  VerifyContainsOopClosure(oop target) : _target(target), _found(false) {}

  void do_oop(oop* p) {
    if (p != NULL && NativeAccess<AS_NO_KEEPALIVE>::oop_load(p) == _target) {
      _found = true;
    }
  }

  void do_oop(narrowOop* p) {
    // The ChunkedHandleList should not contain any narrowOop
    ShouldNotReachHere();
  }

  bool found() const {
    return _found;
  }
};

bool ClassLoaderData::ChunkedHandleList::contains(oop p) {
  VerifyContainsOopClosure cl(p);
  oops_do(&cl);
  return cl.found();
}

#ifndef PRODUCT
bool ClassLoaderData::ChunkedHandleList::owner_of(oop* oop_handle) {
  Chunk* chunk = _head;
  while (chunk != NULL) {
    if (&(chunk->_data[0]) <= oop_handle && oop_handle < &(chunk->_data[chunk->_size])) {
      return true;
    }
    chunk = chunk->_next;
  }
  return false;
}
#endif // PRODUCT

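// _claim is a bitmask of claim tokens.  Iterations over the ClassLoaderDataGraph
// use try_claim() so that each ClassLoaderData is handled at most once per claim
// type (see oops_do() below); clear_claim() removes a token so the CLD can be
// claimed again by a later iteration.  Both loop on a CAS until the update takes
// effect or is no longer needed.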
void ClassLoaderData::clear_claim(int claim) {
  for (;;) {
    int old_claim = Atomic::load(&_claim);
    if ((old_claim & claim) == 0) {
      return;
    }
    int new_claim = old_claim & ~claim;
    if (Atomic::cmpxchg(&_claim, old_claim, new_claim) == old_claim) {
      return;
    }
  }
}

bool ClassLoaderData::try_claim(int claim) {
  for (;;) {
    int old_claim = Atomic::load(&_claim);
    if ((old_claim & claim) == claim) {
      return false;
    }
    int new_claim = old_claim | claim;
    if (Atomic::cmpxchg(&_claim, old_claim, new_claim) == old_claim) {
      return true;
    }
  }
}

// Non-strong hidden classes and unsafe anonymous classes have their own ClassLoaderData that is
// marked to keep alive while the class is being parsed, and if the class appears on the module fixup list.
// Due to the uniqueness that no other class shares the hidden or unsafe anonymous class' name or
// ClassLoaderData, no other non-GC thread has knowledge of the hidden or unsafe anonymous class while
// it is being defined, therefore _keep_alive is not volatile or atomic.
void ClassLoaderData::inc_keep_alive() {
  if (has_class_mirror_holder()) {
    assert(_keep_alive > 0, "Invalid keep alive increment count");
    _keep_alive++;
  }
}

void ClassLoaderData::dec_keep_alive() {
  if (has_class_mirror_holder()) {
    assert(_keep_alive > 0, "Invalid keep alive decrement count");
    _keep_alive--;
  }
}

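// Apply the closure to all oops kept in this CLD's handle area.  When claim_value
// is not _claim_none the CLD is claimed first, so concurrent or repeated walks of
// the ClassLoaderDataGraph visit each CLD's oops at most once per claim type.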
void ClassLoaderData::oops_do(OopClosure* f, int claim_value, bool clear_mod_oops) {
  if (claim_value != ClassLoaderData::_claim_none && !try_claim(claim_value)) {
    return;
  }

  // Only clear modified_oops after the ClassLoaderData is claimed.
  if (clear_mod_oops) {
    clear_modified_oops();
  }

  _handles.oops_do(f);
}

void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    klass_closure->do_klass(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::classes_do(void f(Klass * const)) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    f(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::methods_do(void f(Method*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
      InstanceKlass::cast(k)->methods_do(f);
    }
  }
}

void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    // Do not filter ArrayKlass oops here...
    if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
#ifdef ASSERT
      oop m = k->java_mirror();
      assert(m != NULL, "NULL mirror");
      assert(m->is_a(SystemDictionary::Class_klass()), "invalid mirror");
#endif
      klass_closure->do_klass(k);
    }
  }
}

void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass()) {
      f(InstanceKlass::cast(k));
    }
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_unnamed_module != NULL) {
    f(_unnamed_module);
  }
  if (_modules != NULL) {
    for (int i = 0; i < _modules->table_size(); i++) {
      for (ModuleEntry* entry = _modules->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

void ClassLoaderData::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_packages != NULL) {
    for (int i = 0; i < _packages->table_size(); i++) {
      for (PackageEntry* entry = _packages->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

void ClassLoaderData::record_dependency(const Klass* k) {
  assert(k != NULL, "invariant");

  ClassLoaderData * const from_cld = this;
  ClassLoaderData * const to_cld = k->class_loader_data();

  // There is no need to record a dependency if it is to a class whose
  // class loader data is never freed.  (i.e. the dependency's class loader
  // is one of the three builtin class loaders and the dependency's class
  // loader data has a ClassLoader holder, not a Class holder.)
  if (to_cld->is_permanent_class_loader_data()) {
    return;
  }

  oop to;
  if (to_cld->has_class_mirror_holder()) {
    // Just return if a non-strong hidden class or unsafe anonymous class is attempting to record a dependency
    // to itself.  (Note that every non-strong hidden class or unsafe anonymous class has its own unique class
    // loader data.)
    if (to_cld == from_cld) {
      return;
    }
    // Hidden and unsafe anonymous class dependencies are through the mirror.
    to = k->java_mirror();
  } else {
    to = to_cld->class_loader();
    oop from = from_cld->class_loader();

    // Just return if this dependency is to a class with the same or a parent
    // class_loader.
    if (from == to || java_lang_ClassLoader::isAncestor(from, to)) {
      return; // this class loader is in the parent list, no need to add it.
    }
  }

  // It's a dependency we won't find through GC, add it.
  if (!_handles.contains(to)) {
    NOT_PRODUCT(Atomic::inc(&_dependency_count));
    LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("adding dependency from ");
      print_value_on(&ls);
      ls.print(" to ");
      to_cld->print_value_on(&ls);
      ls.cr();
    }
    Handle dependency(Thread::current(), to);
    add_handle(dependency);
    // Added a potentially young gen oop to the ClassLoaderData
    record_modified_oops();
  }
}

void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
  {
    MutexLocker ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    Klass* old_value = _klasses;
    k->set_next_link(old_value);
    // Link the new item into the list, making sure the linked class is stable
    // since the list can be walked without a lock
    Atomic::release_store(&_klasses, k);
    if (k->is_array_klass()) {
      ClassLoaderDataGraph::inc_array_classes(1);
    } else {
      ClassLoaderDataGraph::inc_instance_classes(1);
    }
  }

  if (publicize) {
    LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), k->external_name());
      print_value_on(&ls);
      ls.cr();
    }
  }
}

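// Install the holder as a weak handle in the VM weak OopStorage.  The holder is
// the java.lang.ClassLoader instance for normal CLDs, or the java mirror for
// non-strong hidden and unsafe anonymous classes; its reachability is what
// is_alive() uses to decide whether this CLD can be unloaded.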
void ClassLoaderData::initialize_holder(Handle loader_or_mirror) {
  if (loader_or_mirror() != NULL) {
    assert(_holder.is_null(), "never replace holders");
    _holder = WeakHandle(Universe::vm_weak(), loader_or_mirror);
  }
}

// Remove a klass from the _klasses list for scratch_class during redefinition
// or parsed class in the case of an error.
void ClassLoaderData::remove_class(Klass* scratch_class) {
  assert_locked_or_safepoint(ClassLoaderDataGraph_lock);

  // Adjust global class iterator.
  ClassLoaderDataGraph::adjust_saved_class(scratch_class);

  Klass* prev = NULL;
  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    if (k == scratch_class) {
      if (prev == NULL) {
        _klasses = k->next_link();
      } else {
        Klass* next = k->next_link();
        prev->set_next_link(next);
      }

      if (k->is_array_klass()) {
        ClassLoaderDataGraph::dec_array_classes(1);
      } else {
        ClassLoaderDataGraph::dec_instance_classes(1);
      }

      return;
    }
    prev = k;
    assert(k != k->next_link(), "no loops!");
  }
  ShouldNotReachHere();   // should have found this class!!
}

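// Called when this CLD has been found dead.  Marks it as unloading and releases
// what must go before its metaspace is freed; the remaining memory is reclaimed
// later when the ClassLoaderData itself is deleted.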
void ClassLoaderData::unload() {
  _unloading = true;

  LogTarget(Trace, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("unload");
    print_value_on(&ls);
    ls.cr();
  }

  // Some items on the _deallocate_list need to free their C heap structures
  // if they are not already on the _klasses list.
  free_deallocate_list_C_heap_structures();

  // Clean up class dependencies and tell serviceability tools
  // these classes are unloading.  Must be called
  // after erroneous classes are released.
  classes_do(InstanceKlass::unload_class);

  // Clean up global class iterator for compiler
  ClassLoaderDataGraph::adjust_saved_class(this);
}

ModuleEntryTable* ClassLoaderData::modules() {
  // Lazily create the module entry table at first request.
  // Lock-free access requires load_acquire.
  ModuleEntryTable* modules = Atomic::load_acquire(&_modules);
  if (modules == NULL) {
    MutexLocker m1(Module_lock);
    // Check if _modules got allocated while we were waiting for this lock.
    if ((modules = _modules) == NULL) {
      modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);

      {
        MutexLocker m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
        // Ensure _modules is stable, since it is examined without a lock
        Atomic::release_store(&_modules, modules);
      }
    }
  }
  return modules;
}

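// Starting sizes for the per-loader dictionaries.  The boot and system loaders
// typically define far more classes than other loaders, so they get a larger
// initial table (see create_dictionary() below).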
const int _boot_loader_dictionary_size    = 1009;
const int _default_loader_dictionary_size = 107;

Dictionary* ClassLoaderData::create_dictionary() {
  assert(!has_class_mirror_holder(), "class mirror holder cld does not have a dictionary");
  int size;
  bool resizable = false;
  if (_the_null_class_loader_data == NULL) {
    size = _boot_loader_dictionary_size;
    resizable = true;
  } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
    size = 1;  // there's only one class in the reflection class loader and no initiated classes
  } else if (is_system_class_loader_data()) {
    size = _boot_loader_dictionary_size;
    resizable = true;
  } else {
    size = _default_loader_dictionary_size;
    resizable = true;
  }
  if (!DynamicallyResizeSystemDictionaries || DumpSharedSpaces) {
    resizable = false;
  }
  return new Dictionary(this, size, resizable);
}

 596 
 597 // Tell the GC to keep this klass alive while iterating ClassLoaderDataGraph
 598 oop ClassLoaderData::holder_phantom() const {
 599   // A klass that was previously considered dead can be looked up in the
 600   // CLD/SD, and its _java_mirror or _class_loader can be stored in a root
 601   // or a reachable object making it alive again. The SATB part of G1 needs
 602   // to get notified about this potential resurrection, otherwise the marking
 603   // might not find the object.
 604   if (!_holder.is_null()) {  // NULL class_loader
 605     return _holder.resolve();
 606   } else {
 607     return NULL;
 608   }
 609 }
 610 
 611 // Let the GC read the holder without keeping it alive.
 612 oop ClassLoaderData::holder_no_keepalive() const {
 613   if (!_holder.is_null()) {  // NULL class_loader
 614     return _holder.peek();
 615   } else {
 616     return NULL;
 617   }
 618 }
 619 
 620 // Unloading support
 621 bool ClassLoaderData::is_alive() const {
 622   bool alive = keep_alive()         // null class loader and incomplete non-strong hidden class or unsafe anonymous class.
 623       || (_holder.peek() != NULL);  // and not cleaned by the GC weak handle processing.
 624 
 625   return alive;
 626 }
 627 
 628 class ReleaseKlassClosure: public KlassClosure {
 629 private:
 630   size_t  _instance_class_released;
 631   size_t  _array_class_released;
 632 public:
 633   ReleaseKlassClosure() : _instance_class_released(0), _array_class_released(0) { }
 634 
 635   size_t instance_class_released() const { return _instance_class_released; }
 636   size_t array_class_released()    const { return _array_class_released;    }
 637 
 638   void do_klass(Klass* k) {
 639     if (k->is_array_klass()) {
 640       _array_class_released ++;
 641     } else {
 642       assert(k->is_instance_klass(), "Must be");
 643       _instance_class_released ++;
 644     }
 645     k->release_C_heap_structures();
 646   }
 647 };
 648 
 649 ClassLoaderData::~ClassLoaderData() {
 650   // Release C heap structures for all the classes.
 651   ReleaseKlassClosure cl;
 652   classes_do(&cl);
 653 
 654   ClassLoaderDataGraph::dec_array_classes(cl.array_class_released());
 655   ClassLoaderDataGraph::dec_instance_classes(cl.instance_class_released());
 656 
 657   // Release the WeakHandle
 658   _holder.release(Universe::vm_weak());
 659 
 660   // Release C heap allocated hashtable for all the packages.
 661   if (_packages != NULL) {
 662     // Destroy the table itself
 663     delete _packages;
 664     _packages = NULL;
 665   }
 666 
 667   // Release C heap allocated hashtable for all the modules.
 668   if (_modules != NULL) {
 669     // Destroy the table itself
 670     delete _modules;
 671     _modules = NULL;
 672   }
 673 
 674   // Release C heap allocated hashtable for the dictionary
 675   if (_dictionary != NULL) {
 676     // Destroy the table itself
 677     delete _dictionary;
 678     _dictionary = NULL;
 679   }
 680 
 681   if (_unnamed_module != NULL) {
 682     _unnamed_module->delete_unnamed_module();
 683     _unnamed_module = NULL;
 684   }
 685 
 686   // release the metaspace
 687   ClassLoaderMetaspace *m = _metaspace;
 688   if (m != NULL) {
 689     _metaspace = NULL;
 690     delete m;
 691   }
 692   // Clear all the JNI handles for methods
 693   // These aren't deallocated and are going to look like a leak, but that's
 694   // needed because we can't really get rid of jmethodIDs because we don't
 695   // know when native code is going to stop using them.  The spec says that
 696   // they're "invalid" but existing programs likely rely on their being
 697   // NULL after class unloading.
 698   if (_jmethod_ids != NULL) {
 699     Method::clear_jmethod_ids(this);
 700   }
 701   // Delete lock
 702   delete _metaspace_lock;
 703 
 704   // Delete free list
 705   if (_deallocate_list != NULL) {
 706     delete _deallocate_list;
 707   }
 708 
 709   // Decrement refcounts of Symbols if created.
 710   if (_name != NULL) {
 711     _name->decrement_refcount();
 712   }
 713   if (_name_and_id != NULL) {
 714     _name_and_id->decrement_refcount();
 715   }
 716 }
 717 
// Returns true if this class loader data is for the app class loader
// or a user defined system class loader.  (Note that the class loader
// data may have a Class holder.)
bool ClassLoaderData::is_system_class_loader_data() const {
  return SystemDictionary::is_system_class_loader(class_loader());
}

// Returns true if this class loader data is for the platform class loader.
// (Note that the class loader data may have a Class holder.)
bool ClassLoaderData::is_platform_class_loader_data() const {
  return SystemDictionary::is_platform_class_loader(class_loader());
}

// Returns true if the class loader for this class loader data is one of
// the 3 builtin (boot, application/system or platform) class loaders,
// including a user-defined system class loader.  Note that if the class
// loader data is for a non-strong hidden class or unsafe anonymous class then it may
// get freed by a GC even if its class loader is one of these loaders.
bool ClassLoaderData::is_builtin_class_loader_data() const {
  return (is_boot_class_loader_data() ||
          SystemDictionary::is_system_class_loader(class_loader()) ||
          SystemDictionary::is_platform_class_loader(class_loader()));
}

// Returns true if this class loader data is a class loader data
// that is never freed by a GC.  It must be the CLD for one of the builtin
// class loaders and not the CLD for a non-strong hidden class or unsafe anonymous class.
bool ClassLoaderData::is_permanent_class_loader_data() const {
  return is_builtin_class_loader_data() && !has_class_mirror_holder();
}

ClassLoaderMetaspace* ClassLoaderData::metaspace_non_null() {
  // If the metaspace has not been allocated, create a new one.  Might want
  // to create a smaller arena for Reflection class loaders also.
  // The reason for the delayed allocation is that some class loaders are
  // simply for delegating, with no metadata of their own.
  // Lock-free access requires load_acquire.
  ClassLoaderMetaspace* metaspace = Atomic::load_acquire(&_metaspace);
  if (metaspace == NULL) {
    MutexLocker ml(_metaspace_lock,  Mutex::_no_safepoint_check_flag);
    // Check if _metaspace got allocated while we were waiting for this lock.
    if ((metaspace = _metaspace) == NULL) {
      if (this == the_null_class_loader_data()) {
        assert (class_loader() == NULL, "Must be");
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
      } else if (has_class_mirror_holder()) {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ClassMirrorHolderMetaspaceType);
      } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
      } else {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
      }
      // Ensure _metaspace is stable, since it is examined without a lock
      Atomic::release_store(&_metaspace, metaspace);
    }
  }
  return metaspace;
}

 776 
 777 OopHandle ClassLoaderData::add_handle(Handle h) {
 778   MutexLocker ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
 779   record_modified_oops();
 780   return _handles.add(h());
 781 }
 782 
 783 void ClassLoaderData::remove_handle(OopHandle h) {
 784   assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
 785   oop* ptr = h.ptr_raw();
 786   if (ptr != NULL) {
 787     assert(_handles.owner_of(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
 788     NativeAccess<>::oop_store(ptr, oop(NULL));
 789   }
 790 }
 791 
 792 void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
 793   MutexLocker ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
 794   if (dest.resolve() != NULL) {
 795     return;
 796   } else {
 797     dest = _handles.add(h());
 798   }
 799 }
 800 
// Add this metadata pointer to be freed when it's safe.  The actual deallocation
// happens only at a safepoint, which checks whether any handles still point to
// this metadata.
void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
  // Metadata in shared region isn't deleted.
  if (!m->is_shared()) {
    MutexLocker ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
    if (_deallocate_list == NULL) {
      _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, mtClass);
    }
    _deallocate_list->append_if_missing(m);
    log_debug(class, loader, data)("deallocate added for %s", m->print_value_string());
    ClassLoaderDataGraph::set_should_clean_deallocate_lists();
  }
}

// Deallocate free metadata on the free list.  How useful the PermGen was!
void ClassLoaderData::free_deallocate_list() {
  // This must be called at a safepoint because it depends on metadata walking at
  // safepoint cleanup time.
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        ShouldNotReachHere();
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
      // Some cleaning remains.
      ClassLoaderDataGraph::set_should_clean_deallocate_lists();
    }
  }
}

// This is distinct from free_deallocate_list.  For class loader data that are
// unloading, this frees the C heap memory for items on the list, and unlinks
// scratch or error classes so that unloading events aren't triggered for these
// classes. The metadata is removed with the unloading metaspace.
// There isn't C heap memory allocated for methods, so nothing is done for them.
void ClassLoaderData::free_deallocate_list_C_heap_structures() {
  assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
  assert(is_unloading(), "only called for ClassLoaderData that are unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    _deallocate_list->remove_at(i);
    if (m->is_constantPool()) {
      ((ConstantPool*)m)->release_C_heap_structures();
    } else if (m->is_klass()) {
      InstanceKlass* ik = (InstanceKlass*)m;
      // also releases ik->constants() C heap memory
      ik->release_C_heap_structures();
      // Remove the class so unloading events aren't triggered for
      // this class (scratch or error class) in do_unloading().
      remove_class(ik);
    }
  }
}

// Caller needs ResourceMark
// If the class loader's _name has not been explicitly set, the class loader's
// qualified class name is returned.
const char* ClassLoaderData::loader_name() const {
   if (_class_loader_klass == NULL) {
     return BOOTSTRAP_LOADER_NAME;
   } else if (_name != NULL) {
     return _name->as_C_string();
   } else {
     return _class_loader_klass->external_name();
   }
}

// Caller needs ResourceMark
// Format of the _name_and_id is as follows:
//   If the defining loader has a name explicitly set then '<loader-name>' @<id>
//   If the defining loader has no name then <qualified-class-name> @<id>
//   If built-in loader, then omit '@<id>' as there is only one instance.
const char* ClassLoaderData::loader_name_and_id() const {
  if (_class_loader_klass == NULL) {
    return "'" BOOTSTRAP_LOADER_NAME "'";
  } else if (_name_and_id != NULL) {
    return _name_and_id->as_C_string();
  } else {
    // May be called in a race before _name_and_id is initialized.
    return _class_loader_klass->external_name();
  }
}

void ClassLoaderData::print_value_on(outputStream* out) const {
  if (!is_unloading() && class_loader() != NULL) {
    out->print("loader data: " INTPTR_FORMAT " for instance ", p2i(this));
    class_loader()->print_value_on(out);  // includes loader_name_and_id() and address of class loader instance
  } else {
    // loader data: 0xsomeaddr of 'bootstrap'
    out->print("loader data: " INTPTR_FORMAT " of %s", p2i(this), loader_name_and_id());
  }
  if (_has_class_mirror_holder) {
    out->print(" has a class holder");
  }
}

void ClassLoaderData::print_value() const { print_value_on(tty); }

#ifndef PRODUCT
void ClassLoaderData::print_on(outputStream* out) const {
  out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: %s {",
              p2i(this), p2i(_class_loader.ptr_raw()), loader_name_and_id());
  if (has_class_mirror_holder()) out->print(" has a class holder");
  if (claimed()) out->print(" claimed");
  if (is_unloading()) out->print(" unloading");
  out->print(" metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));

  if (_jmethod_ids != NULL) {
    Method::print_jmethod_ids(this, out);
  }
  out->print(" handles count %d", _handles.count());
  out->print(" dependencies %d", _dependency_count);
  out->print_cr("}");
}
#endif // PRODUCT

void ClassLoaderData::print() const { print_on(tty); }

void ClassLoaderData::verify() {
  assert_locked_or_safepoint(_metaspace_lock);
  oop cl = class_loader();

  guarantee(this == class_loader_data(cl) || has_class_mirror_holder(), "Must be the same");
  guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || has_class_mirror_holder(), "must be");

  // Verify the integrity of the allocated space.
  if (metaspace_or_null() != NULL) {
    metaspace_or_null()->verify();
  }

  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    guarantee(k->class_loader_data() == this, "Must be the same");
    k->verify();
    assert(k != k->next_link(), "no loops!");
  }
}

bool ClassLoaderData::contains_klass(Klass* klass) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k == klass) return true;
  }
  return false;
}

Klass* ClassLoaderData::find_class(Symbol* name) {
  Dictionary* dict = dictionary();
  unsigned hash  = dict->compute_hash(name);
  int      index = dict->hash_to_index(hash);
  {
    // Dictionary::find_class asserts that the SystemDictionary_lock is held
    // or that we are at a safepoint.
    MutexLocker lock(SystemDictionary_lock);
    return dict->find_class(index, hash, name);
  }
}