1  /*
   2  * Copyright (c) 2012, 2017, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 // A ClassLoaderData identifies the full set of class types that a class
  26 // loader's name resolution strategy produces for a given configuration of the
  27 // class loader.
// Class types in the ClassLoaderData may be defined from class file binaries
// provided by the class loader, or by other class loaders it interacts with
// according to its name resolution strategy.
  31 //
  32 // Class loaders that implement a deterministic name resolution strategy
  33 // (including with respect to their delegation behavior), such as the boot, the
  34 // platform, and the system loaders of the JDK's built-in class loader
  35 // hierarchy, always produce the same linkset for a given configuration.
  36 //
  37 // ClassLoaderData carries information related to a linkset (e.g.,
  38 // metaspace holding its klass definitions).
  39 // The System Dictionary and related data structures (e.g., placeholder table,
  40 // loader constraints table) as well as the runtime representation of classes
  41 // only reference ClassLoaderData.
  42 //
// Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData that
// represents the loader's "linking domain" in the JVM.
  45 //
// The bootstrap loader (represented by NULL) also has a ClassLoaderData,
// the singleton the_null_class_loader_data().
  48 
  49 #include "precompiled.hpp"
  50 #include "classfile/classLoaderData.hpp"
  51 #include "classfile/classLoaderData.inline.hpp"
  52 #include "classfile/dictionary.hpp"
  53 #include "classfile/javaClasses.hpp"
  54 #include "classfile/metadataOnStackMark.hpp"
  55 #include "classfile/moduleEntry.hpp"
  56 #include "classfile/packageEntry.hpp"
  57 #include "classfile/systemDictionary.hpp"
  58 #include "code/codeCache.hpp"
  59 #include "gc/shared/gcLocker.hpp"
  60 #include "logging/log.hpp"
  61 #include "memory/metadataFactory.hpp"
  62 #include "memory/metaspaceShared.hpp"
  63 #include "memory/oopFactory.hpp"
  64 #include "memory/resourceArea.hpp"
  65 #include "oops/objArrayOop.inline.hpp"
  66 #include "oops/oop.inline.hpp"
  67 #include "runtime/atomic.hpp"
  68 #include "runtime/javaCalls.hpp"
  69 #include "runtime/jniHandles.hpp"
  70 #include "runtime/mutex.hpp"
  71 #include "runtime/orderAccess.hpp"
  72 #include "runtime/safepoint.hpp"
  73 #include "runtime/synchronizer.hpp"
  74 #include "utilities/growableArray.hpp"
  75 #include "utilities/macros.hpp"
  76 #include "utilities/ostream.hpp"
  77 #if INCLUDE_TRACE
  78 #include "trace/tracing.hpp"
  79 #endif
  80 
  81 // helper function to avoid in-line casts
  82 template <typename T> static T* load_ptr_acquire(T* volatile *p) {
  83   return static_cast<T*>(OrderAccess::load_ptr_acquire(p));
  84 }
  85 
  86 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
  87 
  88 ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous, Dependencies dependencies) :
  89   _class_loader(h_class_loader()),
  90   _is_anonymous(is_anonymous),
  91   // An anonymous class loader data doesn't have anything to keep
  92   // it from being unloaded during parsing of the anonymous class.
  93   // The null-class-loader should always be kept alive.
  94   _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
  95   _metaspace(NULL), _unloading(false), _klasses(NULL),
  96   _modules(NULL), _packages(NULL),
  97   _claimed(0), _jmethod_ids(NULL), _handles(), _deallocate_list(NULL),
  98   _next(NULL), _dependencies(dependencies),
  99   _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
 100                             Monitor::_safepoint_check_never)) {
 101 
 102   // A ClassLoaderData created solely for an anonymous class should never have a
 103   // ModuleEntryTable or PackageEntryTable created for it. The defining package
 104   // and module for an anonymous class will be found in its host class.
 105   if (!is_anonymous) {
 106     _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
 107     if (h_class_loader.is_null()) {
 108       // Create unnamed module for boot loader
 109       _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
 110     } else {
 111       // Create unnamed module for all other loaders
 112       _unnamed_module = ModuleEntry::create_unnamed_module(this);
 113     }
 114   } else {
 115     _unnamed_module = NULL;
 116   }
 117 
 118   if (!is_anonymous) {
 119     _dictionary = create_dictionary();
 120   } else {
 121     _dictionary = NULL;
 122   }
 123   TRACE_INIT_ID(this);
 124 }
 125 
 126 void ClassLoaderData::init_dependencies(TRAPS) {
 127   assert(!Universe::is_fully_initialized(), "should only be called when initializing");
 128   assert(is_the_null_class_loader_data(), "should only call this for the null class loader");
 129   _dependencies.init(CHECK);
 130 }
 131 
 132 void ClassLoaderData::Dependencies::init(TRAPS) {
 133   // Create empty dependencies array to add to. CMS requires this to be
 134   // an oop so that it can track additions via card marks.  We think.
 135   _list_head = oopFactory::new_objectArray(2, CHECK);
 136 }
 137 
 138 ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
 139   Chunk* c = _head;
 140   while (c != NULL) {
 141     Chunk* next = c->_next;
 142     delete c;
 143     c = next;
 144   }
 145 }
 146 
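// Lock-free add: writers append under the metaspace lock (see add_handle()),
// while readers (e.g., GC via oops_do()) may walk the list concurrently without
// a lock.  New chunks and the updated _size are published with release stores
// so that a reader using the matching acquire loads never sees an
// uninitialized slot.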
 147 oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
 148   if (_head == NULL || _head->_size == Chunk::CAPACITY) {
 149     Chunk* next = new Chunk(_head);
 150     OrderAccess::release_store_ptr(&_head, next);
 151   }
 152   oop* handle = &_head->_data[_head->_size];
 153   *handle = o;
 154   OrderAccess::release_store(&_head->_size, _head->_size + 1);
 155   return handle;
 156 }
 157 
 158 inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
 159   for (juint i = 0; i < size; i++) {
 160     if (c->_data[i] != NULL) {
 161       f->do_oop(&c->_data[i]);
 162     }
 163   }
 164 }
 165 
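// Only the head chunk can still be growing; a chunk further down the list was
// filled to CAPACITY before a new head was installed, so its _size is stable
// and can be read without an acquire barrier.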
 166 void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
 167   Chunk* head = (Chunk*) OrderAccess::load_ptr_acquire(&_head);
 168   if (head != NULL) {
 169     // Must be careful when reading size of head
 170     oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
 171     for (Chunk* c = head->_next; c != NULL; c = c->_next) {
 172       oops_do_chunk(f, c, c->_size);
 173     }
 174   }
 175 }
 176 
 177 #ifdef ASSERT
 178 class VerifyContainsOopClosure : public OopClosure {
 179   oop* _target;
 180   bool _found;
 181 
 182  public:
 183   VerifyContainsOopClosure(oop* target) : _target(target), _found(false) {}
 184 
 185   void do_oop(oop* p) {
 186     if (p == _target) {
 187       _found = true;
 188     }
 189   }
 190 
 191   void do_oop(narrowOop* p) {
 192     // The ChunkedHandleList should not contain any narrowOop
 193     ShouldNotReachHere();
 194   }
 195 
 196   bool found() const {
 197     return _found;
 198   }
 199 };
 200 
 201 bool ClassLoaderData::ChunkedHandleList::contains(oop* p) {
 202   VerifyContainsOopClosure cl(p);
 203   oops_do(&cl);
 204   return cl.found();
 205 }
 206 #endif
 207 
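// Claiming a ClassLoaderData is how GC visitors ensure each CLD's oops are
// processed only once per traversal: the first caller to flip _claimed from 0
// to 1 via cmpxchg wins; everyone else gets false and skips the CLD (see
// oops_do() below and ClassLoaderDataGraph::clear_claimed_marks()).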
 208 bool ClassLoaderData::claim() {
 209   if (_claimed == 1) {
 210     return false;
 211   }
 212 
 213   return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0;
 214 }
 215 
// Anonymous classes have their own ClassLoaderData that is kept alive while the
// class is being parsed, and also while the class appears on the module fixup
// list. Because no other class shares the anonymous class' name or
// ClassLoaderData, no non-GC thread has knowledge of the anonymous class while
// it is being defined; therefore _keep_alive does not need to be volatile or atomic.
 221 void ClassLoaderData::inc_keep_alive() {
 222   if (is_anonymous()) {
 223     assert(_keep_alive >= 0, "Invalid keep alive increment count");
 224     _keep_alive++;
 225   }
 226 }
 227 
 228 void ClassLoaderData::dec_keep_alive() {
 229   if (is_anonymous()) {
 230     assert(_keep_alive > 0, "Invalid keep alive decrement count");
 231     _keep_alive--;
 232   }
 233 }
 234 
 235 void ClassLoaderData::oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
 236   if (must_claim && !claim()) {
 237     return;
 238   }
 239 
 240   f->do_oop(&_class_loader);
 241   _dependencies.oops_do(f);
 242 
 243   _handles.oops_do(f);
 244 
 245   if (klass_closure != NULL) {
 246     classes_do(klass_closure);
 247   }
 248 }
 249 
 250 void ClassLoaderData::Dependencies::oops_do(OopClosure* f) {
 251   f->do_oop((oop*)&_list_head);
 252 }
 253 
 254 void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
 255   // Lock-free access requires load_ptr_acquire
 256   for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
 257     klass_closure->do_klass(k);
 258     assert(k != k->next_link(), "no loops!");
 259   }
 260 }
 261 
 262 void ClassLoaderData::classes_do(void f(Klass * const)) {
 263   // Lock-free access requires load_ptr_acquire
 264   for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
 265     f(k);
 266     assert(k != k->next_link(), "no loops!");
 267   }
 268 }
 269 
 270 void ClassLoaderData::methods_do(void f(Method*)) {
 271   // Lock-free access requires load_ptr_acquire
 272   for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
 273     if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
 274       InstanceKlass::cast(k)->methods_do(f);
 275     }
 276   }
 277 }
 278 
 279 void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
 280   // Lock-free access requires load_ptr_acquire
 281   for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
 282     // Do not filter ArrayKlass oops here...
 283     if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
 284       klass_closure->do_klass(k);
 285     }
 286   }
 287 }
 288 
 289 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 290   // Lock-free access requires load_ptr_acquire
 291   for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
 292     if (k->is_instance_klass()) {
 293       f(InstanceKlass::cast(k));
 294     }
 295     assert(k != k->next_link(), "no loops!");
 296   }
 297 }
 298 
 299 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 300   assert_locked_or_safepoint(Module_lock);
 301   if (_unnamed_module != NULL) {
 302     f(_unnamed_module);
 303   }
 304   if (_modules != NULL) {
 305     for (int i = 0; i < _modules->table_size(); i++) {
 306       for (ModuleEntry* entry = _modules->bucket(i);
 307            entry != NULL;
 308            entry = entry->next()) {
 309         f(entry);
 310       }
 311     }
 312   }
 313 }
 314 
 315 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 316   assert_locked_or_safepoint(Module_lock);
 317   if (_packages != NULL) {
 318     for (int i = 0; i < _packages->table_size(); i++) {
 319       for (PackageEntry* entry = _packages->bucket(i);
 320            entry != NULL;
 321            entry = entry->next()) {
 322         f(entry);
 323       }
 324     }
 325   }
 326 }
 327 
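// Record that this ClassLoaderData (from_cld) depends on the ClassLoaderData
// that defines Klass k (to_cld), so that to_cld's class loader (or, for an
// anonymous class, its mirror) stays reachable from from_cld and cannot be
// unloaded while from_cld is alive.  Dependencies the GC would discover
// anyway -- the null CLD, or a loader already on from_cld's parent chain --
// are skipped.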
 328 void ClassLoaderData::record_dependency(const Klass* k, TRAPS) {
 329   assert(k != NULL, "invariant");
 330 
 331   ClassLoaderData * const from_cld = this;
 332   ClassLoaderData * const to_cld = k->class_loader_data();
 333 
 334   // Dependency to the null class loader data doesn't need to be recorded
 335   // because the null class loader data never goes away.
 336   if (to_cld->is_the_null_class_loader_data()) {
 337     return;
 338   }
 339 
 340   oop to;
 341   if (to_cld->is_anonymous()) {
 342     // Anonymous class dependencies are through the mirror.
 343     to = k->java_mirror();
 344   } else {
 345     to = to_cld->class_loader();
 346 
    // If from_cld is anonymous, even if its class_loader is a parent of 'to'
    // we still have to add it.  The class_loader won't keep from_cld alive.
 349     if (!from_cld->is_anonymous()) {
 350       // Check that this dependency isn't from the same or parent class_loader
 351       oop from = from_cld->class_loader();
 352 
 353       oop curr = from;
 354       while (curr != NULL) {
 355         if (curr == to) {
 356           return; // this class loader is in the parent list, no need to add it.
 357         }
 358         curr = java_lang_ClassLoader::parent(curr);
 359       }
 360     }
 361   }
 362 
  // It's a dependency we won't find through GC, so add it. This is relatively rare.
  // Must handle over a GC point.
 365   Handle dependency(THREAD, to);
 366   from_cld->_dependencies.add(dependency, CHECK);
 367 }
 368 
 369 
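// The dependency list is a singly linked list built out of two-element object
// arrays: slot 0 holds the class loader oop (or mirror) being kept alive, and
// slot 1 holds the next node.  Roughly (illustrative sketch only):
//
//   _list_head -> [loader_A | next] -> [loader_B | NULL]
//
// A freshly initialized list is a single [NULL | NULL] node whose first slot is
// filled in by the first locked_add().  Nodes are only ever appended, under the
// ObjectLocker in locked_add(), so the unlocked pre-scan in add() can safely
// race with concurrent additions.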
 370 void ClassLoaderData::Dependencies::add(Handle dependency, TRAPS) {
  // Check first if this dependency is already in the list.
  // Save a pointer to the last node so we can append to it under the lock.
 373   objArrayOop ok = _list_head;
 374   objArrayOop last = NULL;
 375   while (ok != NULL) {
 376     last = ok;
 377     if (ok->obj_at(0) == dependency()) {
 378       // Don't need to add it
 379       return;
 380     }
 381     ok = (objArrayOop)ok->obj_at(1);
 382   }
 383 
 384   // Must handle over GC points
 385   assert (last != NULL, "dependencies should be initialized");
 386   objArrayHandle last_handle(THREAD, last);
 387 
 388   // Create a new dependency node with fields for (class_loader or mirror, next)
 389   objArrayOop deps = oopFactory::new_objectArray(2, CHECK);
 390   deps->obj_at_put(0, dependency());
 391 
 392   // Must handle over GC points
 393   objArrayHandle new_dependency(THREAD, deps);
 394 
 395   // Add the dependency under lock
 396   locked_add(last_handle, new_dependency, THREAD);
 397 }
 398 
 399 void ClassLoaderData::Dependencies::locked_add(objArrayHandle last_handle,
 400                                                objArrayHandle new_dependency,
 401                                                Thread* THREAD) {
 402 
 403   // Have to lock and put the new dependency on the end of the dependency
 404   // array so the card mark for CMS sees that this dependency is new.
 405   // Can probably do this lock free with some effort.
 406   ObjectLocker ol(Handle(THREAD, _list_head), THREAD);
 407 
 408   oop loader_or_mirror = new_dependency->obj_at(0);
 409 
 410   // Since the dependencies are only added, add to the end.
 411   objArrayOop end = last_handle();
 412   objArrayOop last = NULL;
 413   while (end != NULL) {
 414     last = end;
 415     // check again if another thread added it to the end.
 416     if (end->obj_at(0) == loader_or_mirror) {
 417       // Don't need to add it
 418       return;
 419     }
 420     end = (objArrayOop)end->obj_at(1);
 421   }
 422   assert (last != NULL, "dependencies should be initialized");
 423   // fill in the first element with the oop in new_dependency.
 424   if (last->obj_at(0) == NULL) {
 425     last->obj_at_put(0, new_dependency->obj_at(0));
 426   } else {
 427     last->obj_at_put(1, new_dependency());
 428   }
 429 }
 430 
 431 void ClassLoaderDataGraph::clear_claimed_marks() {
 432   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
 433     cld->clear_claimed();
 434   }
 435 }
 436 
 437 void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
 438   {
 439     MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 440     Klass* old_value = _klasses;
 441     k->set_next_link(old_value);
 442     // Link the new item into the list, making sure the linked class is stable
 443     // since the list can be walked without a lock
 444     OrderAccess::release_store_ptr(&_klasses, k);
 445   }
 446 
 447   if (publicize && k->class_loader_data() != NULL) {
 448     ResourceMark rm;
 449     log_trace(class, loader, data)("Adding k: " PTR_FORMAT " %s to CLD: "
 450                   PTR_FORMAT " loader: " PTR_FORMAT " %s",
 451                   p2i(k),
 452                   k->external_name(),
 453                   p2i(k->class_loader_data()),
 454                   p2i((void *)k->class_loader()),
 455                   loader_name());
 456   }
 457 }
 458 
 459 // Class iterator used by the compiler.  It gets some number of classes at
 460 // a safepoint to decay invocation counters on the methods.
 461 class ClassLoaderDataGraphKlassIteratorStatic {
 462   ClassLoaderData* _current_loader_data;
 463   Klass*           _current_class_entry;
 464  public:
 465 
 466   ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {}
 467 
 468   InstanceKlass* try_get_next_class() {
 469     assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 470     while (true) {
 471   
 472       if (_current_class_entry != NULL) {
 473         Klass* k = _current_class_entry;
 474         _current_class_entry = _current_class_entry->next_link();
 475   
 476         if (k->is_instance_klass()) {
 477           InstanceKlass* ik = InstanceKlass::cast(k);
 478           // Only return loaded classes
 479           if (ik->is_loaded()) {
 480             return ik;
 481           }
 482         }
 483       } else {
 484         // Go to next CLD
 485         if (_current_loader_data != NULL) {
 486           _current_loader_data = _current_loader_data->next();
 487         }
 488         // Start at the beginning
 489         if (_current_loader_data == NULL) {
 490           _current_loader_data = ClassLoaderDataGraph::_head;
 491         }
 492   
 493         _current_class_entry = _current_loader_data->klasses();
 494       }
 495     }
 496     // never reached: an InstanceKlass should be returned above
 497   }
 498 
 499   // If the current class for the static iterator is a class being unloaded or
 500   // deallocated, adjust the current class.
 501   void adjust_saved_class(ClassLoaderData* cld) {
 502     if (_current_loader_data == cld) {
 503       _current_loader_data = cld->next();
 504       if (_current_loader_data != NULL) {
 505         _current_class_entry = _current_loader_data->klasses();
 506       }  // else try_get_next_class will start at the head
 507     }
 508   }
 509 
 510   void adjust_saved_class(Klass* klass) {
 511     if (_current_class_entry == klass) {
 512       _current_class_entry = klass->next_link();
 513     }
 514   }
 515 };
 516 
 517 static ClassLoaderDataGraphKlassIteratorStatic static_klass_iterator;
 518 
 519 InstanceKlass* ClassLoaderDataGraph::try_get_next_class() {
 520   return static_klass_iterator.try_get_next_class();
 521 }
 522 
 523 
// Remove a klass from the _klasses list: either a scratch_class during redefinition,
// or a partially parsed class in the case of an error.
 526 void ClassLoaderData::remove_class(Klass* scratch_class) {
 527   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 528 
 529   // Adjust global class iterator.
 530   static_klass_iterator.adjust_saved_class(scratch_class);
 531 
 532   Klass* prev = NULL;
 533   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
 534     if (k == scratch_class) {
 535       if (prev == NULL) {
 536         _klasses = k->next_link();
 537       } else {
 538         Klass* next = k->next_link();
 539         prev->set_next_link(next);
 540       }
 541       return;
 542     }
 543     prev = k;
 544     assert(k != k->next_link(), "no loops!");
 545   }
 546   ShouldNotReachHere();   // should have found this class!!
 547 }
 548 
 549 void ClassLoaderData::unload() {
 550   _unloading = true;
 551 
 552   // Tell serviceability tools these classes are unloading
 553   classes_do(InstanceKlass::notify_unload_class);
 554 
 555   if (log_is_enabled(Debug, class, loader, data)) {
 556     ResourceMark rm;
 557     outputStream* log = Log(class, loader, data)::debug_stream();
 558     log->print(": unload loader data " INTPTR_FORMAT, p2i(this));
 559     log->print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)class_loader()),
 560                loader_name());
 561     if (is_anonymous()) {
 562       log->print(" for anonymous class  " INTPTR_FORMAT " ", p2i(_klasses));
 563     }
 564     log->cr();
 565   }
 566 
 567   // In some rare cases items added to this list will not be freed elsewhere.
 568   // To keep it simple, just free everything in it here.
 569   free_deallocate_list();
 570 
 571   // Clean up global class iterator for compiler
 572   static_klass_iterator.adjust_saved_class(this);
 573 }
 574 
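// Returns this CLD's ModuleEntryTable, creating it on first use.  This is the
// classic double-checked locking pattern: a lock-free acquire load first, a
// re-check under Module_lock, and finally a release store (under the metaspace
// lock) so that readers that skip the lock never observe a partially
// constructed table.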
 575 ModuleEntryTable* ClassLoaderData::modules() {
 576   // Lazily create the module entry table at first request.
 577   // Lock-free access requires load_ptr_acquire.
 578   ModuleEntryTable* modules = load_ptr_acquire(&_modules);
 579   if (modules == NULL) {
 580     MutexLocker m1(Module_lock);
 581     // Check if _modules got allocated while we were waiting for this lock.
 582     if ((modules = _modules) == NULL) {
 583       modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
 584 
 585       {
 586         MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 587         // Ensure _modules is stable, since it is examined without a lock
 588         OrderAccess::release_store_ptr(&_modules, modules);
 589       }
 590     }
 591   }
 592   return modules;
 593 }
 594 
 595 const int _boot_loader_dictionary_size    = 1009;
 596 const int _default_loader_dictionary_size = 107;
 597 const int _prime_array_size         = 8;                       // array of primes for system dictionary size
 598 const int _average_depth_goal       = 3;                       // goal for lookup length
 599 const int _primelist[_prime_array_size] = {107, 1009, 2017, 4049, 5051, 10103, 20201, 40423};
 600 
 601 // Calculate a "good" dictionary size based
 602 // on predicted or current loaded classes count.
 603 static int calculate_dictionary_size(int classcount) {
 604   int newsize = _primelist[0];
 605   if (classcount > 0 && !DumpSharedSpaces) {
 606     int index = 0;
 607     int desiredsize = classcount/_average_depth_goal;
 608     for (newsize = _primelist[index]; index < _prime_array_size -1;
 609          newsize = _primelist[++index]) {
 610       if (desiredsize <=  newsize) {
 611         break;
 612       }
 613     }
 614   }
 615   return newsize;
 616 }
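// For example, with a predicted class count of 3000 and an average depth goal
// of 3, the desired size is 3000 / 3 == 1000, which rounds up to the next
// prime in _primelist, 1009.  When DumpSharedSpaces is set the function always
// returns the smallest size (107).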
 617 
 618 Dictionary* ClassLoaderData::create_dictionary() {
 619   assert(!is_anonymous(), "anonymous class loader data do not have a dictionary");
 620   int size;
 621   if (_the_null_class_loader_data == NULL) {
 622     size = _boot_loader_dictionary_size;
 623   } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
    size = 1;  // there's only one class in the reflection class loader and no initiated classes
 625   } else if (is_system_class_loader_data()) {
 626     size = calculate_dictionary_size(PredictedLoadedClassCount);
 627   } else {
 628     size = _default_loader_dictionary_size;
 629   }
 630   return new Dictionary(this, size);
 631 }
 632 
 633 // Unloading support
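// The "keep alive object" is the heap object whose reachability decides whether
// this CLD can be unloaded: the class loader oop for ordinary CLDs, or the
// anonymous class's java mirror for anonymous CLDs, so an anonymous class can
// be unloaded independently of its host's class loader.  is_alive() asks the
// GC's closure about exactly this object, unless the CLD is artificially kept
// alive (see _keep_alive).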
 634 oop ClassLoaderData::keep_alive_object() const {
 635   assert_locked_or_safepoint(_metaspace_lock);
 636   assert(!keep_alive(), "Don't use with CLDs that are artificially kept alive");
 637   return is_anonymous() ? _klasses->java_mirror() : class_loader();
 638 }
 639 
 640 bool ClassLoaderData::is_alive(BoolObjectClosure* is_alive_closure) const {
 641   bool alive = keep_alive() // null class loader and incomplete anonymous klasses.
 642       || is_alive_closure->do_object_b(keep_alive_object());
 643 
 644   return alive;
 645 }
 646 
 647 ClassLoaderData::~ClassLoaderData() {
 648   // Release C heap structures for all the classes.
 649   classes_do(InstanceKlass::release_C_heap_structures);
 650 
 651   // Release C heap allocated hashtable for all the packages.
 652   if (_packages != NULL) {
 653     // Destroy the table itself
 654     delete _packages;
 655     _packages = NULL;
 656   }
 657 
 658   // Release C heap allocated hashtable for all the modules.
 659   if (_modules != NULL) {
 660     // Destroy the table itself
 661     delete _modules;
 662     _modules = NULL;
 663   }
 664 
 665   // Release C heap allocated hashtable for the dictionary
 666   if (_dictionary != NULL) {
 667     // Destroy the table itself
 668     delete _dictionary;
 669     _dictionary = NULL;
 670   }
 671 
 672   if (_unnamed_module != NULL) {
 673     _unnamed_module->delete_unnamed_module();
 674     _unnamed_module = NULL;
 675   }
 676 
 677   // release the metaspace
 678   Metaspace *m = _metaspace;
 679   if (m != NULL) {
 680     _metaspace = NULL;
 681     delete m;
 682   }
 683   // Clear all the JNI handles for methods
 684   // These aren't deallocated and are going to look like a leak, but that's
 685   // needed because we can't really get rid of jmethodIDs because we don't
 686   // know when native code is going to stop using them.  The spec says that
 687   // they're "invalid" but existing programs likely rely on their being
 688   // NULL after class unloading.
 689   if (_jmethod_ids != NULL) {
 690     Method::clear_jmethod_ids(this);
 691   }
 692   // Delete lock
 693   delete _metaspace_lock;
 694 
 695   // Delete free list
 696   if (_deallocate_list != NULL) {
 697     delete _deallocate_list;
 698   }
 699 }
 700 
 701 // Returns true if this class loader data is for the system class loader.
 702 bool ClassLoaderData::is_system_class_loader_data() const {
 703   return SystemDictionary::is_system_class_loader(class_loader());
 704 }
 705 
 706 // Returns true if this class loader data is for the platform class loader.
 707 bool ClassLoaderData::is_platform_class_loader_data() const {
 708   return SystemDictionary::is_platform_class_loader(class_loader());
 709 }
 710 
 711 // Returns true if this class loader data is one of the 3 builtin
 712 // (boot, application/system or platform) class loaders. Note, the
 713 // builtin loaders are not freed by a GC.
 714 bool ClassLoaderData::is_builtin_class_loader_data() const {
 715   return (is_the_null_class_loader_data() ||
 716           SystemDictionary::is_system_class_loader(class_loader()) ||
 717           SystemDictionary::is_platform_class_loader(class_loader()));
 718 }
 719 
 720 Metaspace* ClassLoaderData::metaspace_non_null() {
 721   assert(!DumpSharedSpaces, "wrong metaspace!");
 722   // If the metaspace has not been allocated, create a new one.  Might want
 723   // to create smaller arena for Reflection class loaders also.
 724   // The reason for the delayed allocation is because some class loaders are
 725   // simply for delegating with no metadata of their own.
 726   // Lock-free access requires load_ptr_acquire.
 727   Metaspace* metaspace = load_ptr_acquire(&_metaspace);
 728   if (metaspace == NULL) {
 729     MutexLockerEx ml(_metaspace_lock,  Mutex::_no_safepoint_check_flag);
 730     // Check if _metaspace got allocated while we were waiting for this lock.
 731     if ((metaspace = _metaspace) == NULL) {
 732       if (this == the_null_class_loader_data()) {
 733         assert (class_loader() == NULL, "Must be");
 734         metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
 735       } else if (is_anonymous()) {
 736         if (class_loader() != NULL) {
 737           log_trace(class, loader, data)("is_anonymous: %s", class_loader()->klass()->internal_name());
 738         }
 739         metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
 740       } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
 741         if (class_loader() != NULL) {
 742           log_trace(class, loader, data)("is_reflection: %s", class_loader()->klass()->internal_name());
 743         }
 744         metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
 745       } else {
 746         metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
 747       }
 748       // Ensure _metaspace is stable, since it is examined without a lock
 749       OrderAccess::release_store_ptr(&_metaspace, metaspace);
 750     }
 751   }
 752   return metaspace;
 753 }
 754 
 755 jobject ClassLoaderData::add_handle(Handle h) {
 756   MutexLockerEx ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
 757   return (jobject) _handles.add(h());
 758 }
 759 
 760 void ClassLoaderData::remove_handle_unsafe(jobject h) {
 761   assert(_handles.contains((oop*) h), "Got unexpected handle " PTR_FORMAT, p2i((oop*) h));
 762   *((oop*) h) = NULL;
 763 }
 764 
 765 // Add this metadata pointer to be freed when it's safe.  This is only during
 766 // class unloading because Handles might point to this metadata field.
 767 void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
 768   // Metadata in shared region isn't deleted.
 769   if (!m->is_shared()) {
 770     MutexLockerEx ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
 771     if (_deallocate_list == NULL) {
 772       _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
 773     }
 774     _deallocate_list->append_if_missing(m);
 775   }
 776 }
 777 
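// Entries on the deallocate list are freed lazily: free_deallocate_list() runs
// at a safepoint and only releases metadata that MetadataOnStackMark has not
// found referenced from a thread's stack; anything still marked on_stack()
// stays on the list for a later pass.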
 778 // Deallocate free metadata on the free list.  How useful the PermGen was!
 779 void ClassLoaderData::free_deallocate_list() {
 780   // Don't need lock, at safepoint
 781   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 782   if (_deallocate_list == NULL) {
 783     return;
 784   }
 785   // Go backwards because this removes entries that are freed.
 786   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 787     Metadata* m = _deallocate_list->at(i);
 788     if (!m->on_stack()) {
 789       _deallocate_list->remove_at(i);
 790       // There are only three types of metadata that we deallocate directly.
 791       // Cast them so they can be used by the template function.
 792       if (m->is_method()) {
 793         MetadataFactory::free_metadata(this, (Method*)m);
 794       } else if (m->is_constantPool()) {
 795         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 796       } else if (m->is_klass()) {
 797         MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 798       } else {
 799         ShouldNotReachHere();
 800       }
 801     } else {
 802       // Metadata is alive.
 803       // If scratch_class is on stack then it shouldn't be on this list!
 804       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 805              "scratch classes on this list should be dead");
 806       // Also should assert that other metadata on the list was found in handles.
 807     }
 808   }
 809 }
 810 
// These anonymous class loader data are created to hold classes used for JSR 292 (invokedynamic/method handles).
 812 ClassLoaderData* ClassLoaderData::anonymous_class_loader_data(oop loader, TRAPS) {
 813   // Add a new class loader data to the graph.
 814   Handle lh(THREAD, loader);
 815   return ClassLoaderDataGraph::add(lh, true, THREAD);
 816 }
 817 
 818 const char* ClassLoaderData::loader_name() {
 819   // Handles null class loader
 820   return SystemDictionary::loader_name(class_loader());
 821 }
 822 
 823 #ifndef PRODUCT
 824 // Define to dump klasses
 825 #undef CLD_DUMP_KLASSES
 826 
 827 void ClassLoaderData::dump(outputStream * const out) {
 828   out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: " PTR_FORMAT " %s {",
 829       p2i(this), p2i((void *)class_loader()),
 830       p2i(class_loader() != NULL ? class_loader()->klass() : NULL), loader_name());
 831   if (claimed()) out->print(" claimed ");
 832   if (is_unloading()) out->print(" unloading ");
 833   out->cr();
 834   if (metaspace_or_null() != NULL) {
 835     out->print_cr("metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));
 836     metaspace_or_null()->dump(out);
 837   } else {
 838     out->print_cr("metaspace: NULL");
 839   }
 840 
 841 #ifdef CLD_DUMP_KLASSES
 842   if (Verbose) {
 843     Klass* k = _klasses;
 844     while (k != NULL) {
 845       out->print_cr("klass " PTR_FORMAT ", %s, CT: %d, MUT: %d", k, k->name()->as_C_string(),
 846           k->has_modified_oops(), k->has_accumulated_modified_oops());
 847       assert(k != k->next_link(), "no loops!");
 848       k = k->next_link();
 849     }
 850   }
 851 #endif  // CLD_DUMP_KLASSES
 852 #undef CLD_DUMP_KLASSES
 853   if (_jmethod_ids != NULL) {
 854     Method::print_jmethod_ids(this, out);
 855   }
 856   out->print_cr("}");
 857 }
 858 #endif // PRODUCT
 859 
 860 void ClassLoaderData::verify() {
 861   assert_locked_or_safepoint(_metaspace_lock);
 862   oop cl = class_loader();
 863 
 864   guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
 865   guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");
 866 
 867   // Verify the integrity of the allocated space.
 868   if (metaspace_or_null() != NULL) {
 869     metaspace_or_null()->verify();
 870   }
 871 
 872   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
 873     guarantee(k->class_loader_data() == this, "Must be the same");
 874     k->verify();
 875     assert(k != k->next_link(), "no loops!");
 876   }
 877 }
 878 
 879 bool ClassLoaderData::contains_klass(Klass* klass) {
 880   // Lock-free access requires load_ptr_acquire
 881   for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
 882     if (k == klass) return true;
 883   }
 884   return false;
 885 }
 886 
 887 
// GC roots: the list of all created class loader data.
 889 ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
 890 ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
 891 ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
 892 ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;
 893 
 894 bool ClassLoaderDataGraph::_should_purge = false;
 895 bool ClassLoaderDataGraph::_metaspace_oom = false;
 896 
// Add a new class loader data node to the list.  Assign the newly created
// ClassLoaderData into the java/lang/ClassLoader object as a hidden field.
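// Two lock-free races are resolved here: a cmpxchg on the ClassLoader's hidden
// loader_data field decides which thread's ClassLoaderData is kept (the loser
// deletes its copy), and a cmpxchg on _head prepends the winner to the graph.
// Anonymous CLDs skip the first step because they are never installed in a
// java.lang.ClassLoader object.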
 899 ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous, TRAPS) {
 900   // We need to allocate all the oops for the ClassLoaderData before allocating the
 901   // actual ClassLoaderData object.
 902   ClassLoaderData::Dependencies dependencies(CHECK_NULL);
 903 
 904   NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
 905                                      // ClassLoaderData in the graph since the CLD
 906                                      // contains unhandled oops
 907 
 908   ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous, dependencies);
 909 
 910 
 911   if (!is_anonymous) {
 912     ClassLoaderData** cld_addr = java_lang_ClassLoader::loader_data_addr(loader());
    // First, atomically install this ClassLoaderData in the ClassLoader's hidden field
 914     ClassLoaderData* old = (ClassLoaderData*) Atomic::cmpxchg_ptr(cld, cld_addr, NULL);
 915     if (old != NULL) {
 916       delete cld;
      // Another thread won the race; return its ClassLoaderData.
 918       return old;
 919     }
 920   }
 921 
 922   // We won the race, and therefore the task of adding the data to the list of
 923   // class loader data
 924   ClassLoaderData** list_head = &_head;
 925   ClassLoaderData* next = _head;
 926 
 927   do {
 928     cld->set_next(next);
 929     ClassLoaderData* exchanged = (ClassLoaderData*)Atomic::cmpxchg_ptr(cld, list_head, next);
 930     if (exchanged == next) {
 931       if (log_is_enabled(Debug, class, loader, data)) {
 932        PauseNoSafepointVerifier pnsv(&no_safepoints); // Need safe points for JavaCalls::call_virtual
 933        log_creation(loader, cld, CHECK_NULL);
 934       }
 935       return cld;
 936     }
 937     next = exchanged;
 938   } while (true);
 939 }
 940 
 941 void ClassLoaderDataGraph::log_creation(Handle loader, ClassLoaderData* cld, TRAPS) {
 942   Handle string;
 943   if (loader.not_null()) {
 944     // Include the result of loader.toString() in the output. This allows
 945     // the user of the log to identify the class loader instance.
 946     JavaValue result(T_OBJECT);
 947     Klass* spec_klass = SystemDictionary::ClassLoader_klass();
 948     JavaCalls::call_virtual(&result,
 949                             loader,
 950                             spec_klass,
 951                             vmSymbols::toString_name(),
 952                             vmSymbols::void_string_signature(),
 953                             CHECK);
 954     assert(result.get_type() == T_OBJECT, "just checking");
 955     string = Handle(THREAD, (oop)result.get_jobject());
 956   }
 957 
 958   ResourceMark rm;
 959   outputStream* log = Log(class, loader, data)::debug_stream();
 960   log->print("create class loader data " INTPTR_FORMAT, p2i(cld));
 961   log->print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)cld->class_loader()),
 962              cld->loader_name());
 963 
 964   if (string.not_null()) {
 965     log->print(": ");
 966     java_lang_String::print(string(), log);
 967   }
 968   log->cr();
 969 }
 970 
 971 
 972 void ClassLoaderDataGraph::oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
 973   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
 974     cld->oops_do(f, klass_closure, must_claim);
 975   }
 976 }
 977 
 978 void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
 979   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
 980     if (cld->keep_alive()) {
 981       cld->oops_do(f, klass_closure, must_claim);
 982     }
 983   }
 984 }
 985 
 986 void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
 987   if (ClassUnloading) {
 988     keep_alive_oops_do(f, klass_closure, must_claim);
 989   } else {
 990     oops_do(f, klass_closure, must_claim);
 991   }
 992 }
 993 
 994 void ClassLoaderDataGraph::cld_do(CLDClosure* cl) {
 995   for (ClassLoaderData* cld = _head; cl != NULL && cld != NULL; cld = cld->next()) {
 996     cl->do_cld(cld);
 997   }
 998 }
 999 
1000 void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) {
1001   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the new entries at the head of the list; stop at any CLDs left
  // unpurged from a prior unloading (CMS doesn't purge right away).
1004   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1005     assert(cld->is_unloading(), "invariant");
1006     cl->do_cld(cld);
1007   }
1008 }
1009 
1010 void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) {
1011   for (ClassLoaderData* cld = _head;  cld != NULL; cld = cld->_next) {
1012     CLDClosure* closure = cld->keep_alive() ? strong : weak;
1013     if (closure != NULL) {
1014       closure->do_cld(cld);
1015     }
1016   }
1017 }
1018 
1019 void ClassLoaderDataGraph::keep_alive_cld_do(CLDClosure* cl) {
1020   roots_cld_do(cl, NULL);
1021 }
1022 
1023 void ClassLoaderDataGraph::always_strong_cld_do(CLDClosure* cl) {
1024   if (ClassUnloading) {
1025     keep_alive_cld_do(cl);
1026   } else {
1027     cld_do(cl);
1028   }
1029 }
1030 
1031 void ClassLoaderDataGraph::classes_do(KlassClosure* klass_closure) {
1032   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1033     cld->classes_do(klass_closure);
1034   }
1035 }
1036 
1037 void ClassLoaderDataGraph::classes_do(void f(Klass* const)) {
1038   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1039     cld->classes_do(f);
1040   }
1041 }
1042 
1043 void ClassLoaderDataGraph::methods_do(void f(Method*)) {
1044   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1045     cld->methods_do(f);
1046   }
1047 }
1048 
1049 void ClassLoaderDataGraph::modules_do(void f(ModuleEntry*)) {
1050   assert_locked_or_safepoint(Module_lock);
1051   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1052     cld->modules_do(f);
1053   }
1054 }
1055 
1056 void ClassLoaderDataGraph::modules_unloading_do(void f(ModuleEntry*)) {
1057   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the new entries at the head of the list; stop at any CLDs left
  // unpurged from a prior unloading (CMS doesn't purge right away).
1060   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1061     assert(cld->is_unloading(), "invariant");
1062     cld->modules_do(f);
1063   }
1064 }
1065 
1066 void ClassLoaderDataGraph::packages_do(void f(PackageEntry*)) {
1067   assert_locked_or_safepoint(Module_lock);
1068   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1069     cld->packages_do(f);
1070   }
1071 }
1072 
1073 void ClassLoaderDataGraph::packages_unloading_do(void f(PackageEntry*)) {
1074   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the new entries at the head of the list; stop at any CLDs left
  // unpurged from a prior unloading (CMS doesn't purge right away).
1077   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1078     assert(cld->is_unloading(), "invariant");
1079     cld->packages_do(f);
1080   }
1081 }
1082 
1083 void ClassLoaderDataGraph::loaded_classes_do(KlassClosure* klass_closure) {
1084   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1085     cld->loaded_classes_do(klass_closure);
1086   }
1087 }
1088 
1089 void ClassLoaderDataGraph::classes_unloading_do(void f(Klass* const)) {
1090   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the new entries at the head of the list; stop at any CLDs left
  // unpurged from a prior unloading (CMS doesn't purge right away).
1093   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1094     assert(cld->is_unloading(), "invariant");
1095     cld->classes_do(f);
1096   }
1097 }
1098 
1099 #define FOR_ALL_DICTIONARY(X) for (ClassLoaderData* X = _head; X != NULL; X = X->next()) \
1100                                 if (X->dictionary() != NULL)
1101 
1102 // Walk classes in the loaded class dictionaries in various forms.
1103 // Only walks the classes defined in this class loader.
1104 void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*)) {
1105   FOR_ALL_DICTIONARY(cld) {
1106     cld->dictionary()->classes_do(f);
1107   }
1108 }
1109 
1110 // Only walks the classes defined in this class loader.
1111 void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*, TRAPS), TRAPS) {
1112   FOR_ALL_DICTIONARY(cld) {
1113     cld->dictionary()->classes_do(f, CHECK);
1114   }
1115 }
1116 
1117 // Walks all entries in the dictionary including entries initiated by this class loader.
1118 void ClassLoaderDataGraph::dictionary_all_entries_do(void f(InstanceKlass*, ClassLoaderData*)) {
1119   FOR_ALL_DICTIONARY(cld) {
1120     cld->dictionary()->all_entries_do(f);
1121   }
1122 }
1123 
1124 void ClassLoaderDataGraph::verify_dictionary() {
1125   FOR_ALL_DICTIONARY(cld) {
1126     cld->dictionary()->verify();
1127   }
1128 }
1129 
1130 void ClassLoaderDataGraph::print_dictionary(bool details) {
1131   FOR_ALL_DICTIONARY(cld) {
1132     tty->print("Dictionary for class loader ");
1133     cld->print_value();
1134     tty->cr();
1135     cld->dictionary()->print(details);
1136   }
1137 }
1138 
1139 GrowableArray<ClassLoaderData*>* ClassLoaderDataGraph::new_clds() {
1140   assert(_head == NULL || _saved_head != NULL, "remember_new_clds(true) not called?");
1141 
1142   GrowableArray<ClassLoaderData*>* array = new GrowableArray<ClassLoaderData*>();
1143 
  // The CLDs in [_head, _saved_head) were all added since the last call to remember_new_clds(true).
1145   ClassLoaderData* curr = _head;
1146   while (curr != _saved_head) {
1147     if (!curr->claimed()) {
1148       array->push(curr);
1149 
1150       if (log_is_enabled(Debug, class, loader, data)) {
1151         outputStream* log = Log(class, loader, data)::debug_stream();
1152         log->print("found new CLD: ");
1153         curr->print_value_on(log);
1154         log->cr();
1155       }
1156     }
1157 
1158     curr = curr->_next;
1159   }
1160 
1161   return array;
1162 }
1163 
1164 bool ClassLoaderDataGraph::unload_list_contains(const void* x) {
1165   assert(SafepointSynchronize::is_at_safepoint(), "only safe to call at safepoint");
1166   for (ClassLoaderData* cld = _unloading; cld != NULL; cld = cld->next()) {
1167     if (cld->metaspace_or_null() != NULL && cld->metaspace_or_null()->contains(x)) {
1168       return true;
1169     }
1170   }
1171   return false;
1172 }
1173 
1174 #ifndef PRODUCT
1175 bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) {
1176   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1177     if (loader_data == data) {
1178       return true;
1179     }
1180   }
1181 
1182   return false;
1183 }
1184 #endif // PRODUCT
1185 
1186 
1187 // Move class loader data from main list to the unloaded list for unloading
1188 // and deallocation later.
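// Unloading happens in two phases: here, dead CLDs are unlinked from _head and
// chained onto the _unloading list (their classes are notified but their memory
// is not yet freed); the ClassLoaderData instances themselves are deleted
// later, at another safepoint, by purge().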
1189 bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure,
1190                                         bool clean_previous_versions) {
1191 
1192   ClassLoaderData* data = _head;
1193   ClassLoaderData* prev = NULL;
1194   bool seen_dead_loader = false;
1195 
  // Mark metadata seen on the stack so that we can delete unneeded entries.
  // Walk all metadata, including the expensive code cache walk, only during a
  // Full GC, and only if a class has been redefined and there are previous
  // versions of Klasses to delete.
1200   bool walk_all_metadata = clean_previous_versions &&
1201                            JvmtiExport::has_redefined_a_class() &&
1202                            InstanceKlass::has_previous_versions_and_reset();
1203   MetadataOnStackMark md_on_stack(walk_all_metadata);
1204 
  // Save the previous _unloading pointer for CMS, which may add to the unloading
  // list before purging; we don't want to rewalk previously unloaded class loader data.
1207   _saved_unloading = _unloading;
1208 
1209   data = _head;
1210   while (data != NULL) {
1211     if (data->is_alive(is_alive_closure)) {
1212       // clean metaspace
1213       if (walk_all_metadata) {
1214         data->classes_do(InstanceKlass::purge_previous_versions);
1215       }
1216       data->free_deallocate_list();
1217       prev = data;
1218       data = data->next();
1219       continue;
1220     }
1221     seen_dead_loader = true;
1222     ClassLoaderData* dead = data;
1223     dead->unload();
1224     data = data->next();
1225     // Remove from loader list.
1226     // This class loader data will no longer be found
1227     // in the ClassLoaderDataGraph.
1228     if (prev != NULL) {
1229       prev->set_next(data);
1230     } else {
1231       assert(dead == _head, "sanity check");
1232       _head = data;
1233     }
1234     dead->set_next(_unloading);
1235     _unloading = dead;
1236   }
1237 
1238   if (seen_dead_loader) {
1239     data = _head;
1240     while (data != NULL) {
      // Remove entries in the dictionaries of live class loaders that have
      // initiated loading of classes in a dead class loader.
1243       if (data->dictionary() != NULL) {
1244         data->dictionary()->do_unloading();
1245       }
1246       // Walk a ModuleEntry's reads, and a PackageEntry's exports
1247       // lists to determine if there are modules on those lists that are now
1248       // dead and should be removed.  A module's life cycle is equivalent
1249       // to its defining class loader's life cycle.  Since a module is
1250       // considered dead if its class loader is dead, these walks must
1251       // occur after each class loader's aliveness is determined.
1252       if (data->packages() != NULL) {
1253         data->packages()->purge_all_package_exports();
1254       }
1255       if (data->modules_defined()) {
1256         data->modules()->purge_all_module_reads();
1257       }
1258       data = data->next();
1259     }
1260 
1261     post_class_unload_events();
1262   }
1263 
1264   return seen_dead_loader;
1265 }
1266 
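// Second phase of unloading: delete every ClassLoaderData that do_unloading()
// moved onto the _unloading list.  The ~ClassLoaderData destructor releases the
// dictionary, module/package tables and the metaspace; if anything was deleted,
// the metaspace-level purge runs and the metaspace-OOM flag is reset.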
1267 void ClassLoaderDataGraph::purge() {
1268   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1269   ClassLoaderData* list = _unloading;
1270   _unloading = NULL;
1271   ClassLoaderData* next = list;
1272   bool classes_unloaded = false;
1273   while (next != NULL) {
1274     ClassLoaderData* purge_me = next;
1275     next = purge_me->next();
1276     delete purge_me;
1277     classes_unloaded = true;
1278   }
1279   if (classes_unloaded) {
1280     Metaspace::purge();
1281     set_metaspace_oom(false);
1282   }
1283 }
1284 
1285 void ClassLoaderDataGraph::post_class_unload_events() {
1286 #if INCLUDE_TRACE
1287   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1288   if (Tracing::enabled()) {
1289     if (Tracing::is_event_enabled(TraceClassUnloadEvent)) {
1290       assert(_unloading != NULL, "need class loader data unload list!");
1291       _class_unload_time = Ticks::now();
1292       classes_unloading_do(&class_unload_event);
1293     }
1294     Tracing::on_unloading_classes();
1295   }
1296 #endif
1297 }
1298 
1299 // CDS support
1300 
1301 // Global metaspaces for writing information to the shared archive.  When
1302 // application CDS is supported, we may need one per metaspace, so this
1303 // sort of looks like it.
1304 Metaspace* ClassLoaderData::_ro_metaspace = NULL;
1305 Metaspace* ClassLoaderData::_rw_metaspace = NULL;
1306 static bool _shared_metaspaces_initialized = false;
1307 
1308 // Initialize shared metaspaces (change to call from somewhere not lazily)
1309 void ClassLoaderData::initialize_shared_metaspaces() {
1310   assert(DumpSharedSpaces, "only use this for dumping shared spaces");
1311   assert(this == ClassLoaderData::the_null_class_loader_data(),
1312          "only supported for null loader data for now");
1313   assert (!_shared_metaspaces_initialized, "only initialize once");
1314   MutexLockerEx ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
1315   _ro_metaspace = new Metaspace(_metaspace_lock, Metaspace::ROMetaspaceType);
1316   _rw_metaspace = new Metaspace(_metaspace_lock, Metaspace::ReadWriteMetaspaceType);
1317   _shared_metaspaces_initialized = true;
1318 }
1319 
1320 Metaspace* ClassLoaderData::ro_metaspace() {
1321   assert(_ro_metaspace != NULL, "should already be initialized");
1322   return _ro_metaspace;
1323 }
1324 
1325 Metaspace* ClassLoaderData::rw_metaspace() {
1326   assert(_rw_metaspace != NULL, "should already be initialized");
1327   return _rw_metaspace;
1328 }
1329 
1330 ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic()
1331     : _next_klass(NULL) {
1332   ClassLoaderData* cld = ClassLoaderDataGraph::_head;
1333   Klass* klass = NULL;
1334 
1335   // Find the first klass in the CLDG.
1336   while (cld != NULL) {
1337     assert_locked_or_safepoint(cld->metaspace_lock());
1338     klass = cld->_klasses;
1339     if (klass != NULL) {
1340       _next_klass = klass;
1341       return;
1342     }
1343     cld = cld->next();
1344   }
1345 }
1346 
1347 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) {
1348   Klass* next = klass->next_link();
1349   if (next != NULL) {
1350     return next;
1351   }
1352 
1353   // No more klasses in the current CLD. Time to find a new CLD.
1354   ClassLoaderData* cld = klass->class_loader_data();
1355   assert_locked_or_safepoint(cld->metaspace_lock());
1356   while (next == NULL) {
1357     cld = cld->next();
1358     if (cld == NULL) {
1359       break;
1360     }
1361     next = cld->_klasses;
1362   }
1363 
1364   return next;
1365 }
1366 
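// Multiple GC worker threads may pull from this iterator concurrently: each
// call computes the successor of the current _next_klass and tries to install
// it with a CAS.  The thread whose CAS succeeds owns the klass it read; a
// losing thread simply retries with the value the winner left behind.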
1367 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
1368   Klass* head = _next_klass;
1369 
1370   while (head != NULL) {
1371     Klass* next = next_klass_in_cldg(head);
1372 
1373     Klass* old_head = (Klass*)Atomic::cmpxchg_ptr(next, &_next_klass, head);
1374 
1375     if (old_head == head) {
1376       return head; // Won the CAS.
1377     }
1378 
1379     head = old_head;
1380   }
1381 
1382   // Nothing more for the iterator to hand out.
  assert(head == NULL, "head is " PTR_FORMAT ", expected NULL", p2i(head));
1384   return NULL;
1385 }
1386 
1387 ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
1388   _data = ClassLoaderDataGraph::_head;
1389 }
1390 
1391 ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}
1392 
1393 #ifndef PRODUCT
1394 // callable from debugger
1395 extern "C" int print_loader_data_graph() {
1396   ClassLoaderDataGraph::dump_on(tty);
1397   return 0;
1398 }
1399 
1400 void ClassLoaderDataGraph::verify() {
1401   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1402     data->verify();
1403   }
1404 }
1405 
1406 void ClassLoaderDataGraph::dump_on(outputStream * const out) {
1407   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1408     data->dump(out);
1409   }
1410   MetaspaceAux::dump(out);
1411 }
1412 #endif // PRODUCT
1413 
1414 void ClassLoaderData::print_value_on(outputStream* out) const {
1415   if (class_loader() == NULL) {
1416     out->print("NULL class_loader");
1417   } else {
1418     out->print("class loader " INTPTR_FORMAT " ", p2i(this));
1419     class_loader()->print_value_on(out);
1420   }
1421 }
1422 
1423 void ClassLoaderData::print_on(outputStream* out) const {
1424   if (class_loader() == NULL) {
1425     out->print("NULL class_loader");
1426   } else {
1427     out->print("class loader " INTPTR_FORMAT " ", p2i(this));
1428     class_loader()->print_on(out);
1429   }
1430 }
1431 
1432 #if INCLUDE_TRACE
1433 
1434 Ticks ClassLoaderDataGraph::_class_unload_time;
1435 
1436 void ClassLoaderDataGraph::class_unload_event(Klass* const k) {
1437   assert(k != NULL, "invariant");
1438 
1439   // post class unload event
1440   EventClassUnload event(UNTIMED);
1441   event.set_endtime(_class_unload_time);
1442   event.set_unloadedClass(k);
1443   event.set_definingClassLoader(k->class_loader_data());
1444   event.commit();
1445 }
1446 
1447 #endif // INCLUDE_TRACE