src/hotspot/share/classfile/classLoaderData.cpp

  30 // according to its name resolution strategy.
  31 //
  32 // Class loaders that implement a deterministic name resolution strategy
  33 // (including with respect to their delegation behavior), such as the boot, the
  34 // platform, and the system loaders of the JDK's built-in class loader
  35 // hierarchy, always produce the same linkset for a given configuration.
  36 //
  37 // ClassLoaderData carries information related to a linkset (e.g.,
  38 // metaspace holding its klass definitions).
  39 // The System Dictionary and related data structures (e.g., placeholder table,
  40 // loader constraints table) as well as the runtime representation of classes
  41 // only reference ClassLoaderData.
  42 //
  43 // Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData that
  44 // represents the loader's "linking domain" in the JVM.
  45 //
  46 // The bootstrap loader (represented by NULL) also has a ClassLoaderData,
  47 // the singleton class the_null_class_loader_data().
  48 
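// A minimal sketch, not part of this file, of how a loader oop is mapped to
// its linking domain; it assumes the accessors declared in classLoaderData.hpp
// and javaClasses.hpp that are included below. The bootstrap loader,
// represented by a NULL oop, maps to the singleton CLD.
static ClassLoaderData* linking_domain_of(oop loader) {
  if (loader == NULL) {
    return ClassLoaderData::the_null_class_loader_data();
  }
  // Hidden injected field, installed when ClassLoaderDataGraph::add() creates the CLD.
  return java_lang_ClassLoader::loader_data(loader);
}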
  49 #include "precompiled.hpp"
  50 #include "classfile/classLoaderData.hpp"
  51 #include "classfile/classLoaderData.inline.hpp"
  52 #include "classfile/dictionary.hpp"
  53 #include "classfile/javaClasses.hpp"
  54 #include "classfile/metadataOnStackMark.hpp"
  55 #include "classfile/moduleEntry.hpp"
  56 #include "classfile/packageEntry.hpp"
  57 #include "classfile/systemDictionary.hpp"
  58 #include "code/codeCache.hpp"
  59 #include "gc/shared/gcLocker.hpp"
  60 #include "logging/log.hpp"
  61 #include "logging/logStream.hpp"

  62 #include "memory/metadataFactory.hpp"
  63 #include "memory/metaspaceShared.hpp"
  64 #include "memory/oopFactory.hpp"
  65 #include "memory/resourceArea.hpp"
  66 #include "oops/objArrayOop.inline.hpp"
  67 #include "oops/oop.inline.hpp"
  68 #include "runtime/atomic.hpp"
  69 #include "runtime/javaCalls.hpp"
  70 #include "runtime/jniHandles.hpp"
  71 #include "runtime/mutex.hpp"
  72 #include "runtime/orderAccess.hpp"
  73 #include "runtime/safepoint.hpp"
  74 #include "runtime/synchronizer.hpp"
  75 #include "utilities/growableArray.hpp"
  76 #include "utilities/macros.hpp"
  77 #include "utilities/ostream.hpp"
  78 #if INCLUDE_ALL_GCS
  79 #include "gc/g1/g1SATBCardTableModRefBS.hpp"
  80 #endif // INCLUDE_ALL_GCS
  81 #if INCLUDE_TRACE
  82 #include "trace/tracing.hpp"

  83 #endif
  84 
  85 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
  86 
  87 ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous, Dependencies dependencies) :
  88   _class_loader(h_class_loader()),
  89   _is_anonymous(is_anonymous),
  90   // An anonymous class loader data doesn't have anything to keep
  91   // it from being unloaded during parsing of the anonymous class.
  92   // The null-class-loader should always be kept alive.
  93   _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
  94   _metaspace(NULL), _unloading(false), _klasses(NULL),
  95   _modules(NULL), _packages(NULL),
  96   _claimed(0), _modified_oops(true), _accumulated_modified_oops(false),
  97   _jmethod_ids(NULL), _handles(), _deallocate_list(NULL),
  98   _next(NULL), _dependencies(dependencies),
  99   _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
 100                             Monitor::_safepoint_check_never)) {
 101 
 102   // A ClassLoaderData created solely for an anonymous class should never have a
 103   // ModuleEntryTable or PackageEntryTable created for it. The defining package
 104   // and module for an anonymous class will be found in its host class.
 105   if (!is_anonymous) {
 106     _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
 107     if (h_class_loader.is_null()) {
 108       // Create unnamed module for boot loader
 109       _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
 110     } else {
 111       // Create unnamed module for all other loaders
 112       _unnamed_module = ModuleEntry::create_unnamed_module(this);
 113     }
 114   } else {
 115     _unnamed_module = NULL;
 116   }
 117 
 118   if (!is_anonymous) {
 119     _dictionary = create_dictionary();
 120   } else {
 121     _dictionary = NULL;
 122   }
 123   TRACE_INIT_ID(this);







 124 }
 125 
 126 void ClassLoaderData::init_dependencies(TRAPS) {
 127   assert(!Universe::is_fully_initialized(), "should only be called when initializing");
 128   assert(is_the_null_class_loader_data(), "should only call this for the null class loader");
 129   _dependencies.init(CHECK);
 130 }
 131 
 132 void ClassLoaderData::Dependencies::init(TRAPS) {
 133   // Create empty dependencies array to add to. CMS requires this to be
 134   // an oop so that it can track additions via card marks.  We think.
 135   _list_head = oopFactory::new_objectArray(2, CHECK);
 136 }
 137 
 138 ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
 139   Chunk* c = _head;
 140   while (c != NULL) {
 141     Chunk* next = c->_next;
 142     delete c;
 143     c = next;
 144   }
 145 }
 146 
 147 oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
 148   if (_head == NULL || _head->_size == Chunk::CAPACITY) {
 149     Chunk* next = new Chunk(_head);
 150     OrderAccess::release_store(&_head, next);
 151   }
 152   oop* handle = &_head->_data[_head->_size];
 153   *handle = o;
 154   OrderAccess::release_store(&_head->_size, _head->_size + 1);
 155   return handle;
 156 }
 157 
 158 inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
 159   for (juint i = 0; i < size; i++) {
 160     if (c->_data[i] != NULL) {
 161       f->do_oop(&c->_data[i]);
 162     }
 163   }
 164 }
 165 
 166 void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
 167   Chunk* head = OrderAccess::load_acquire(&_head);
 168   if (head != NULL) {
 169     // Must be careful when reading size of head
 170     oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
 171     for (Chunk* c = head->_next; c != NULL; c = c->_next) {
 172       oops_do_chunk(f, c, c->_size);
 173     }
 174   }
 175 }
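// Note on the acquire/release pairing above: add() publishes a new Chunk with
// release_store(&_head, ...) before storing any oop into it, and publishes each
// stored oop before bumping _size with release_store(&_head->_size, ...).
// oops_do() therefore reads _head and the head chunk's _size with load_acquire
// and never visits a slot whose oop store is not yet visible; older chunks are
// full and immutable, so their _size can be read without ordering.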
 176 








 177 #ifdef ASSERT
 178 class VerifyContainsOopClosure : public OopClosure {
 179   oop* _target;
 180   bool _found;
 181 
 182  public:
 183   VerifyContainsOopClosure(oop* target) : _target(target), _found(false) {}
 184 
 185   void do_oop(oop* p) {
 186     if (p == _target) {
 187       _found = true;
 188     }
 189   }
 190 
 191   void do_oop(narrowOop* p) {
 192     // The ChunkedHandleList should not contain any narrowOop
 193     ShouldNotReachHere();
 194   }
 195 
 196   bool found() const {
 197     return _found;
 198   }
 199 };
 200 
 201 bool ClassLoaderData::ChunkedHandleList::contains(oop* p) {
 202   VerifyContainsOopClosure cl(p);
 203   oops_do(&cl);
 204   return cl.found();
 205 }
 206 #endif // ASSERT
 207 
 208 bool ClassLoaderData::claim() {
 209   if (_claimed == 1) {
 210     return false;
 211   }
 212 
 213   return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0;
 214 }

 215 
 216 // Anonymous classes have their own ClassLoaderData that is marked to be kept alive
 217 // while the class is being parsed, and also while the class appears on the module fixup list.
 218 // Because no other class shares the anonymous class' name or ClassLoaderData,
 219 // no other non-GC thread has knowledge of the anonymous class while it is being
 220 // defined; therefore _keep_alive does not need to be volatile or atomic.
 221 void ClassLoaderData::inc_keep_alive() {
 222   if (is_anonymous()) {
 223     assert(_keep_alive >= 0, "Invalid keep alive increment count");
 224     _keep_alive++;




 225   }
 226 }
 227 
 228 void ClassLoaderData::dec_keep_alive() {
 229   if (is_anonymous()) {
 230     assert(_keep_alive > 0, "Invalid keep alive decrement count");
 231     _keep_alive--;
 232   }

 233 }
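// A hedged sketch of the intended pairing, assuming a single defining thread;
// the RAII guard below is hypothetical and only inc/dec_keep_alive come from
// this file. It brackets the window during which the anonymous class' CLD must
// not be unloaded:
class CLDKeepAliveGuard {
  ClassLoaderData* _cld;
 public:
  CLDKeepAliveGuard(ClassLoaderData* cld) : _cld(cld) { _cld->inc_keep_alive(); }
  ~CLDKeepAliveGuard()                                { _cld->dec_keep_alive(); }
};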
 234 
 235 void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) {
 236   if (must_claim && !claim()) {
 237     return;
 238   }
 239 
 240   // Only clear modified_oops after the ClassLoaderData is claimed.
 241   if (clear_mod_oops) {
 242     clear_modified_oops();
 243   }
 244 
 245   f->do_oop(&_class_loader);
 246   _dependencies.oops_do(f);
 247   _handles.oops_do(f);





 248 }
 249 
 250 void ClassLoaderData::Dependencies::oops_do(OopClosure* f) {
 251   f->do_oop((oop*)&_list_head);
 252 }











 253 
 254 void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
 255   // Lock-free access requires load_acquire
 256   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 257     klass_closure->do_klass(k);
 258     assert(k != k->next_link(), "no loops!");






 259   }
 260 }
 261 
 262 void ClassLoaderData::classes_do(void f(Klass * const)) {
 263   // Lock-free access requires load_acquire
 264   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 265     f(k);
 266     assert(k != k->next_link(), "no loops!");
 267   }
 268 }
 269 
 270 void ClassLoaderData::methods_do(void f(Method*)) {
 271   // Lock-free access requires load_acquire
 272   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 273     if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
 274       InstanceKlass::cast(k)->methods_do(f);
 275     }
 276   }

 277 }
 278 
 279 void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
 280   // Lock-free access requires load_acquire
 281   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 282     // Do not filter ArrayKlass oops here...
 283     if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
 284       klass_closure->do_klass(k);
 285     }
 286   }
 287 }
 288 
 289 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 290   // Lock-free access requires load_acquire
 291   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 292     if (k->is_instance_klass()) {
 293       f(InstanceKlass::cast(k));
 294     }
 295     assert(k != k->next_link(), "no loops!");
 296   }
 297 }
 298 
 299 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 300   assert_locked_or_safepoint(Module_lock);
 301   if (_unnamed_module != NULL) {
 302     f(_unnamed_module);
 303   }
 304   if (_modules != NULL) {
 305     for (int i = 0; i < _modules->table_size(); i++) {
 306       for (ModuleEntry* entry = _modules->bucket(i);
 307            entry != NULL;
 308            entry = entry->next()) {
 309         f(entry);
 310       }
 311     }
 312   }
 313 }
 314 
 315 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 316   assert_locked_or_safepoint(Module_lock);
 317   if (_packages != NULL) {
 318     for (int i = 0; i < _packages->table_size(); i++) {
 319       for (PackageEntry* entry = _packages->bucket(i);
 320            entry != NULL;
 321            entry = entry->next()) {
 322         f(entry);
 323       }
 324     }
 325   }
 326 }
 327 
 328 void ClassLoaderData::record_dependency(const Klass* k, TRAPS) {
 329   assert(k != NULL, "invariant");
 330 
 331   ClassLoaderData * const from_cld = this;
 332   ClassLoaderData * const to_cld = k->class_loader_data();
 333 
 334   // Dependency to the null class loader data doesn't need to be recorded
 335   // because the null class loader data never goes away.
 336   if (to_cld->is_the_null_class_loader_data()) {
 337     return;
 338   }
 339 
 340   oop to;
 341   if (to_cld->is_anonymous()) {
 342     // Anonymous class dependencies are through the mirror.
 343     to = k->java_mirror();
 344   } else {
 345     to = to_cld->class_loader();
 346 
 347     // If from_cld is anonymous, even if its class_loader is a parent of 'to'
 348     // we still have to add it.  The class_loader won't keep from_cld alive.
 349     if (!from_cld->is_anonymous()) {
 350       // Check that this dependency isn't from the same or parent class_loader
 351       oop from = from_cld->class_loader();
 352 
 353       oop curr = from;
 354       while (curr != NULL) {
 355         if (curr == to) {
 356           return; // this class loader is in the parent list, no need to add it.
 357         }
 358         curr = java_lang_ClassLoader::parent(curr);
 359       }
 360     }








 361   }


 362 
 363   // It's a dependency we won't find through GC, add it. This is relatively rare
 364   // Must handle over GC point.
 365   Handle dependency(THREAD, to);
 366   from_cld->_dependencies.add(dependency, CHECK);
 367 
 368   // Added a potentially young gen oop to the ClassLoaderData
 369   record_modified_oops();
 370 }
 371 
 372 
 373 void ClassLoaderData::Dependencies::add(Handle dependency, TRAPS) {
 374   // Check first if this dependency is already in the list.
 375   // Save a pointer to the last to add to under the lock.
 376   objArrayOop ok = _list_head;
 377   objArrayOop last = NULL;
 378   while (ok != NULL) {
 379     last = ok;
 380     if (ok->obj_at(0) == dependency()) {
 381       // Don't need to add it
 382       return;
 383     }
 384     ok = (objArrayOop)ok->obj_at(1);
 385   }
 386 
 387   // Must handle over GC points
 388   assert (last != NULL, "dependencies should be initialized");
 389   objArrayHandle last_handle(THREAD, last);
 390 
 391   // Create a new dependency node with fields for (class_loader or mirror, next)
 392   objArrayOop deps = oopFactory::new_objectArray(2, CHECK);
 393   deps->obj_at_put(0, dependency());
 394 
 395   // Must handle over GC points
 396   objArrayHandle new_dependency(THREAD, deps);
 397 
 398   // Add the dependency under lock
 399   locked_add(last_handle, new_dependency, THREAD);
 400 }
 401 
 402 void ClassLoaderData::Dependencies::locked_add(objArrayHandle last_handle,
 403                                                objArrayHandle new_dependency,
 404                                                Thread* THREAD) {
 405 
 406   // Have to lock and put the new dependency on the end of the dependency
 407   // array so the card mark for CMS sees that this dependency is new.
 408   // Can probably do this lock free with some effort.
 409   ObjectLocker ol(Handle(THREAD, _list_head), THREAD);
 410 
 411   oop loader_or_mirror = new_dependency->obj_at(0);
 412 
 413   // Since the dependencies are only added, add to the end.
 414   objArrayOop end = last_handle();
 415   objArrayOop last = NULL;
 416   while (end != NULL) {
 417     last = end;
 418     // check again if another thread added it to the end.
 419     if (end->obj_at(0) == loader_or_mirror) {
 420       // Don't need to add it
 421       return;
 422     }
 423     end = (objArrayOop)end->obj_at(1);














 424   }
 425   assert (last != NULL, "dependencies should be initialized");
 426   // fill in the first element with the oop in new_dependency.
 427   if (last->obj_at(0) == NULL) {
 428     last->obj_at_put(0, new_dependency->obj_at(0));
 429   } else {
 430     last->obj_at_put(1, new_dependency());




 431   }
 432 }
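// Shape of the list maintained by add()/locked_add() above, as a sketch: each
// node is a length-2 objArray whose slot 0 holds the class loader oop (or the
// mirror for an anonymous class) and whose slot 1 links to the next node.
//
//   _list_head -> [ dep0 | * ] -> [ dep1 | * ] -> [ depN | NULL ]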
 433 
 434 void ClassLoaderDataGraph::clear_claimed_marks() {
 435   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
 436     cld->clear_claimed();
 437   }
 438 }
 439 
 440 void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
 441   {
 442     MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 443     Klass* old_value = _klasses;
 444     k->set_next_link(old_value);
 445     // Link the new item into the list, making sure the linked class is stable
 446     // since the list can be walked without a lock
 447     OrderAccess::release_store(&_klasses, k);











 448   }
 449 
 450   if (publicize && k->class_loader_data() != NULL) {
 451     ResourceMark rm;
 452     log_trace(class, loader, data)("Adding k: " PTR_FORMAT " %s to CLD: "
 453                   PTR_FORMAT " loader: " PTR_FORMAT " %s",
 454                   p2i(k),
 455                   k->external_name(),
 456                   p2i(k->class_loader_data()),
 457                   p2i((void *)k->class_loader()),
 458                   loader_name());
 459   }
 460 }
 461 
 462 // Class iterator used by the compiler.  It gets some number of classes at
 463 // a safepoint to decay invocation counters on the methods.
 464 class ClassLoaderDataGraphKlassIteratorStatic {



 465   ClassLoaderData* _current_loader_data;
 466   Klass*           _current_class_entry;
 467  public:
 468 
 469   ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {}
 470 
 471   InstanceKlass* try_get_next_class() {
 472     assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 473     int max_classes = InstanceKlass::number_of_instance_classes();
 474     assert(max_classes > 0, "should not be called with no instance classes");
 475     for (int i = 0; i < max_classes; ) {
 476 
 477       if (_current_class_entry != NULL) {
 478         Klass* k = _current_class_entry;
 479         _current_class_entry = _current_class_entry->next_link();
 480 
 481         if (k->is_instance_klass()) {
 482           InstanceKlass* ik = InstanceKlass::cast(k);
 483           i++;  // count all instance classes found
 484           // Not yet loaded classes are counted in max_classes
 485           // but only return loaded classes.
 486           if (ik->is_loaded()) {
 487             return ik;
 488           }
 489         }


 504     // Caller handles NULL.
 505     return NULL;
 506   }
 507 
 508   // If the current class for the static iterator is a class being unloaded or
 509   // deallocated, adjust the current class.
 510   void adjust_saved_class(ClassLoaderData* cld) {
 511     if (_current_loader_data == cld) {
 512       _current_loader_data = cld->next();
 513       if (_current_loader_data != NULL) {
 514         _current_class_entry = _current_loader_data->klasses();
 515       }  // else try_get_next_class will start at the head
 516     }
 517   }
 518 
 519   void adjust_saved_class(Klass* klass) {
 520     if (_current_class_entry == klass) {
 521       _current_class_entry = klass->next_link();
 522     }
 523   }



 524 };
 525 
 526 static ClassLoaderDataGraphKlassIteratorStatic static_klass_iterator;
 527 
 528 InstanceKlass* ClassLoaderDataGraph::try_get_next_class() {
 529   return static_klass_iterator.try_get_next_class();
 530 }
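// A minimal usage sketch, assuming a safepoint operation that visits a bounded
// number of loaded classes; do_one_class() is a hypothetical placeholder for
// the per-class work (e.g. decaying invocation counters).
static void visit_some_classes(int budget) {
  assert(SafepointSynchronize::is_at_safepoint(), "iterator is safepoint-only");
  for (int i = 0; i < budget; i++) {
    InstanceKlass* ik = ClassLoaderDataGraph::try_get_next_class();
    if (ik == NULL) {
      break;  // nothing more to hand out; the caller handles NULL
    }
    // do_one_class(ik);
  }
}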
 531 
 532 
 533 // Remove a klass from the _klasses list for scratch_class during redefinition
 534 // or parsed class in the case of an error.
 535 void ClassLoaderData::remove_class(Klass* scratch_class) {
 536   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 537 
 538   // Adjust global class iterator.
 539   static_klass_iterator.adjust_saved_class(scratch_class);
 540 
 541   Klass* prev = NULL;
 542   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
 543     if (k == scratch_class) {
 544       if (prev == NULL) {
 545         _klasses = k->next_link();
 546       } else {
 547         Klass* next = k->next_link();
 548         prev->set_next_link(next);
 549       }
 550       return;
 551     }
 552     prev = k;
 553     assert(k != k->next_link(), "no loops!");
 554   }
 555   ShouldNotReachHere();   // should have found this class!!
 556 }
 557 
 558 void ClassLoaderData::unload() {
 559   _unloading = true;
 560 
 561   // Tell serviceability tools these classes are unloading
 562   classes_do(InstanceKlass::notify_unload_class);
 563 
 564   LogTarget(Debug, class, loader, data) lt;
 565   if (lt.is_enabled()) {
 566     ResourceMark rm;
 567     LogStream ls(lt);
 568     ls.print(": unload loader data " INTPTR_FORMAT, p2i(this));
 569     ls.print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)class_loader()),
 570                loader_name());
 571     if (is_anonymous()) {
 572       ls.print(" for anonymous class  " INTPTR_FORMAT " ", p2i(_klasses));
 573     }
 574     ls.cr();
 575   }
 576 
 577   // Some items on the _deallocate_list need to free their C heap structures
 578   // if they are not already on the _klasses list.
 579   unload_deallocate_list();
 580 
 581   // Clean up global class iterator for compiler
 582   static_klass_iterator.adjust_saved_class(this);
 583 }
 584 
 585 ModuleEntryTable* ClassLoaderData::modules() {
 586   // Lazily create the module entry table at first request.
 587   // Lock-free access requires load_acquire.
 588   ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
 589   if (modules == NULL) {
 590     MutexLocker m1(Module_lock);
 591     // Check if _modules got allocated while we were waiting for this lock.
 592     if ((modules = _modules) == NULL) {
 593       modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
 594 
 595       {
 596         MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 597         // Ensure _modules is stable, since it is examined without a lock
 598         OrderAccess::release_store(&_modules, modules);

 599       }







 600     }

 601   }
 602   return modules;
 603 }
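// The lazy-init discipline above in miniature: a generic, hedged sketch (not a
// HotSpot utility) of the same pattern used here and in metaspace_non_null():
// load_acquire on the fast path, re-check under the lock, release_store to
// publish so lock-free readers never see a half-constructed table.
template <typename T>
static T* lazy_publish(T* volatile* field, Mutex* lock) {
  T* t = OrderAccess::load_acquire(field);
  if (t == NULL) {
    MutexLocker ml(lock);
    if ((t = *field) == NULL) {
      t = new T();
      OrderAccess::release_store(field, t);
    }
  }
  return t;
}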
 604 
 605 const int _boot_loader_dictionary_size    = 1009;
 606 const int _default_loader_dictionary_size = 107;
 607 
 608 Dictionary* ClassLoaderData::create_dictionary() {
 609   assert(!is_anonymous(), "anonymous class loader data do not have a dictionary");
 610   int size;
 611   bool resizable = false;
 612   if (_the_null_class_loader_data == NULL) {
 613     size = _boot_loader_dictionary_size;
 614     resizable = true;
 615   } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
 616     size = 1;  // there's only one class in the reflection class loader and no initiated classes
 617   } else if (is_system_class_loader_data()) {
 618     size = _boot_loader_dictionary_size;
 619     resizable = true;
 620   } else {
 621     size = _default_loader_dictionary_size;
 622     resizable = true;
 623   }
 624   if (!DynamicallyResizeSystemDictionaries || DumpSharedSpaces || UseSharedSpaces) {
 625     resizable = false;
 626   }
 627   return new Dictionary(this, size, resizable);
 628 }
 629 
 630 // Unloading support
 631 oop ClassLoaderData::keep_alive_object() const {
 632   assert_locked_or_safepoint(_metaspace_lock);
 633   assert(!keep_alive(), "Don't use with CLDs that are artificially kept alive");
 634   return is_anonymous() ? _klasses->java_mirror() : class_loader();


 635 }
 636 
 637 bool ClassLoaderData::is_alive(BoolObjectClosure* is_alive_closure) const {
 638   bool alive = keep_alive() // null class loader and incomplete anonymous klasses.
 639       || is_alive_closure->do_object_b(keep_alive_object());
 640 
 641   return alive;









 642 }
 643 
 644 ClassLoaderData::~ClassLoaderData() {
 645   // Release C heap structures for all the classes.
 646   classes_do(InstanceKlass::release_C_heap_structures);
 647 
 648   // Release C heap allocated hashtable for all the packages.
 649   if (_packages != NULL) {
 650     // Destroy the table itself
 651     delete _packages;
 652     _packages = NULL;


 653   }
 654 
 655   // Release C heap allocated hashtable for all the modules.
 656   if (_modules != NULL) {
 657     // Destroy the table itself
 658     delete _modules;
 659     _modules = NULL;
 660   }
 661 
 662   // Release C heap allocated hashtable for the dictionary
 663   if (_dictionary != NULL) {
 664     // Destroy the table itself
 665     delete _dictionary;
 666     _dictionary = NULL;
 667   }

 668 
 669   if (_unnamed_module != NULL) {
 670     _unnamed_module->delete_unnamed_module();
 671     _unnamed_module = NULL;
 672   }
 673 
 674   // release the metaspace
 675   Metaspace *m = _metaspace;
 676   if (m != NULL) {
 677     _metaspace = NULL;
 678     delete m;
 679   }
 680   // Clear all the JNI handles for methods
 681   // These aren't deallocated and are going to look like a leak, but that's
 682   // needed because we can't really get rid of jmethodIDs because we don't
 683   // know when native code is going to stop using them.  The spec says that
 684   // they're "invalid" but existing programs likely rely on their being
 685   // NULL after class unloading.
 686   if (_jmethod_ids != NULL) {
 687     Method::clear_jmethod_ids(this);
 688   }
 689   // Delete lock
 690   delete _metaspace_lock;
 691 
 692   // Delete free list
 693   if (_deallocate_list != NULL) {
 694     delete _deallocate_list;
 695   }
 696 }
 697 
 698 // Returns true if this class loader data is for the system class loader.
 699 bool ClassLoaderData::is_system_class_loader_data() const {
 700   return SystemDictionary::is_system_class_loader(class_loader());


 701 }
 702 
 703 // Returns true if this class loader data is for the platform class loader.
 704 bool ClassLoaderData::is_platform_class_loader_data() const {
 705   return SystemDictionary::is_platform_class_loader(class_loader());
 706 }
 707 
 708 // Returns true if this class loader data is one of the 3 builtin
 709 // (boot, application/system or platform) class loaders. Note, the
 710 // builtin loaders are not freed by a GC.
 711 bool ClassLoaderData::is_builtin_class_loader_data() const {
 712   return (is_the_null_class_loader_data() ||
 713           SystemDictionary::is_system_class_loader(class_loader()) ||
 714           SystemDictionary::is_platform_class_loader(class_loader()));


















 715 }
 716 
 717 Metaspace* ClassLoaderData::metaspace_non_null() {
 718   // If the metaspace has not been allocated, create a new one.  Might want
 719   // to create smaller arena for Reflection class loaders also.
 720   // The reason for the delayed allocation is because some class loaders are
 721   // simply for delegating with no metadata of their own.
 722   // Lock-free access requires load_acquire.
 723   Metaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
 724   if (metaspace == NULL) {
 725     MutexLockerEx ml(_metaspace_lock,  Mutex::_no_safepoint_check_flag);
 726     // Check if _metaspace got allocated while we were waiting for this lock.
 727     if ((metaspace = _metaspace) == NULL) {
 728       if (this == the_null_class_loader_data()) {
 729         assert (class_loader() == NULL, "Must be");
 730         metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
 731       } else if (is_anonymous()) {
 732         if (class_loader() != NULL) {
 733           log_trace(class, loader, data)("is_anonymous: %s", class_loader()->klass()->internal_name());
 734         }
 735         metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
 736       } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
 737         if (class_loader() != NULL) {
 738           log_trace(class, loader, data)("is_reflection: %s", class_loader()->klass()->internal_name());
 739         }
 740         metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
 741       } else {
 742         metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
 743       }
 744       // Ensure _metaspace is stable, since it is examined without a lock
 745       OrderAccess::release_store(&_metaspace, metaspace);
 746     }
 747   }
 748   return metaspace;
 749 }
 750 
 751 OopHandle ClassLoaderData::add_handle(Handle h) {
 752   MutexLockerEx ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
 753   record_modified_oops();
 754   return OopHandle(_handles.add(h()));
 755 }
 756 
 757 void ClassLoaderData::remove_handle(OopHandle h) {
 758   assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
 759   oop* ptr = h.ptr_raw();
 760   if (ptr != NULL) {
 761     assert(_handles.contains(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
 762 #if INCLUDE_ALL_GCS
 763     // This barrier is used by G1 to remember the old oop values, so
 764     // that we don't forget any objects that were live at the snapshot at
 765     // the beginning.
 766     if (UseG1GC) {
 767       oop obj = *ptr;
 768       if (obj != NULL) {
 769         G1SATBCardTableModRefBS::enqueue(obj);
 770       }
 771     }
 772 #endif
 773     *ptr = NULL;
 774   }
 775 }
 776 
 777 void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
 778   MutexLockerEx ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
 779   if (dest.resolve() != NULL) {
 780     return;
 781   } else {
 782     dest = _handles.add(h());
 783   }
 784 }
 785 
 786 // Add this metadata pointer to be freed when it's safe.  This is only during
 787 // class unloading because Handles might point to this metadata field.
 788 void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
 789   // Metadata in shared region isn't deleted.
 790   if (!m->is_shared()) {
 791     MutexLockerEx ml(metaspace_lock(),  Mutex::_no_safepoint_check_flag);
 792     if (_deallocate_list == NULL) {
 793       _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);



 794     }
 795     _deallocate_list->append_if_missing(m);









 796   }
 797 }
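// Lifecycle of a queued entry, summarized from this file: a redefined (or
// failed-to-load) Metadata* is appended here; later, at a safepoint and after
// MetadataOnStackMark has marked metadata still referenced from stacks,
// free_deallocate_list() frees the entries that are not on_stack(), while
// unload_deallocate_list() handles CLDs that are unloading entirely.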
 798 
 799 // Deallocate free metadata on the free list.  How useful the PermGen was!
 800 void ClassLoaderData::free_deallocate_list() {
 801   // Don't need lock, at safepoint
 802   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 803   assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
 804   if (_deallocate_list == NULL) {
 805     return;
 806   }
 807   // Go backwards because this removes entries that are freed.
 808   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 809     Metadata* m = _deallocate_list->at(i);
 810     if (!m->on_stack()) {
 811       _deallocate_list->remove_at(i);
 812       // There are only three types of metadata that we deallocate directly.
 813       // Cast them so they can be used by the template function.
 814       if (m->is_method()) {
 815         MetadataFactory::free_metadata(this, (Method*)m);
 816       } else if (m->is_constantPool()) {
 817         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 818       } else if (m->is_klass()) {
 819         MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 820       } else {
 821         ShouldNotReachHere();

 822       }
 823     } else {
 824       // Metadata is alive.
 825       // If scratch_class is on stack then it shouldn't be on this list!
 826       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 827              "scratch classes on this list should be dead");
 828       // Also should assert that other metadata on the list was found in handles.
 829     }


 830   }

 831 }
 832 
 833 // This is distinct from free_deallocate_list.  For class loader data that are
 834 // unloading, this frees the C heap memory for constant pools on the list.  If there
 835 // were C heap memory allocated for methods, it would free that too.  The C heap memory
 836 // for InstanceKlasses on this list is freed in the ClassLoaderData destructor.
 837 void ClassLoaderData::unload_deallocate_list() {
 838   // Don't need lock, at safepoint
 839   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 840   assert(is_unloading(), "only called for ClassLoaderData that are unloading");
 841   if (_deallocate_list == NULL) {








 842     return;
 843   }
 844   // Go backwards because this removes entries that are freed.
 845   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 846     Metadata* m = _deallocate_list->at(i);
 847     assert (!m->on_stack(), "wouldn't be unloading if this were so");
 848     _deallocate_list->remove_at(i);
 849     // Only constant pool entries have C heap memory to free.
 850     if (m->is_constantPool()) {
 851       ((ConstantPool*)m)->release_C_heap_structures();













 852     }
 853   }
 854 }
 855 
 856 // These anonymous class loaders are to contain classes used for JSR292
 857 ClassLoaderData* ClassLoaderData::anonymous_class_loader_data(oop loader, TRAPS) {
 858   // Add a new class loader data to the graph.
 859   Handle lh(THREAD, loader);
 860   return ClassLoaderDataGraph::add(lh, true, THREAD);


 861 }
 862 
 863 const char* ClassLoaderData::loader_name() {
 864   // Handles null class loader
 865   return SystemDictionary::loader_name(class_loader());








 866 }
 867 
 868 #ifndef PRODUCT
 869 // Define to dump klasses
 870 #undef CLD_DUMP_KLASSES
 871 
 872 void ClassLoaderData::dump(outputStream * const out) {
 873   out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: " PTR_FORMAT " %s {",
 874       p2i(this), p2i((void *)class_loader()),
 875       p2i(class_loader() != NULL ? class_loader()->klass() : NULL), loader_name());
 876   if (claimed()) out->print(" claimed ");
 877   if (is_unloading()) out->print(" unloading ");
 878   out->cr();
 879   if (metaspace_or_null() != NULL) {
 880     out->print_cr("metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));
 881     metaspace_or_null()->dump(out);
 882   } else {
 883     out->print_cr("metaspace: NULL");
 884   }
 885 
 886 #ifdef CLD_DUMP_KLASSES
 887   if (Verbose) {
 888     Klass* k = _klasses;
 889     while (k != NULL) {
 890       out->print_cr("klass " PTR_FORMAT ", %s", p2i(k), k->name()->as_C_string());
 891       assert(k != k->next_link(), "no loops!");
 892       k = k->next_link();
 893     }
 894   }
 895 #endif  // CLD_DUMP_KLASSES
 896 #undef CLD_DUMP_KLASSES
 897   if (_jmethod_ids != NULL) {
 898     Method::print_jmethod_ids(this, out);
 899   }
 900   out->print_cr("}");
 901 }
 902 #endif // PRODUCT
 903 
 904 void ClassLoaderData::verify() {
 905   assert_locked_or_safepoint(_metaspace_lock);
 906   oop cl = class_loader();
 907 
 908   guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
 909   guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");
 910 
 911   // Verify the integrity of the allocated space.
 912   if (metaspace_or_null() != NULL) {
 913     metaspace_or_null()->verify();
 914   }
 915 
 916   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
 917     guarantee(k->class_loader_data() == this, "Must be the same");
 918     k->verify();
 919     assert(k != k->next_link(), "no loops!");
 920   }
 921 }
 922 
 923 bool ClassLoaderData::contains_klass(Klass* klass) {
 924   // Lock-free access requires load_acquire
 925   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 926     if (k == klass) return true;













 927   }
 928   return false;
 929 }
 930 
 931 
 932 // GC root of class loader data created.
 933 ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
 934 ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
 935 ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
 936 ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;
 937 
 938 bool ClassLoaderDataGraph::_should_purge = false;
 939 bool ClassLoaderDataGraph::_metaspace_oom = false;
 940 
 941 // Add a new class loader data node to the list.  Assign the newly created
 942 // ClassLoaderData into the java/lang/ClassLoader object as a hidden field
 943 ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous, TRAPS) {
 944   // We need to allocate all the oops for the ClassLoaderData before allocating the
 945   // actual ClassLoaderData object.
 946   ClassLoaderData::Dependencies dependencies(CHECK_NULL);
 947 
 948   NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
 949                                      // ClassLoaderData in the graph since the CLD
 950                                      // contains unhandled oops
 951 
 952   ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous, dependencies);
 953 
 954 
 955   if (!is_anonymous) {
 956     ClassLoaderData** cld_addr = java_lang_ClassLoader::loader_data_addr(loader());
 957     // First, Atomically set it
 958     ClassLoaderData* old = Atomic::cmpxchg(cld, cld_addr, (ClassLoaderData*)NULL);
 959     if (old != NULL) {
 960       delete cld;


 967   // class loader data
 968   ClassLoaderData** list_head = &_head;
 969   ClassLoaderData* next = _head;
 970 
 971   do {
 972     cld->set_next(next);
 973     ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
 974     if (exchanged == next) {
 975       LogTarget(Debug, class, loader, data) lt;
 976       if (lt.is_enabled()) {
 977        PauseNoSafepointVerifier pnsv(&no_safepoints); // Need safe points for JavaCalls::call_virtual
 978        LogStream ls(lt);
 979        print_creation(&ls, loader, cld, CHECK_NULL);
 980       }
 981       return cld;
 982     }
 983     next = exchanged;
 984   } while (true);
 985 }
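// The insertion above in miniature: point cld->_next at the observed head,
// then cmpxchg the head pointer; if another thread won the race, retry with
// the head it installed. Together with the NoSafepointVerifier, a new CLD is
// reachable from _head before the next safepoint can observe it.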
 986 
 987 void ClassLoaderDataGraph::print_creation(outputStream* out, Handle loader, ClassLoaderData* cld, TRAPS) {
 988   Handle string;
 989   if (loader.not_null()) {
 990     // Include the result of loader.toString() in the output. This allows
 991     // the user of the log to identify the class loader instance.
 992     JavaValue result(T_OBJECT);
 993     Klass* spec_klass = SystemDictionary::ClassLoader_klass();
 994     JavaCalls::call_virtual(&result,
 995                             loader,
 996                             spec_klass,
 997                             vmSymbols::toString_name(),
 998                             vmSymbols::void_string_signature(),
 999                             CHECK);
1000     assert(result.get_type() == T_OBJECT, "just checking");
1001     string = Handle(THREAD, (oop)result.get_jobject());
1002   }
1003 
1004   ResourceMark rm;
1005   out->print("create class loader data " INTPTR_FORMAT, p2i(cld));
1006   out->print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)cld->class_loader()),
1007              cld->loader_name());
1008 
1009   if (string.not_null()) {
1010     out->print(": ");
1011     java_lang_String::print(string(), out);
1012   }
1013   out->cr();
1014 }
1015 
1016 
1017 void ClassLoaderDataGraph::oops_do(OopClosure* f, bool must_claim) {
1018   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1019     cld->oops_do(f, must_claim);
1020   }
1021 }
1022 
1023 void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, bool must_claim) {
1024   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1025     if (cld->keep_alive()) {
1026       cld->oops_do(f, must_claim);
1027     }
1028   }
1029 }
1030 
1031 void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, bool must_claim) {
1032   if (ClassUnloading) {
1033     keep_alive_oops_do(f, must_claim);
1034   } else {
1035     oops_do(f, must_claim);
1036   }


1209         ls.cr();
1210       }
1211     }
1212 
1213     curr = curr->_next;
1214   }
1215 
1216   return array;
1217 }
1218 
1219 bool ClassLoaderDataGraph::unload_list_contains(const void* x) {
1220   assert(SafepointSynchronize::is_at_safepoint(), "only safe to call at safepoint");
1221   for (ClassLoaderData* cld = _unloading; cld != NULL; cld = cld->next()) {
1222     if (cld->metaspace_or_null() != NULL && cld->metaspace_or_null()->contains(x)) {
1223       return true;
1224     }
1225   }
1226   return false;
1227 }
1228 
1229 #ifndef PRODUCT
1230 bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) {
1231   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1232     if (loader_data == data) {
1233       return true;














1234     }

1235   }
1236 
1237   return false;
1238 }
1239 #endif // PRODUCT
1240 
1241 
1242 // Move class loader data from main list to the unloaded list for unloading
1243 // and deallocation later.
1244 bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure,
1245                                         bool clean_previous_versions) {
1246 
1247   ClassLoaderData* data = _head;
1248   ClassLoaderData* prev = NULL;
1249   bool seen_dead_loader = false;
1250 
1251   // Mark metadata seen on the stack only so we can delete unneeded entries.
1252   // Only walk all metadata, including the expensive code cache walk, for Full GC
1253   // and only if class redefinition and if there's previous versions of
1254   // Klasses to delete.
1255   bool walk_all_metadata = clean_previous_versions &&
1256                            JvmtiExport::has_redefined_a_class() &&
1257                            InstanceKlass::has_previous_versions_and_reset();
1258   MetadataOnStackMark md_on_stack(walk_all_metadata);
1259 
1260   // Save previous _unloading pointer for CMS which may add to unloading list before


1320 }
1321 
1322 void ClassLoaderDataGraph::purge() {
1323   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1324   ClassLoaderData* list = _unloading;
1325   _unloading = NULL;
1326   ClassLoaderData* next = list;
1327   bool classes_unloaded = false;
1328   while (next != NULL) {
1329     ClassLoaderData* purge_me = next;
1330     next = purge_me->next();
1331     delete purge_me;
1332     classes_unloaded = true;
1333   }
1334   if (classes_unloaded) {
1335     Metaspace::purge();
1336     set_metaspace_oom(false);
1337   }
1338 }
1339 






1340 int ClassLoaderDataGraph::resize_if_needed() {
1341   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1342   int resized = 0;
1343   if (Dictionary::does_any_dictionary_needs_resizing()) {
1344     FOR_ALL_DICTIONARY(cld) {
1345       if (cld->dictionary()->resize_if_needed()) {
1346         resized++;
1347       }
1348     }
1349   }
1350   return resized;
1351 }
1352 
1353 void ClassLoaderDataGraph::post_class_unload_events() {
1354 #if INCLUDE_TRACE
1355   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1356   if (Tracing::enabled()) {
1357     if (Tracing::is_event_enabled(TraceClassUnloadEvent)) {
1358       assert(_unloading != NULL, "need class loader data unload list!");
1359       _class_unload_time = Ticks::now();
1360       classes_unloading_do(&class_unload_event);
1361     }
1362     Tracing::on_unloading_classes();




1363   }
1364 #endif









1365 }
1366 








1367 ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic()
1368     : _next_klass(NULL) {
1369   ClassLoaderData* cld = ClassLoaderDataGraph::_head;
1370   Klass* klass = NULL;
1371 
1372   // Find the first klass in the CLDG.
1373   while (cld != NULL) {
1374     assert_locked_or_safepoint(cld->metaspace_lock());
1375     klass = cld->_klasses;
1376     if (klass != NULL) {
1377       _next_klass = klass;
1378       return;
1379     }
1380     cld = cld->next();
1381   }
1382 }
1383 
1384 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) {
1385   Klass* next = klass->next_link();
1386   if (next != NULL) {


1404 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
1405   Klass* head = _next_klass;
1406 
1407   while (head != NULL) {
1408     Klass* next = next_klass_in_cldg(head);
1409 
1410     Klass* old_head = Atomic::cmpxchg(next, &_next_klass, head);
1411 
1412     if (old_head == head) {
1413       return head; // Won the CAS.
1414     }
1415 
1416     head = old_head;
1417   }
1418 
1419   // Nothing more for the iterator to hand out.
1420   assert(head == NULL, "head is " PTR_FORMAT ", expected not null:", p2i(head));
1421   return NULL;
1422 }
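// A minimal consumption sketch, assuming several GC worker threads share one
// iterator during a safepoint; each successful CAS in next_klass() hands a
// given Klass* to exactly one caller.
static void worker_scan(ClassLoaderDataGraphKlassIteratorAtomic& iter) {
  for (Klass* k = iter.next_klass(); k != NULL; k = iter.next_klass()) {
    // process k; the claiming CAS above partitions the klass list among workers
  }
}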
1423 
1424 ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
1425   _data = ClassLoaderDataGraph::_head;
1426 }
1427 
1428 ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}
1429 
1430 #ifndef PRODUCT
1431 // callable from debugger
1432 extern "C" int print_loader_data_graph() {
1433   ClassLoaderDataGraph::dump_on(tty);
1434   return 0;
1435 }
1436 
1437 void ClassLoaderDataGraph::verify() {
1438   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1439     data->verify();
1440   }
1441 }
1442 
1443 void ClassLoaderDataGraph::dump_on(outputStream * const out) {
1444   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1445     data->dump(out);
1446   }
1447   MetaspaceAux::dump(out);
1448 }
1449 #endif // PRODUCT
1450 
1451 void ClassLoaderData::print_value_on(outputStream* out) const {
1452   if (class_loader() == NULL) {
1453     out->print("NULL class loader");
1454   } else {
1455     out->print("class loader " INTPTR_FORMAT " ", p2i(this));
1456     class_loader()->print_value_on(out);
1457   }
1458 }
1459 
1460 void ClassLoaderData::print_on(outputStream* out) const {
1461   if (class_loader() == NULL) {
1462     out->print("NULL class loader");
1463   } else {
1464     out->print("class loader " INTPTR_FORMAT " ", p2i(this));
1465     class_loader()->print_on(out);
1466   }
1467 }
1468 
1469 #if INCLUDE_TRACE
1470 
1471 Ticks ClassLoaderDataGraph::_class_unload_time;
1472 
1473 void ClassLoaderDataGraph::class_unload_event(Klass* const k) {
1474   assert(k != NULL, "invariant");
1475 
1476   // post class unload event
1477   EventClassUnload event(UNTIMED);
1478   event.set_endtime(_class_unload_time);
1479   event.set_unloadedClass(k);
1480   event.set_definingClassLoader(k->class_loader_data());
1481   event.commit();
1482 }
1483 
1484 #endif // INCLUDE_TRACE


  30 // according to its name resolution strategy.
  31 //
  32 // Class loaders that implement a deterministic name resolution strategy
  33 // (including with respect to their delegation behavior), such as the boot, the
  34 // platform, and the system loaders of the JDK's built-in class loader
  35 // hierarchy, always produce the same linkset for a given configuration.
  36 //
  37 // ClassLoaderData carries information related to a linkset (e.g.,
  38 // metaspace holding its klass definitions).
  39 // The System Dictionary and related data structures (e.g., placeholder table,
  40 // loader constraints table) as well as the runtime representation of classes
  41 // only reference ClassLoaderData.
  42 //
  43 // Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData that
  44 // represents the loader's "linking domain" in the JVM.
  45 //
  46 // The bootstrap loader (represented by NULL) also has a ClassLoaderData,
  47 // the singleton class the_null_class_loader_data().
  48 
  49 #include "precompiled.hpp"

  50 #include "classfile/classLoaderData.inline.hpp"
  51 #include "classfile/dictionary.hpp"
  52 #include "classfile/javaClasses.hpp"
  53 #include "classfile/metadataOnStackMark.hpp"
  54 #include "classfile/moduleEntry.hpp"
  55 #include "classfile/packageEntry.hpp"
  56 #include "classfile/systemDictionary.hpp"
  57 #include "code/codeCache.hpp"
  58 #include "gc/shared/gcLocker.hpp"
  59 #include "logging/log.hpp"
  60 #include "logging/logStream.hpp"
  61 #include "memory/metaspace.hpp"
  62 #include "memory/metadataFactory.hpp"

  63 #include "memory/oopFactory.hpp"
  64 #include "memory/resourceArea.hpp"
  65 #include "oops/objArrayOop.inline.hpp"
  66 #include "oops/oop.inline.hpp"
  67 #include "runtime/atomic.hpp"
  68 #include "runtime/javaCalls.hpp"
  69 #include "runtime/jniHandles.hpp"
  70 #include "runtime/mutex.hpp"
  71 #include "runtime/orderAccess.hpp"
  72 #include "runtime/safepoint.hpp"
  73 #include "runtime/synchronizer.hpp"


  74 #include "utilities/ostream.hpp"
  75 #if INCLUDE_ALL_GCS
  76 #include "gc/g1/g1SATBCardTableModRefBS.hpp"
  77 #endif // INCLUDE_ALL_GCS
  78 #if INCLUDE_TRACE
  79 #include "trace/tracing.hpp"
  80 #include "utilities/ticks.hpp"
  81 #endif
  82 
  83 void ClassLoaderData::Dependencies::init(TRAPS) {
  84   // Create empty dependencies array to add to. CMS requires this to be
  85   // an oop so that it can track additions via card marks.  We think.
  86   _list_head = oopFactory::new_objectArray(2, CHECK);
  87 }











  88 
  89 void ClassLoaderData::Dependencies::add(Handle dependency, TRAPS) {
  90   // Check first if this dependency is already in the list.
  91   // Save a pointer to the last to add to under the lock.
  92   objArrayOop ok = _list_head;
  93   objArrayOop last = NULL;
  94   while (ok != NULL) {
  95     last = ok;
  96     if (ok->obj_at(0) == dependency()) {
  97       // Don't need to add it
  98       return;

  99     }
 100     ok = (objArrayOop)ok->obj_at(1);

 101   }
 102 
 103   // Must handle over GC points
 104   assert(last != NULL, "dependencies should be initialized");
 105   objArrayHandle last_handle(THREAD, last);
 106 
 107   // Create a new dependency node with fields for (class_loader or mirror, next)
 108   objArrayOop deps = oopFactory::new_objectArray(2, CHECK);
 109   deps->obj_at_put(0, dependency());
 110 
 111   // Must handle over GC points
 112   objArrayHandle new_dependency(THREAD, deps);
 113 
 114   // Add the dependency under lock
 115   locked_add(last_handle, new_dependency, THREAD);
 116 }
 117 
 118 void ClassLoaderData::Dependencies::locked_add(objArrayHandle last_handle,
 119                                                objArrayHandle new_dependency,
 120                                                Thread* THREAD) {
 121 
 122   // Have to lock and put the new dependency on the end of the dependency
 123   // array so the card mark for CMS sees that this dependency is new.
 124   // Can probably do this lock free with some effort.
 125   ObjectLocker ol(Handle(THREAD, _list_head), THREAD);
 126 
 127   oop loader_or_mirror = new_dependency->obj_at(0);
 128 
 129   // Since the dependencies are only added, add to the end.
 130   objArrayOop end = last_handle();
 131   objArrayOop last = NULL;
 132   while (end != NULL) {
 133     last = end;
 134     // check again if another thread added it to the end.
 135     if (end->obj_at(0) == loader_or_mirror) {
 136       // Don't need to add it
 137       return;
 138     }
 139     end = (objArrayOop)end->obj_at(1);
 140   }
 141   assert(last != NULL, "dependencies should be initialized");
 142   // fill in the first element with the oop in new_dependency.
 143   if (last->obj_at(0) == NULL) {
 144     last->obj_at_put(0, new_dependency->obj_at(0));
 145   } else {
 146     last->obj_at_put(1, new_dependency());
 147   }
 148 }
 149 
 150 void ClassLoaderData::Dependencies::oops_do(OopClosure* f) {
 151   f->do_oop((oop*)&_list_head);


 152 }
 153 
 154 ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
 155   Chunk* c = _head;
 156   while (c != NULL) {
 157     Chunk* next = c->_next;
 158     delete c;
 159     c = next;
 160   }
 161 }
 162 
 163 oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
 164   if (_head == NULL || _head->_size == Chunk::CAPACITY) {
 165     Chunk* next = new Chunk(_head);
 166     OrderAccess::release_store(&_head, next);
 167   }
 168   oop* handle = &_head->_data[_head->_size];
 169   *handle = o;
 170   OrderAccess::release_store(&_head->_size, _head->_size + 1);
 171   return handle;
 172 }
 173 








 174 void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
 175   Chunk* head = OrderAccess::load_acquire(&_head);
 176   if (head != NULL) {
 177     // Must be careful when reading size of head
 178     oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
 179     for (Chunk* c = head->_next; c != NULL; c = c->_next) {
 180       oops_do_chunk(f, c, c->_size);
 181     }
 182   }
 183 }
 184 
 185 inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
 186   for (juint i = 0; i < size; i++) {
 187     if (c->_data[i] != NULL) {
 188       f->do_oop(&c->_data[i]);
 189     }
 190   }
 191 }
 192 
 193 #ifdef ASSERT
 194 class VerifyContainsOopClosure : public OopClosure {
 195   oop* _target;
 196   bool _found;
 197 
 198 public:
 199   VerifyContainsOopClosure(oop* target) : _target(target), _found(false) {}
 200 
 201   void do_oop(oop* p) {
 202     if (p == _target) {
 203       _found = true;
 204     }
 205   }
 206 
 207   void do_oop(narrowOop* p) {
 208     // The ChunkedHandleList should not contain any narrowOop
 209     ShouldNotReachHere();
 210   }
 211 
 212   bool found() const {
 213     return _found;
 214   }
 215 };
 216 
 217 bool ClassLoaderData::ChunkedHandleList::contains(oop* p) {
 218   VerifyContainsOopClosure cl(p);
 219   oops_do(&cl);
 220   return cl.found();
 221 }
 222 #endif // ASSERT
 223 
 224 static const int _boot_loader_dictionary_size = 1009;
 225 static const int _default_loader_dictionary_size = 107;


 226 
 227 static Dictionary* create_dictionary(ClassLoaderData* cld) {
 228   assert(cld != NULL, "invariant");
 229   assert(!cld->is_anonymous(), "anonymous class loader data do not have a dictionary");
 230 
 231   int size;
 232   bool resizable = false;
 233   if (ClassLoaderData::the_null_class_loader_data() == NULL) {
 234     size = _boot_loader_dictionary_size;
 235     resizable = true;
 236   } else if (cld->class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
 237     size = 1;  // there's only one class in the reflection class loader and no initiated classes
 238   } else if (cld->is_system_class_loader_data()) {
 239     size = _boot_loader_dictionary_size;
 240     resizable = true;
 241   } else {
 242     size = _default_loader_dictionary_size;
 243     resizable = true;
 244   }
 245   if (!DynamicallyResizeSystemDictionaries || DumpSharedSpaces || UseSharedSpaces) {
 246     resizable = false;




 247   }
 248   return new Dictionary(cld, size, resizable);
 249 }
 250 
 251 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;








 252 
 253 void ClassLoaderData::init_null_class_loader_data() {
 254   assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
 255   assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");
 256 
 257   // We explicitly initialize the Dependencies object at a later phase in the initialization
 258   _the_null_class_loader_data = new ClassLoaderData(Handle(), false, Dependencies());
 259   ClassLoaderDataGraph::_head = _the_null_class_loader_data;
 260   assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
 261 }
 262 
 263 ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous, Dependencies dependencies) :
 264   _class_loader(h_class_loader()),
 265   _is_anonymous(is_anonymous),
 266   // An anonymous class loader data doesn't have anything to keep
 267   // it from being unloaded during parsing of the anonymous class.
 268   // The null-class-loader should always be kept alive.
 269   _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
 270   _metaspace(NULL), _unloading(false), _klasses(NULL),
 271   _modules(NULL), _packages(NULL),
 272   _claimed(0), _modified_oops(true), _accumulated_modified_oops(false),
 273   _jmethod_ids(NULL), _handles(), _deallocate_list(NULL),
 274   _next(NULL), _dependencies(dependencies),
 275   _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
 276                             Monitor::_safepoint_check_never)) {
 277 
 278   // A ClassLoaderData created solely for an anonymous class should never have a
 279   // ModuleEntryTable or PackageEntryTable created for it. The defining package
 280   // and module for an anonymous class will be found in its host class.
 281   if (!is_anonymous) {
 282     _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
 283     if (h_class_loader.is_null()) {
 284       // Create unnamed module for boot loader
 285       _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
 286     } else {
 287       // Create unnamed module for all other loaders
 288       _unnamed_module = ModuleEntry::create_unnamed_module(this);
 289     }
 290   } else {
 291     _unnamed_module = NULL;





 292   }

 293 
 294   if (!is_anonymous) {
 295     _dictionary = create_dictionary(this);
 296   } else {
 297     _dictionary = NULL;


 298   }
 299   TRACE_INIT_ID(this);
 300 }
 301 
 302 ClassLoaderData::~ClassLoaderData() {
 303   // Release C heap structures for all the classes.
 304   classes_do(InstanceKlass::release_C_heap_structures);






 305 
 306   // Release C heap allocated hashtable for all the packages.
 307   if (_packages != NULL) {
 308     // Destroy the table itself
 309     delete _packages;
 310     _packages = NULL;


 311   }

 312 
 313   // Release C heap allocated hashtable for all the modules.




 314   if (_modules != NULL) {
 315     // Destroy the table itself
 316     delete _modules;
 317     _modules = NULL;




 318   }

 319 
 320   // Release C heap allocated hashtable for the dictionary
 321   if (_dictionary != NULL) {
 322     // Destroy the table itself
 323     delete _dictionary;
 324     _dictionary = NULL;





 325   }

 326 
 327   if (_unnamed_module != NULL) {
 328     _unnamed_module->delete_unnamed_module();
 329     _unnamed_module = NULL;







 330   }
 331 
 332   // release the metaspace
 333   Metaspace *m = _metaspace;
 334   if (m != NULL) {
 335     _metaspace = NULL;
 336     delete m;















 337   }
 338   // Clear all the JNI handles for methods
 339   // These aren't deallocated and are going to look like a leak, but that's
 340   // intentional: we can't really get rid of jmethodIDs, since we don't
 341   // know when native code is going to stop using them.  The spec says that
 342   // they're "invalid", but existing programs likely rely on their being
 343   // NULL after class unloading.
 344   if (_jmethod_ids != NULL) {
 345     Method::clear_jmethod_ids(this);
 346   }
 347   // Delete lock
 348   delete _metaspace_lock;
 349 
 350   // Delete free list
 351   if (_deallocate_list != NULL) {
 352     delete _deallocate_list;



















 353   }














 354 }
 355 
 356 void ClassLoaderData::init_dependencies(TRAPS) {
 357   assert(!Universe::is_fully_initialized(), "should only be called when initializing");
 358   assert(is_the_null_class_loader_data(), "should only call this for the null class loader");
 359   _dependencies.init(CHECK);
 360 }





 361 
 362 // Deallocate free metadata on the free list.  How useful the PermGen was!
 363 void ClassLoaderData::free_deallocate_list() {
 364   // Don't need lock, at safepoint
 365   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 366   assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
 367   if (_deallocate_list == NULL) {


 368     return;
 369   }
 370   // Go backwards because this removes entries that are freed.
 371   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 372     Metadata* m = _deallocate_list->at(i);
 373     if (!m->on_stack()) {
 374       _deallocate_list->remove_at(i);
 375       // There are only three types of metadata that we deallocate directly.
 376       // Cast them so they can be used by the template function.
 377       if (m->is_method()) {
 378         MetadataFactory::free_metadata(this, (Method*)m);
 379       } else if (m->is_constantPool()) {
 380         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 381       } else if (m->is_klass()) {
 382         MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 383       } else {
 384         ShouldNotReachHere();
 385       }




 386     } else {
 387       // Metadata is alive.
 388       // If scratch_class is on stack then it shouldn't be on this list!
 389       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 390         "scratch classes on this list should be dead");
 391       // Also should assert that other metadata on the list was found in handles.
 392     }





 393   }
 394 }
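
The removal loop above walks the list backwards so that remove_at() never disturbs the indices of entries that have not been visited yet. As a minimal standalone sketch of that pattern (std::vector and a made-up on_stack flag stand in for GrowableArray and Metadata::on_stack(); illustrative only, not HotSpot code):

#include <vector>

struct Item {
  bool on_stack;   // stand-in for Metadata::on_stack()
};

// Free every item that is not referenced from a stack, in place.
// Walking backwards keeps the indices of unvisited entries stable
// even though erase() shifts everything after the removed slot.
static void free_unreferenced(std::vector<Item*>& list) {
  for (int i = static_cast<int>(list.size()) - 1; i >= 0; i--) {
    Item* m = list[i];
    if (!m->on_stack) {
      list.erase(list.begin() + i);
      delete m;   // the real code dispatches to type-specific frees instead
    }
  }
}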
 395 
 396 // This is distinct from free_deallocate_list.  For class loader data that are
 397 // unloading, this frees the C heap memory for constant pools on the list.  If there
 398 // were C heap memory allocated for methods, it would free that too.  The C heap memory
 399 // for InstanceKlasses on this list is freed in the ClassLoaderData destructor.
 400 void ClassLoaderData::unload_deallocate_list() {
 401   // Don't need lock, at safepoint
 402   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 403   assert(is_unloading(), "only called for ClassLoaderData that are unloading");
 404   if (_deallocate_list == NULL) {
 405     return;
 406   }
 407   // Go backwards because this removes entries that are freed.
 408   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 409     Metadata* m = _deallocate_list->at(i);
 410     assert(!m->on_stack(), "wouldn't be unloading if this were so");
 411     _deallocate_list->remove_at(i);
 412     // Only constant pool entries have C heap memory to free.
 413     if (m->is_constantPool()) {
 414       ((ConstantPool*)m)->release_C_heap_structures();
 415     }










 416   }
 417 }
 418 
 419 // Class iterator used by the compiler.  It gets some number of classes at
 420 // a safepoint to decay invocation counters on the methods.
 421 class ClassLoaderDataGraphKlassIteratorStatic {
 422   friend class ClassLoaderData;
 423   friend class ClassLoaderDataGraph;
 424  private:
 425   ClassLoaderData* _current_loader_data;
 426   Klass*           _current_class_entry;



 427 
 428   InstanceKlass* try_get_next_class() {
 429     assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 430     int max_classes = InstanceKlass::number_of_instance_classes();
 431     assert(max_classes > 0, "should not be called with no instance classes");
 432     for (int i = 0; i < max_classes; ) {
 433 
 434       if (_current_class_entry != NULL) {
 435         Klass* k = _current_class_entry;
 436         _current_class_entry = _current_class_entry->next_link();
 437 
 438         if (k->is_instance_klass()) {
 439           InstanceKlass* ik = InstanceKlass::cast(k);
 440           i++;  // count all instance classes found
 441                 // Not-yet-loaded classes are counted in max_classes,
 442                 // but only loaded classes are returned.
 443           if (ik->is_loaded()) {
 444             return ik;
 445           }
 446         }


 461     // Caller handles NULL.
 462     return NULL;
 463   }
 464 
 465   // If the current class for the static iterator is a class being unloaded or
 466   // deallocated, adjust the current class.
 467   void adjust_saved_class(ClassLoaderData* cld) {
 468     if (_current_loader_data == cld) {
 469       _current_loader_data = cld->next();
 470       if (_current_loader_data != NULL) {
 471         _current_class_entry = _current_loader_data->klasses();
 472       }  // else try_get_next_class will start at the head
 473     }
 474   }
 475 
 476   void adjust_saved_class(Klass* klass) {
 477     if (_current_class_entry == klass) {
 478       _current_class_entry = klass->next_link();
 479     }
 480   }
 481 
 482  public:
 483   ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {}
 484 };
 485 
 486 static ClassLoaderDataGraphKlassIteratorStatic static_klass_iterator;
 487 






























 488 void ClassLoaderData::unload() {
 489   _unloading = true;
 490 
 491   // Tell serviceability tools these classes are unloading
 492   classes_do(InstanceKlass::notify_unload_class);
 493 
 494   LogTarget(Debug, class, loader, data) lt;
 495   if (lt.is_enabled()) {
 496     ResourceMark rm;
 497     LogStream ls(lt);
 498     ls.print(": unload loader data " INTPTR_FORMAT, p2i(this));
 499     ls.print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)class_loader()),
 500       loader_name());
 501     if (is_anonymous()) {
 502       ls.print(" for anonymous class  " INTPTR_FORMAT " ", p2i(_klasses));
 503     }
 504     ls.cr();
 505   }
 506 
 507   // Some items on the _deallocate_list need to free their C heap structures
 508   // if they are not already on the _klasses list.
 509   unload_deallocate_list();
 510 
 511   // Clean up global class iterator for compiler
 512   static_klass_iterator.adjust_saved_class(this);
 513 }
 514 
 515 void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
 516   // Lock-free access requires load_acquire
 517   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 518     klass_closure->do_klass(k);
 519     assert(k != k->next_link(), "no loops!");
 520   }
 521 }


 522 
 523 void ClassLoaderData::classes_do(void f(Klass * const)) {
 524   // Lock-free access requires load_acquire
 525   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 526     f(k);
 527     assert(k != k->next_link(), "no loops!");
 528   }
 529 }
 530 
 531 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 532   // Lock-free access requires load_acquire
 533   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 534     if (k->is_instance_klass()) {
 535       f(InstanceKlass::cast(k));
 536     }
 537     assert(k != k->next_link(), "no loops!");
 538   }

 539 }
 540 
 541 void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
 542   // Lock-free access requires load_acquire
 543   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 544     // Do not filter ArrayKlass oops here...
 545     if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
 546       klass_closure->do_klass(k);












 547     }


 548   }

 549 }
 550 
 551 void ClassLoaderData::methods_do(void f(Method*)) {
 552   // Lock-free access requires load_acquire
 553   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 554     if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
 555       InstanceKlass::cast(k)->methods_do(f);
 556     }
 557   }
 558 }
 559 
 560 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 561   assert_locked_or_safepoint(Module_lock);
 562   if (_unnamed_module != NULL) {
 563     f(_unnamed_module);
 564   }
 565   if (_modules != NULL) {
 566     for (int i = 0; i < _modules->table_size(); i++) {
 567       for (ModuleEntry* entry = _modules->bucket(i);
 568            entry != NULL;
 569            entry = entry->next()) {
 570         f(entry);
 571       }
 572     }
 573   }
 574 }
 575 
 576 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 577   assert_locked_or_safepoint(Module_lock);



 578   if (_packages != NULL) {
 579     for (int i = 0; i < _packages->table_size(); i++) {
 580       for (PackageEntry* entry = _packages->bucket(i);
 581            entry != NULL;
 582            entry = entry->next()) {
 583         f(entry);
 584       }






 585     }






 586   }
 587 }
 588 
 589 void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) {
 590   if (must_claim && !claim()) {
 591     return;
 592   }
 593 
 594   // Only clear modified_oops after the ClassLoaderData is claimed.
 595   if (clear_mod_oops) {
 596     clear_modified_oops();











 597   }


 598 
 599   f->do_oop(&_class_loader);
 600   _dependencies.oops_do(f);
 601   _handles.oops_do(f);

 602 }
 603 
 604 // Unloading support
 605 oop ClassLoaderData::keep_alive_object() const {
 606   assert_locked_or_safepoint(_metaspace_lock);
 607   assert(!keep_alive(), "Don't use with CLDs that are artificially kept alive");
 608   return is_anonymous() ? _klasses->java_mirror() : class_loader();
 609 }
 610 
 611 bool ClassLoaderData::is_alive(BoolObjectClosure* is_alive_closure) const {
 612   return keep_alive() // null class loader and incomplete anonymous klasses.
 613          || is_alive_closure->do_object_b(keep_alive_object());
 614 }
 615 
 616 // Anonymous classes have their own ClassLoaderData that is marked to be kept
 617 // alive while the class is being parsed, and also if the class appears on the
 618 // module fixup list.  Because no other class shares the anonymous class's name
 619 // or ClassLoaderData, no other non-GC thread has knowledge of the anonymous
 620 // class while it is being defined; therefore _keep_alive is not volatile or atomic.
 621 void ClassLoaderData::inc_keep_alive() {
 622   if (is_anonymous()) {
 623     assert(_keep_alive >= 0, "Invalid keep alive increment count");
 624     _keep_alive++;
 625   }
 626 }
 627 
 628 void ClassLoaderData::dec_keep_alive() {
 629   if (is_anonymous()) {
 630     assert(_keep_alive > 0, "Invalid keep alive decrement count");
 631     _keep_alive--;
 632   }
 633 }
 634 
 635 bool ClassLoaderData::claim() {
 636   if (_claimed == 1) {
 637     return false;
 638   }
 639 
 640   return (int)Atomic::cmpxchg(1, &_claimed, 0) == 0;
 641 }
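
claim() lets multiple GC threads race over each ClassLoaderData while guaranteeing that only one of them visits it per traversal: the first CAS from 0 to 1 wins, everyone else backs off, and clear_claimed() resets the flag before the next traversal. A hedged sketch of the same protocol using standard C++ atomics instead of Atomic::cmpxchg (names are invented for the example):

#include <atomic>

struct Node {
  std::atomic<int> claimed{0};

  // Returns true for exactly one caller per traversal.
  bool claim() {
    if (claimed.load(std::memory_order_relaxed) == 1) {
      return false;                    // cheap pre-check, as in the code above
    }
    int expected = 0;
    return claimed.compare_exchange_strong(expected, 1);
  }

  // Called single-threaded before the next traversal starts.
  void clear_claimed() { claimed.store(0, std::memory_order_relaxed); }
};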
 642 
 643 Metaspace* ClassLoaderData::metaspace_non_null() {
 644   // If the metaspace has not been allocated, create a new one.  Might want
 645   // to create smaller arena for Reflection class loaders also.
 646   // The reason for the delayed allocation is because some class loaders are
 647   // simply for delegating with no metadata of their own.
 648   // Lock-free access requires load_acquire.
 649   Metaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
 650   if (metaspace == NULL) {
 651     MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
 652     // Check if _metaspace got allocated while we were waiting for this lock.
 653     if ((metaspace = _metaspace) == NULL) {
 654       if (this == the_null_class_loader_data()) {
 655         assert(class_loader() == NULL, "Must be");
 656         metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
 657       } else if (is_anonymous()) {
 658         if (class_loader() != NULL) {
 659           log_trace(class, loader, data)("is_anonymous: %s", class_loader()->klass()->internal_name());
 660         }
 661         metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
 662       } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
 663         if (class_loader() != NULL) {
 664           log_trace(class, loader, data)("is_reflection: %s", class_loader()->klass()->internal_name());
 665         }
 666         metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
 667       } else {
 668         metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
 669       }
 670       // Ensure _metaspace is stable, since it is examined without a lock
 671       OrderAccess::release_store(&_metaspace, metaspace);
 672     }
 673   }
 674   return metaspace;
 675 }
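
metaspace_non_null() is double-checked locking: a lock-free acquire load on the fast path, a re-check under the lock, and a release store to publish the new Metaspace so that any reader who sees the pointer also sees the fully constructed object. A small sketch of that shape, with std::atomic and std::mutex standing in for OrderAccess and MutexLockerEx (the payload type and value are placeholders):

#include <atomic>
#include <mutex>

class LazyHolder {
  std::atomic<int*> _payload{nullptr};
  std::mutex        _lock;

 public:
  int* payload_non_null() {
    // Fast path: acquire pairs with the release store below.
    int* p = _payload.load(std::memory_order_acquire);
    if (p == nullptr) {
      std::lock_guard<std::mutex> guard(_lock);
      // Re-check: another thread may have published while we waited for the lock.
      p = _payload.load(std::memory_order_relaxed);
      if (p == nullptr) {
        p = new int(42);                                 // stand-in for the real allocation
        _payload.store(p, std::memory_order_release);    // publish the fully constructed object
      }
    }
    return p;
  }
};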
 676 
 677 // Returns true if this class loader data is for the system class loader.
 678 bool ClassLoaderData::is_system_class_loader_data() const {
 679   return SystemDictionary::is_system_class_loader(class_loader());
 680 }
 681 
 682 // Returns true if this class loader data is for the platform class loader.
 683 bool ClassLoaderData::is_platform_class_loader_data() const {
 684   return SystemDictionary::is_platform_class_loader(class_loader());
 685 }
 686 
 687 // Returns true if this class loader data is one of the 3 builtin
 688 // (boot, application/system or platform) class loaders. Note, the
 689 // builtin loaders are not freed by a GC.
 690 bool ClassLoaderData::is_builtin_class_loader_data() const {
 691   return (is_the_null_class_loader_data() ||
 692     SystemDictionary::is_system_class_loader(class_loader()) ||
 693     SystemDictionary::is_platform_class_loader(class_loader()));
 694 }
 695 
 696 const char* ClassLoaderData::loader_name() {
 697   // Handles null class loader
 698   return SystemDictionary::loader_name(class_loader());
 699 }
 700 
 701 ModuleEntryTable* ClassLoaderData::modules() {
 702   // Lazily create the module entry table at first request.
 703   // Lock-free access requires load_acquire.
 704   ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
 705   if (modules == NULL) {
 706     MutexLocker m1(Module_lock);
 707     // Check if _modules got allocated while we were waiting for this lock.
 708     if ((modules = _modules) == NULL) {
 709       modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
 710 
 711       {
 712         MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 713         // Ensure _modules is stable, since it is examined without a lock
 714         OrderAccess::release_store(&_modules, modules);
 715       }
 716     }
 717   }
 718   return modules;
 719 }
 720 
 721 OopHandle ClassLoaderData::add_handle(Handle h) {
 722   MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 723   record_modified_oops();
 724   return OopHandle(_handles.add(h()));
 725 }
 726 
 727 void ClassLoaderData::remove_handle(OopHandle h) {
 728   assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
 729   oop* ptr = h.ptr_raw();
 730   if (ptr != NULL) {
 731     assert(_handles.contains(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
 732 #if INCLUDE_ALL_GCS
 733     // This barrier is used by G1 to remember the old oop values, so
 734     // that we don't forget any objects that were live at the snapshot at
 735     // the beginning.
 736     if (UseG1GC) {
 737       oop obj = *ptr;
 738       if (obj != NULL) {
 739         G1SATBCardTableModRefBS::enqueue(obj);
 740       }
 741     }
 742 #endif
 743     *ptr = NULL;
 744   }
 745 }
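
The G1 hook above is a snapshot-at-the-beginning (SATB) pre-write barrier: before the handle slot is cleared, the old value is enqueued so concurrent marking still traces objects that were reachable when marking started. Very roughly, and ignoring G1's per-thread queues and filtering, the shape is the following sketch (marking_active, satb_queue and Obj are invented for illustration):

#include <vector>

struct Obj;                                   // opaque object type for the sketch

static bool marking_active = false;           // set while concurrent marking runs
static std::vector<Obj*> satb_queue;          // the collector drains this later

// Pre-write barrier: remember the value that is about to be overwritten.
static void satb_enqueue(Obj* old_value) {
  if (marking_active && old_value != nullptr) {
    satb_queue.push_back(old_value);
  }
}

// Clearing a root slot, analogous to '*ptr = NULL' in remove_handle().
static void clear_slot(Obj** slot) {
  satb_enqueue(*slot);                        // barrier runs before the store
  *slot = nullptr;
}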
 746 
 747 void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
 748   MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 749   if (dest.resolve() != NULL) {
 750     return;
 751   } else {
 752     dest = _handles.add(h());
 753   }
 754 }
 755 
 756 void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
 757   {



 758     MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 759     Klass* old_value = _klasses;
 760     k->set_next_link(old_value);
 761     // Link the new item into the list, making sure the linked class is stable
 762     // since the list can be walked without a lock
 763     OrderAccess::release_store(&_klasses, k);
 764   }
 765 
 766   if (publicize && k->class_loader_data() != NULL) {
 767     ResourceMark rm;
 768     log_trace(class, loader, data)("Adding k: " PTR_FORMAT " %s to CLD: "
 769                   PTR_FORMAT " loader: " PTR_FORMAT " %s",
 770                   p2i(k),
 771                   k->external_name(),
 772                   p2i(k->class_loader_data()),
 773                   p2i((void *)k->class_loader()),
 774                   loader_name());
 775   }
 776 }
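
add_class() prepends to a singly linked list that classes_do() and friends walk without a lock: the writer fills in the next link first and only then publishes the node with a release store, while readers start from an acquire load, so a reader can never observe a half-linked node. A self-contained sketch of that publish/walk pair (standard C++ atomics in place of OrderAccess; the names are made up):

#include <atomic>
#include <mutex>

struct KlassNode {
  int        value;
  KlassNode* next;        // only ever read after the node is published
};

class KlassList {
  std::atomic<KlassNode*> _head{nullptr};
  std::mutex              _writer_lock;   // writers are serialized, readers are not

 public:
  void add(int v) {
    std::lock_guard<std::mutex> guard(_writer_lock);
    KlassNode* n = new KlassNode{v, _head.load(std::memory_order_relaxed)};
    _head.store(n, std::memory_order_release);   // publish only after 'next' is set
  }

  template <typename F>
  void for_each(F f) const {
    for (KlassNode* k = _head.load(std::memory_order_acquire); k != nullptr; k = k->next) {
      f(k->value);
    }
  }
};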
 777 
 778 // Remove a klass from the _klasses list for scratch_class during redefinition
 779 // or parsed class in the case of an error.
 780 void ClassLoaderData::remove_class(Klass* scratch_class) {
 781   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 782 
 783   // Adjust global class iterator.
 784   static_klass_iterator.adjust_saved_class(scratch_class);
 785 
 786   Klass* prev = NULL;
 787   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
 788     if (k == scratch_class) {
 789       if (prev == NULL) {
 790         _klasses = k->next_link();








 791       } else {
 792         Klass* next = k->next_link();
 793         prev->set_next_link(next);
 794       }
 795       return;





 796     }
 797     prev = k;
 798     assert(k != k->next_link(), "no loops!");
 799   }
 800   ShouldNotReachHere();   // should have found this class!!
 801 }
 802 
 803 bool ClassLoaderData::contains_klass(Klass* klass) {
 804   // Lock-free access requires load_acquire
 805   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 806     if (k == klass) return true;
 807   }
 808   return false;
 809 }
 810 
 811 void ClassLoaderData::record_dependency(const Klass* k, TRAPS) {
 812   assert(k != NULL, "invariant");
 813 
 814   ClassLoaderData * const from_cld = this;
 815   ClassLoaderData * const to_cld = k->class_loader_data();
 816 
 817   // Dependency to the null class loader data doesn't need to be recorded
 818   // because the null class loader data never goes away.
 819   if (to_cld->is_the_null_class_loader_data()) {
 820     return;
 821   }
 822 
 823   oop to;
 824   if (to_cld->is_anonymous()) {
 825     // Anonymous class dependencies are through the mirror.
 826     to = k->java_mirror();
 827   } else {
 828     to = to_cld->class_loader();
 829 
 830     // If from_cld is anonymous, even if its class_loader is a parent of 'to',
 831     // we still have to add it.  The class_loader won't keep from_cld alive.
 832     if (!from_cld->is_anonymous()) {
 833       // Check that this dependency isn't from the same or parent class_loader
 834       oop from = from_cld->class_loader();
 835 
 836       oop curr = from;
 837       while (curr != NULL) {
 838         if (curr == to) {
 839           return; // this class loader is in the parent list, no need to add it.
 840         }
 841         curr = java_lang_ClassLoader::parent(curr);
 842       }
 843     }
 844   }

 845 
 846   // It's a dependency we won't find through GC, so add it.  This is relatively rare.
 847   // Must handle over a GC point.
 848   Handle dependency(THREAD, to);
 849   from_cld->_dependencies.add(dependency, CHECK);
 850 
 851   // Added a potentially young gen oop to the ClassLoaderData
 852   record_modified_oops();
 853 }
 854 
 855 // Add this metadata pointer to be freed when it's safe.  This is only during
 856 // class unloading because Handles might point to this metadata field.
 857 void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
 858   // Metadata in shared region isn't deleted.
 859   if (!m->is_shared()) {
 860     MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 861     if (_deallocate_list == NULL) {
 862       _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
 863     }
 864     _deallocate_list->append_if_missing(m);
 865   }
 866 }
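
The deallocate list is created lazily under the metaspace lock, and append_if_missing() keeps duplicates out with a linear scan. A rough equivalent with std::vector, purely for illustration (the 100-entry reservation mirrors the initial GrowableArray capacity above):

#include <algorithm>
#include <mutex>
#include <vector>

class DeallocateList {
  std::mutex          _lock;
  std::vector<void*>* _list = nullptr;      // created on first use

 public:
  void add(void* m) {
    std::lock_guard<std::mutex> guard(_lock);
    if (_list == nullptr) {
      _list = new std::vector<void*>();
      _list->reserve(100);                  // matches the initial capacity above
    }
    // append_if_missing: a linear scan keeps duplicates out.
    if (std::find(_list->begin(), _list->end(), m) == _list->end()) {
      _list->push_back(m);
    }
  }
};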
 867 
 868 #ifndef PRODUCT
 869 // Define to dump klasses
 870 #undef CLD_DUMP_KLASSES
 871 
 872 void ClassLoaderData::dump(outputStream * const out) {
 873   out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: " PTR_FORMAT " %s {",
 874       p2i(this), p2i((void *)class_loader()),
 875       p2i(class_loader() != NULL ? class_loader()->klass() : NULL), loader_name());
 876   if (claimed()) out->print(" claimed ");
 877   if (is_unloading()) out->print(" unloading ");
 878   out->cr();
 879   if (metaspace_or_null() != NULL) {
 880     out->print_cr("metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));
 881     metaspace_or_null()->dump(out);
 882   } else {
 883     out->print_cr("metaspace: NULL");
 884   }
 885 
 886 #ifdef CLD_DUMP_KLASSES
 887   if (Verbose) {
 888     Klass* k = _klasses;
 889     while (k != NULL) {
 890       out->print_cr("klass " PTR_FORMAT ", %s", p2i(k), k->name()->as_C_string());
 891       assert(k != k->next_link(), "no loops!");
 892       k = k->next_link();
 893     }
 894   }
 895 #endif  // CLD_DUMP_KLASSES
 896 #undef CLD_DUMP_KLASSES
 897   if (_jmethod_ids != NULL) {
 898     Method::print_jmethod_ids(this, out);
 899   }
 900   out->print_cr("}");
 901 }

 902 
 903 void ClassLoaderData::verify() {
 904   assert_locked_or_safepoint(_metaspace_lock);
 905   oop cl = class_loader();
 906 
 907   guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
 908   guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");
 909 
 910   // Verify the integrity of the allocated space.
 911   if (metaspace_or_null() != NULL) {
 912     metaspace_or_null()->verify();
 913   }
 914 
 915   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
 916     guarantee(k->class_loader_data() == this, "Must be the same");
 917     k->verify();
 918     assert(k != k->next_link(), "no loops!");
 919   }
 920 }
 921 
 922 #endif // !PRODUCT
 923 
 924 void ClassLoaderData::print_value_on(outputStream* out) const {
 925   if (class_loader() == NULL) {
 926     out->print("NULL class loader");
 927   } else {
 928     out->print("class loader " INTPTR_FORMAT " ", p2i(this));
 929     class_loader()->print_value_on(out);
 930   }
 931 }
 932 
 933 void ClassLoaderData::print_on(outputStream* out) const {
 934   if (class_loader() == NULL) {
 935     out->print("NULL class loader");
 936   } else {
 937     out->print("class loader " INTPTR_FORMAT " ", p2i(this));
 938     class_loader()->print_on(out);
 939   }

 940 }
 941 
 942 
 943 // GC root of class loader data created.
 944 ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
 945 ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
 946 ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
 947 ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;
 948 
 949 bool ClassLoaderDataGraph::_should_purge = false;
 950 bool ClassLoaderDataGraph::_metaspace_oom = false;
 951 
 952 static void print_creation(outputStream* out, Handle loader, ClassLoaderData* cld, TRAPS) {
 953   Handle string;
 954   if (loader.not_null()) {
 955     // Include the result of loader.toString() in the output. This allows
 956     // the user of the log to identify the class loader instance.
 957     JavaValue result(T_OBJECT);
 958     Klass* spec_klass = SystemDictionary::ClassLoader_klass();
 959     JavaCalls::call_virtual(&result,
 960                             loader,
 961                             spec_klass,
 962                             vmSymbols::toString_name(),
 963                             vmSymbols::void_string_signature(),
 964                             CHECK);
 965     assert(result.get_type() == T_OBJECT, "just checking");
 966     string = Handle(THREAD, (oop)result.get_jobject());
 967   }
 968 
 969   ResourceMark rm;
 970   out->print("create class loader data " INTPTR_FORMAT, p2i(cld));
 971   out->print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)cld->class_loader()),
 972     cld->loader_name());
 973 
 974   if (string.not_null()) {
 975     out->print(": ");
 976     java_lang_String::print(string(), out);
 977   }
 978   out->cr();
 979 }
 980 
 981 // Add a new class loader data node to the list.  Assign the newly created
 982 // ClassLoaderData into the java/lang/ClassLoader object as a hidden field.
 983 ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous, TRAPS) {
 984   // We need to allocate all the oops for the ClassLoaderData before allocating the
 985   // actual ClassLoaderData object.
 986   ClassLoaderData::Dependencies dependencies(CHECK_NULL);
 987 
 988   NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
 989                                      // ClassLoaderData in the graph since the CLD
 990                                      // contains unhandled oops
 991 
 992   ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous, dependencies);
 993 
 994 
 995   if (!is_anonymous) {
 996     ClassLoaderData** cld_addr = java_lang_ClassLoader::loader_data_addr(loader());
 997     // First, atomically set it.
 998     ClassLoaderData* old = Atomic::cmpxchg(cld, cld_addr, (ClassLoaderData*)NULL);
 999     if (old != NULL) {
1000       delete cld;


1007   // class loader data
1008   ClassLoaderData** list_head = &_head;
1009   ClassLoaderData* next = _head;
1010 
1011   do {
1012     cld->set_next(next);
1013     ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
1014     if (exchanged == next) {
1015       LogTarget(Debug, class, loader, data) lt;
1016       if (lt.is_enabled()) {
1017        PauseNoSafepointVerifier pnsv(&no_safepoints); // Need safe points for JavaCalls::call_virtual
1018        LogStream ls(lt);
1019        print_creation(&ls, loader, cld, CHECK_NULL);
1020       }
1021       return cld;
1022     }
1023     next = exchanged;
1024   } while (true);
1025 }
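
The do/while loop above is the classic lock-free push onto the head of the class loader data graph: point the new node at the current head, CAS the head, and on failure retry from the value the CAS handed back. A compact sketch of that retry loop with std::atomic (the types are placeholders, not HotSpot's):

#include <atomic>

struct CLDNode {
  CLDNode* next = nullptr;
};

static std::atomic<CLDNode*> graph_head{nullptr};

// Lock-free push: succeeds exactly once per node, however many threads race.
static void push(CLDNode* cld) {
  CLDNode* next = graph_head.load(std::memory_order_relaxed);
  do {
    cld->next = next;
    // compare_exchange_weak refreshes 'next' with the observed head on failure,
    // so the retry starts from the freshly read value, just like the code above.
  } while (!graph_head.compare_exchange_weak(next, cld,
                                             std::memory_order_release,
                                             std::memory_order_relaxed));
}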
1026 






























1027 void ClassLoaderDataGraph::oops_do(OopClosure* f, bool must_claim) {
1028   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1029     cld->oops_do(f, must_claim);
1030   }
1031 }
1032 
1033 void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, bool must_claim) {
1034   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1035     if (cld->keep_alive()) {
1036       cld->oops_do(f, must_claim);
1037     }
1038   }
1039 }
1040 
1041 void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, bool must_claim) {
1042   if (ClassUnloading) {
1043     keep_alive_oops_do(f, must_claim);
1044   } else {
1045     oops_do(f, must_claim);
1046   }


1219         ls.cr();
1220       }
1221     }
1222 
1223     curr = curr->_next;
1224   }
1225 
1226   return array;
1227 }
1228 
1229 bool ClassLoaderDataGraph::unload_list_contains(const void* x) {
1230   assert(SafepointSynchronize::is_at_safepoint(), "only safe to call at safepoint");
1231   for (ClassLoaderData* cld = _unloading; cld != NULL; cld = cld->next()) {
1232     if (cld->metaspace_or_null() != NULL && cld->metaspace_or_null()->contains(x)) {
1233       return true;
1234     }
1235   }
1236   return false;
1237 }
1238 
1239 #if INCLUDE_TRACE
1240 static Ticks class_unload_time;
1241 static void post_class_unload_event(Klass* const k) {
1242   assert(k != NULL, "invariant");
1243   EventClassUnload event(UNTIMED);
1244   event.set_endtime(class_unload_time);
1245   event.set_unloadedClass(k);
1246   event.set_definingClassLoader(k->class_loader_data());
1247   event.commit();
1248 }
1249 #endif // INCLUDE_TRACE
1250 
1251 static void post_class_unload_events() {
1252 #if INCLUDE_TRACE
1253   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1254   if (Tracing::enabled()) {
1255     if (Tracing::is_event_enabled(TraceClassUnloadEvent)) {
1256       class_unload_time = Ticks::now();
1257       ClassLoaderDataGraph::classes_unloading_do(&post_class_unload_event);
1258     }
1259     Tracing::on_unloading_classes();
1260   }
1261 #endif // INCLUDE_TRACE

1262 }


1263 
1264 // Move class loader data from main list to the unloaded list for unloading
1265 // and deallocation later.
1266 bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure,
1267                                         bool clean_previous_versions) {
1268 
1269   ClassLoaderData* data = _head;
1270   ClassLoaderData* prev = NULL;
1271   bool seen_dead_loader = false;
1272 
1273   // Mark metadata seen on the stack only so we can delete unneeded entries.
1274   // Only walk all metadata, including the expensive code cache walk, for Full GC,
1275   // and only if a class has been redefined and there are previous versions of
1276   // Klasses to delete.
1277   bool walk_all_metadata = clean_previous_versions &&
1278                            JvmtiExport::has_redefined_a_class() &&
1279                            InstanceKlass::has_previous_versions_and_reset();
1280   MetadataOnStackMark md_on_stack(walk_all_metadata);
1281 
1282   // Save previous _unloading pointer for CMS which may add to unloading list before


1342 }
1343 
1344 void ClassLoaderDataGraph::purge() {
1345   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1346   ClassLoaderData* list = _unloading;
1347   _unloading = NULL;
1348   ClassLoaderData* next = list;
1349   bool classes_unloaded = false;
1350   while (next != NULL) {
1351     ClassLoaderData* purge_me = next;
1352     next = purge_me->next();
1353     delete purge_me;
1354     classes_unloaded = true;
1355   }
1356   if (classes_unloaded) {
1357     Metaspace::purge();
1358     set_metaspace_oom(false);
1359   }
1360 }
1361 
1362 void ClassLoaderDataGraph::clear_claimed_marks() {
1363   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1364     cld->clear_claimed();
1365   }
1366 }
1367 
1368 int ClassLoaderDataGraph::resize_if_needed() {
1369   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1370   int resized = 0;
1371   if (Dictionary::does_any_dictionary_needs_resizing()) {
1372     FOR_ALL_DICTIONARY(cld) {
1373       if (cld->dictionary()->resize_if_needed()) {
1374         resized++;
1375       }
1376     }
1377   }
1378   return resized;
1379 }
1380 
1381 InstanceKlass* ClassLoaderDataGraph::try_get_next_class() {
1382   return static_klass_iterator.try_get_next_class();
1383 }
1384 
1385 #ifndef PRODUCT
1386 void ClassLoaderDataGraph::verify() {
1387   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1388     data->verify();
1389   }
1390 }
1391 
1392 void ClassLoaderDataGraph::dump_on(outputStream * const out) {
1393   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1394     data->dump(out);
1395   }
1396   MetaspaceAux::dump(out);
1397 }
1398 
1399 bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) {
1400   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1401     if (loader_data == data) {
1402       return true;
1403     }
1404   }
1405   return false;
1406 }
1407 
1408 // callable from debugger
1409 extern "C" int print_loader_data_graph() {
1410   ClassLoaderDataGraph::dump_on(tty);
1411   return 0;
1412 }
1413 
1414 #endif // !PRODUCT
1415 
1416 ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic()
1417   : _next_klass(NULL) {
1418   ClassLoaderData* cld = ClassLoaderDataGraph::_head;
1419   Klass* klass = NULL;
1420 
1421   // Find the first klass in the CLDG.
1422   while (cld != NULL) {
1423     assert_locked_or_safepoint(cld->metaspace_lock());
1424     klass = cld->_klasses;
1425     if (klass != NULL) {
1426       _next_klass = klass;
1427       return;
1428     }
1429     cld = cld->next();
1430   }
1431 }
1432 
1433 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) {
1434   Klass* next = klass->next_link();
1435   if (next != NULL) {


1453 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
1454   Klass* head = _next_klass;
1455 
1456   while (head != NULL) {
1457     Klass* next = next_klass_in_cldg(head);
1458 
1459     Klass* old_head = Atomic::cmpxchg(next, &_next_klass, head);
1460 
1461     if (old_head == head) {
1462       return head; // Won the CAS.
1463     }
1464 
1465     head = old_head;
1466   }
1467 
1468   // Nothing more for the iterator to hand out.
1469   assert(head == NULL, "head is " PTR_FORMAT ", expected not null:", p2i(head));
1470   return NULL;
1471 }
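
next_klass() lets several GC worker threads pull classes from one shared cursor: each worker computes the successor of the current head and tries to CAS the cursor forward; the thread whose CAS succeeds owns that Klass, the others retry from the value the CAS observed. A simplified sketch over a plain linked list (it ignores the CLD-to-CLD hop that next_klass_in_cldg() performs):

#include <atomic>

struct Node {
  Node* next = nullptr;
};

class AtomicCursor {
  std::atomic<Node*> _next{nullptr};

 public:
  explicit AtomicCursor(Node* first) : _next(first) {}

  // Each successful call hands out a distinct node; returns nullptr when exhausted.
  Node* claim_next() {
    Node* head = _next.load(std::memory_order_acquire);
    while (head != nullptr) {
      Node* next = head->next;
      if (_next.compare_exchange_weak(head, next,
                                      std::memory_order_acq_rel,
                                      std::memory_order_acquire)) {
        return head;                 // won the CAS, this node is ours
      }
      // 'head' now holds the value another thread installed; retry from there.
    }
    return nullptr;
  }
};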
1472 
1473 ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() :
1474   _data(ClassLoaderDataGraph::_head) {
































1475 }

























