1 /* 2 * Copyright (c) 2012, 2018, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 // A ClassLoaderData identifies the full set of class types that a class 26 // loader's name resolution strategy produces for a given configuration of the 27 // class loader. 28 // Class types in the ClassLoaderData may be defined by from class file binaries 29 // provided by the class loader, or from other class loader it interacts with 30 // according to its name resolution strategy. 31 // 32 // Class loaders that implement a deterministic name resolution strategy 33 // (including with respect to their delegation behavior), such as the boot, the 34 // platform, and the system loaders of the JDK's built-in class loader 35 // hierarchy, always produce the same linkset for a given configuration. 36 // 37 // ClassLoaderData carries information related to a linkset (e.g., 38 // metaspace holding its klass definitions). 
// The System Dictionary and related data structures (e.g., placeholder table,
// loader constraints table) as well as the runtime representation of classes
// only reference ClassLoaderData.
//
// Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData that
// represents the loader's "linking domain" in the JVM.
//
// The bootstrap loader (represented by NULL) also has a ClassLoaderData,
// the singleton class the_null_class_loader_data().

#include "precompiled.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/dictionary.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/metadataOnStackMark.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/packageEntry.hpp"
#include "classfile/systemDictionary.hpp"
#include "code/codeCache.hpp"
#include "gc/shared/gcLocker.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "oops/access.inline.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/mutex.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/safepoint.hpp"
#include "runtime/synchronizer.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
#include "utilities/ostream.hpp"
#if INCLUDE_TRACE
#include "trace/tracing.hpp"
#endif

// Graph-wide counts of loaded array/instance classes; updated by
// add_class/remove_class and the ClassLoaderData destructor, read lock-free.
volatile size_t ClassLoaderDataGraph::_num_array_classes = 0;
volatile size_t ClassLoaderDataGraph::_num_instance_classes = 0;

ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;

// One-time creation of the boot loader's ClassLoaderData. It becomes the
// head of the ClassLoaderDataGraph; must run before any other CLD exists.
void ClassLoaderData::init_null_class_loader_data() {
  assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");

  _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");

  LogTarget(Debug, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("create ");
    _the_null_class_loader_data->print_value_on(&ls);
    ls.cr();
  }
}

// h_class_loader is the java.lang.ClassLoader instance this CLD belongs to
// (null Handle for the boot loader); is_anonymous marks a CLD created for a
// single JSR292 anonymous class.
ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous) :
  _class_loader(h_class_loader()),
  _is_anonymous(is_anonymous),
  // An anonymous class loader data doesn't have anything to keep
  // it from being unloaded during parsing of the anonymous class.
  // The null-class-loader should always be kept alive.
  _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
  _metaspace(NULL), _unloading(false), _klasses(NULL),
  _modules(NULL), _packages(NULL),
  _claimed(0), _modified_oops(true), _accumulated_modified_oops(false),
  _jmethod_ids(NULL), _handles(), _deallocate_list(NULL),
  _next(NULL),
  _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
                            Monitor::_safepoint_check_never)) {

  // A ClassLoaderData created solely for an anonymous class should never have a
  // ModuleEntryTable or PackageEntryTable created for it. The defining package
  // and module for an anonymous class will be found in its host class.
  if (!is_anonymous) {
    _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
    if (h_class_loader.is_null()) {
      // Create unnamed module for boot loader
      _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
    } else {
      // Create unnamed module for all other loaders
      _unnamed_module = ModuleEntry::create_unnamed_module(this);
    }
    _dictionary = create_dictionary();
  } else {
    _packages = NULL;
    _unnamed_module = NULL;
    _dictionary = NULL;
  }

  NOT_PRODUCT(_dependency_count = 0); // number of class loader dependencies

  TRACE_INIT_ID(this);
}

ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
  Chunk* c = _head;
  while (c != NULL) {
    Chunk* next = c->_next;
    delete c;
    c = next;
  }
}

// Append an oop and return the address of its slot. Readers may walk the
// list concurrently (see oops_do), so a new chunk is published with a
// release_store before use, and _size is bumped with a release_store only
// after the slot has been filled in. Callers serialize adds externally
// (e.g. under the metaspace lock in add_handle).
oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
  if (_head == NULL || _head->_size == Chunk::CAPACITY) {
    Chunk* next = new Chunk(_head);
    OrderAccess::release_store(&_head, next);
  }
  oop* handle = &_head->_data[_head->_size];
  *handle = o;
  OrderAccess::release_store(&_head->_size, _head->_size + 1);
  return handle;
}

// Total number of handle slots in use across all chunks (debug/printing).
int ClassLoaderData::ChunkedHandleList::count() const {
  int count = 0;
  Chunk* chunk = _head;
  while (chunk != NULL) {
    count += chunk->_size;
    chunk = chunk->_next;
  }
  return count;
}

// Apply f to the first 'size' slots of chunk c, skipping cleared (NULL) ones.
inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
  for (juint i = 0; i < size; i++) {
    if (c->_data[i] != NULL) {
      f->do_oop(&c->_data[i]);
    }
  }
}

// Lock-free walk: only the head chunk can be growing concurrently, so only
// its _size needs an acquiring load; older chunks are immutable.
void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
  Chunk* head = OrderAccess::load_acquire(&_head);
  if (head != NULL) {
    // Must be careful when reading size of head
    oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
    for (Chunk* c = head->_next; c != NULL; c = c->_next) {
      oops_do_chunk(f, c, c->_size);
    }
  }
}

// Helper closure used by ChunkedHandleList::contains: records whether any
// visited slot equals the target oop.
class VerifyContainsOopClosure : public OopClosure {
  oop _target;
  bool _found;

 public:
  VerifyContainsOopClosure(oop target) : _target(target), _found(false) {}

  void do_oop(oop* p) {
    if (p != NULL && *p == _target) {
      _found = true;
    }
  }

  void do_oop(narrowOop* p) {
    // The ChunkedHandleList should not contain any narrowOop
    ShouldNotReachHere();
  }

  bool found() const {
    return _found;
  }
};

bool ClassLoaderData::ChunkedHandleList::contains(oop p) {
  VerifyContainsOopClosure cl(p);
  oops_do(&cl);
  return cl.found();
}

#ifndef PRODUCT
// Returns true if oop_handle points into one of this list's chunks
// (used to assert that a handle being removed was allocated here).
bool ClassLoaderData::ChunkedHandleList::owner_of(oop* oop_handle) {
  Chunk* chunk = _head;
  while (chunk != NULL) {
    if (&(chunk->_data[0]) <= oop_handle && oop_handle < &(chunk->_data[chunk->_size])) {
      return true;
    }
    chunk = chunk->_next;
  }
  return false;
}
#endif // PRODUCT

// Atomically claim this CLD for the current GC iteration; exactly one caller
// succeeds until clear_claimed() resets the mark.
bool ClassLoaderData::claim() {
  if (_claimed == 1) {
    return false;
  }

  return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0;
}

// Anonymous classes have their own ClassLoaderData that is marked to keep alive
// while the class is being parsed, and if the class appears on the module fixup list.
// Due to the uniqueness that no other class shares the anonymous class' name or
// ClassLoaderData, no other non-GC thread has knowledge of the anonymous class while
// it is being defined, therefore _keep_alive is not volatile or atomic.
void ClassLoaderData::inc_keep_alive() {
  if (is_anonymous()) {
    assert(_keep_alive >= 0, "Invalid keep alive increment count");
    _keep_alive++;
  }
}

void ClassLoaderData::dec_keep_alive() {
  if (is_anonymous()) {
    assert(_keep_alive > 0, "Invalid keep alive decrement count");
    _keep_alive--;
  }
}

// Apply f to the oops owned by this CLD: the class loader oop itself and all
// oops in the handle area. With must_claim set, only the first claimant of
// this CLD does the work (avoids duplicate visits during parallel GC).
void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) {
  if (must_claim && !claim()) {
    return;
  }

  // Only clear modified_oops after the ClassLoaderData is claimed.
  if (clear_mod_oops) {
    clear_modified_oops();
  }

  f->do_oop(&_class_loader);
  _handles.oops_do(f);
}

// Iterate over every Klass defined by this CLD (loaded or not).
void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    klass_closure->do_klass(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::classes_do(void f(Klass * const)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    f(k);
    assert(k != k->next_link(), "no loops!");
  }
}

// Apply f to all methods of all loaded instance klasses in this CLD.
void ClassLoaderData::methods_do(void f(Method*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
      InstanceKlass::cast(k)->methods_do(f);
    }
  }
}

// Like classes_do, but skips instance klasses that are not fully loaded yet.
void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    // Do not filter ArrayKlass oops here...
    if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
      klass_closure->do_klass(k);
    }
  }
}

void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass()) {
      f(InstanceKlass::cast(k));
    }
    assert(k != k->next_link(), "no loops!");
  }
}

// Apply f to the unnamed module and every named module defined to this CLD.
void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_unnamed_module != NULL) {
    f(_unnamed_module);
  }
  if (_modules != NULL) {
    for (int i = 0; i < _modules->table_size(); i++) {
      for (ModuleEntry* entry = _modules->bucket(i);
                        entry != NULL;
                        entry = entry->next()) {
        f(entry);
      }
    }
  }
}

void ClassLoaderData::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_packages != NULL) {
    for (int i = 0; i < _packages->table_size(); i++) {
      for (PackageEntry* entry = _packages->bucket(i);
                         entry != NULL;
                         entry = entry->next()) {
        f(entry);
      }
    }
  }
}

// Record that this CLD depends on k's CLD, by adding an oop that keeps the
// dependee alive into this CLD's handle area. Prevents the dependee from
// being unloaded while this loader's classes still reference its classes.
void ClassLoaderData::record_dependency(const Klass* k) {
  assert(k != NULL, "invariant");

  ClassLoaderData * const from_cld = this;
  ClassLoaderData * const to_cld = k->class_loader_data();

  // Do not need to record dependency if the dependency is to a class whose
  // class loader data is never freed. (i.e. the dependency's class loader
  // is one of the three builtin class loaders and the dependency is not
  // anonymous.)
  if (to_cld->is_permanent_class_loader_data()) {
    return;
  }

  oop to;
  if (to_cld->is_anonymous()) {
    // Just return if an anonymous class is attempting to record a dependency
    // to itself. (Note that every anonymous class has its own unique class
    // loader data.)
    if (to_cld == from_cld) {
      return;
    }
    // Anonymous class dependencies are through the mirror.
    to = k->java_mirror();
  } else {
    to = to_cld->class_loader();
    oop from = from_cld->class_loader();

    // Just return if this dependency is to a class with the same or a parent
    // class_loader.
    if (from == to || java_lang_ClassLoader::isAncestor(from, to)) {
      return; // this class loader is in the parent list, no need to add it.
    }
  }

  // It's a dependency we won't find through GC, add it.
  if (!_handles.contains(to)) {
    NOT_PRODUCT(Atomic::inc(&_dependency_count));
    LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("adding dependency from ");
      print_value_on(&ls);
      ls.print(" to ");
      to_cld->print_value_on(&ls);
      ls.cr();
    }
    Handle dependency(Thread::current(), to);
    add_handle(dependency);
    // Added a potentially young gen oop to the ClassLoaderData
    record_modified_oops();
  }
}


// Reset the _claimed flag of every CLD so the next GC iteration can claim
// them again.
void ClassLoaderDataGraph::clear_claimed_marks() {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->clear_claimed();
  }
}

// Push a newly created klass onto this CLD's _klasses list. The list is
// walked lock-free by readers, so publication uses release_store under the
// metaspace lock.
void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
  {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    Klass* old_value = _klasses;
    k->set_next_link(old_value);
    // Link the new item into the list, making sure the linked class is stable
    // since the list can be walked without a lock
    OrderAccess::release_store(&_klasses, k);
    if (k->is_array_klass()) {
      ClassLoaderDataGraph::inc_array_classes(1);
    } else {
      ClassLoaderDataGraph::inc_instance_classes(1);
    }
  }

  if (publicize) {
    LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), k->external_name());
      print_value_on(&ls);
      ls.cr();
    }
  }
}

// Class iterator used by the compiler. It gets some number of classes at
// a safepoint to decay invocation counters on the methods.
class ClassLoaderDataGraphKlassIteratorStatic {
  ClassLoaderData* _current_loader_data;
  Klass*           _current_class_entry;
 public:

  ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {}

  InstanceKlass* try_get_next_class() {
    assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
    size_t max_classes = ClassLoaderDataGraph::num_instance_classes();
    assert(max_classes > 0, "should not be called with no instance classes");
    // Note: i is only advanced when an instance klass is seen, so the loop
    // bound counts instance classes, matching max_classes.
    for (size_t i = 0; i < max_classes; ) {

      if (_current_class_entry != NULL) {
        Klass* k = _current_class_entry;
        _current_class_entry = _current_class_entry->next_link();

        if (k->is_instance_klass()) {
          InstanceKlass* ik = InstanceKlass::cast(k);
          i++;  // count all instance classes found
          // Not yet loaded classes are counted in max_classes
          // but only return loaded classes.
          if (ik->is_loaded()) {
            return ik;
          }
        }
      } else {
        // Go to next CLD
        if (_current_loader_data != NULL) {
          _current_loader_data = _current_loader_data->next();
        }
        // Start at the beginning
        if (_current_loader_data == NULL) {
          _current_loader_data = ClassLoaderDataGraph::_head;
        }

        _current_class_entry = _current_loader_data->klasses();
      }
    }
    // Should never be reached unless all instance classes have failed or are not fully loaded.
    // Caller handles NULL.
    return NULL;
  }

  // If the current class for the static iterator is a class being unloaded or
  // deallocated, adjust the current class.
  void adjust_saved_class(ClassLoaderData* cld) {
    if (_current_loader_data == cld) {
      _current_loader_data = cld->next();
      if (_current_loader_data != NULL) {
        _current_class_entry = _current_loader_data->klasses();
      }  // else try_get_next_class will start at the head
    }
  }

  void adjust_saved_class(Klass* klass) {
    if (_current_class_entry == klass) {
      _current_class_entry = klass->next_link();
    }
  }
};

static ClassLoaderDataGraphKlassIteratorStatic static_klass_iterator;

InstanceKlass* ClassLoaderDataGraph::try_get_next_class() {
  return static_klass_iterator.try_get_next_class();
}


// Remove a klass from the _klasses list for scratch_class during redefinition
// or parsed class in the case of an error.
void ClassLoaderData::remove_class(Klass* scratch_class) {
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");

  // Adjust global class iterator.
  static_klass_iterator.adjust_saved_class(scratch_class);

  Klass* prev = NULL;
  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    if (k == scratch_class) {
      if (prev == NULL) {
        _klasses = k->next_link();
      } else {
        Klass* next = k->next_link();
        prev->set_next_link(next);
      }

      if (k->is_array_klass()) {
        ClassLoaderDataGraph::dec_array_classes(1);
      } else {
        ClassLoaderDataGraph::dec_instance_classes(1);
      }

      return;
    }
    prev = k;
    assert(k != k->next_link(), "no loops!");
  }
  ShouldNotReachHere();   // should have found this class!!
}

// Mark this CLD as unloading and notify interested parties; the CLD itself
// is freed later, when the graph is purged.
void ClassLoaderData::unload() {
  _unloading = true;

  // Tell serviceability tools these classes are unloading
  classes_do(InstanceKlass::notify_unload_class);

  LogTarget(Debug, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("unload ");
    print_value_on(&ls);
    ls.cr();
  }

  // Some items on the _deallocate_list need to free their C heap structures
  // if they are not already on the _klasses list.
  unload_deallocate_list();

  // Clean up global class iterator for compiler
  static_klass_iterator.adjust_saved_class(this);
}

ModuleEntryTable* ClassLoaderData::modules() {
  // Lazily create the module entry table at first request.
  // Lock-free access requires load_acquire.
  ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
  if (modules == NULL) {
    MutexLocker m1(Module_lock);
    // Check if _modules got allocated while we were waiting for this lock.
    if ((modules = _modules) == NULL) {
      modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);

      {
        MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
        // Ensure _modules is stable, since it is examined without a lock
        OrderAccess::release_store(&_modules, modules);
      }
    }
  }
  return modules;
}

const int _boot_loader_dictionary_size    = 1009;
const int _default_loader_dictionary_size = 107;

// Size and resizability of the dictionary depend on which loader this CLD
// is for; sharing (CDS) disables resizing because archived dictionaries
// have a fixed layout.
Dictionary* ClassLoaderData::create_dictionary() {
  assert(!is_anonymous(), "anonymous class loader data do not have a dictionary");
  int size;
  bool resizable = false;
  if (_the_null_class_loader_data == NULL) {
    size = _boot_loader_dictionary_size;
    resizable = true;
  } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
    size = 1;  // there's only one class in reflection class loader and no initiated classes
  } else if (is_system_class_loader_data()) {
    size = _boot_loader_dictionary_size;
    resizable = true;
  } else {
    size = _default_loader_dictionary_size;
    resizable = true;
  }
  if (!DynamicallyResizeSystemDictionaries || DumpSharedSpaces || UseSharedSpaces) {
    resizable = false;
  }
  return new Dictionary(this, size, resizable);
}

// Unloading support
// The oop whose reachability decides whether this CLD is alive: the first
// class's mirror for anonymous CLDs, otherwise the class loader oop.
oop ClassLoaderData::keep_alive_object() const {
  assert_locked_or_safepoint(_metaspace_lock);
  assert(!keep_alive(), "Don't use with CLDs that are artificially kept alive");
  return is_anonymous() ? _klasses->java_mirror() : class_loader();
}

bool ClassLoaderData::is_alive(BoolObjectClosure* is_alive_closure) const {
  bool alive = keep_alive()         // null class loader and incomplete anonymous klasses.
      || is_alive_closure->do_object_b(keep_alive_object());

  return alive;
}

// Closure used by the CLD destructor: releases per-InstanceKlass C heap
// structures and tallies how many instance/array classes went away so the
// graph counters can be decremented in one shot.
class ReleaseKlassClosure: public KlassClosure {
 private:
  size_t  _instance_class_released;
  size_t  _array_class_released;
 public:
  ReleaseKlassClosure() : _instance_class_released(0), _array_class_released(0) { }

  size_t instance_class_released() const { return _instance_class_released; }
  size_t array_class_released()    const { return _array_class_released;    }

  void do_klass(Klass* k) {
    if (k->is_array_klass()) {
      _array_class_released ++;
    } else {
      assert(k->is_instance_klass(), "Must be");
      _instance_class_released ++;
      InstanceKlass::release_C_heap_structures(InstanceKlass::cast(k));
    }
  }
};

ClassLoaderData::~ClassLoaderData() {
  // Release C heap structures for all the classes.
  ReleaseKlassClosure cl;
  classes_do(&cl);

  ClassLoaderDataGraph::dec_array_classes(cl.array_class_released());
  ClassLoaderDataGraph::dec_instance_classes(cl.instance_class_released());

  // Release C heap allocated hashtable for all the packages.
  if (_packages != NULL) {
    // Destroy the table itself
    delete _packages;
    _packages = NULL;
  }

  // Release C heap allocated hashtable for all the modules.
  if (_modules != NULL) {
    // Destroy the table itself
    delete _modules;
    _modules = NULL;
  }

  // Release C heap allocated hashtable for the dictionary
  if (_dictionary != NULL) {
    // Destroy the table itself
    delete _dictionary;
    _dictionary = NULL;
  }

  if (_unnamed_module != NULL) {
    _unnamed_module->delete_unnamed_module();
    _unnamed_module = NULL;
  }

  // release the metaspace
  ClassLoaderMetaspace *m = _metaspace;
  if (m != NULL) {
    _metaspace = NULL;
    delete m;
  }
  // Clear all the JNI handles for methods
  // These aren't deallocated and are going to look like a leak, but that's
  // needed because we can't really get rid of jmethodIDs because we don't
  // know when native code is going to stop using them. The spec says that
  // they're "invalid" but existing programs likely rely on their being
  // NULL after class unloading.
  if (_jmethod_ids != NULL) {
    Method::clear_jmethod_ids(this);
  }
  // Delete lock
  delete _metaspace_lock;

  // Delete free list
  if (_deallocate_list != NULL) {
    delete _deallocate_list;
  }
}

// Returns true if this class loader data is for the app class loader
// or a user defined system class loader. (Note that the class loader
// data may be anonymous.)
bool ClassLoaderData::is_system_class_loader_data() const {
  return SystemDictionary::is_system_class_loader(class_loader());
}

// Returns true if this class loader data is for the platform class loader.
// (Note that the class loader data may be anonymous.)
bool ClassLoaderData::is_platform_class_loader_data() const {
  return SystemDictionary::is_platform_class_loader(class_loader());
}

// Returns true if the class loader for this class loader data is one of
// the 3 builtin (boot, application/system or platform) class loaders,
// including a user-defined system class loader. Note that if the class
// loader data is for an anonymous class then it may get freed by a GC
// even if its class loader is one of these loaders.
bool ClassLoaderData::is_builtin_class_loader_data() const {
  return (is_boot_class_loader_data() ||
          SystemDictionary::is_system_class_loader(class_loader()) ||
          SystemDictionary::is_platform_class_loader(class_loader()));
}

// Returns true if this class loader data is a class loader data
// that is not ever freed by a GC. It must be one of the builtin
// class loaders and not anonymous.
bool ClassLoaderData::is_permanent_class_loader_data() const {
  return is_builtin_class_loader_data() && !is_anonymous();
}

ClassLoaderMetaspace* ClassLoaderData::metaspace_non_null() {
  // If the metaspace has not been allocated, create a new one. Might want
  // to create smaller arena for Reflection class loaders also.
  // The reason for the delayed allocation is because some class loaders are
  // simply for delegating with no metadata of their own.
  // Lock-free access requires load_acquire.
  ClassLoaderMetaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
  if (metaspace == NULL) {
    MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
    // Check if _metaspace got allocated while we were waiting for this lock.
    if ((metaspace = _metaspace) == NULL) {
      if (this == the_null_class_loader_data()) {
        assert (class_loader() == NULL, "Must be");
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
      } else if (is_anonymous()) {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
      } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
      } else {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
      }
      // Ensure _metaspace is stable, since it is examined without a lock
      OrderAccess::release_store(&_metaspace, metaspace);
    }
  }
  return metaspace;
}

// Store h's oop in this CLD's handle area and return an OopHandle to the slot.
OopHandle ClassLoaderData::add_handle(Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  record_modified_oops();
  return OopHandle(_handles.add(h()));
}

// Clear (not reclaim) the slot backing h; the slot stays allocated in the
// chunked list.
void ClassLoaderData::remove_handle(OopHandle h) {
  assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
  oop* ptr = h.ptr_raw();
  if (ptr != NULL) {
    assert(_handles.owner_of(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
    // This root is not walked in safepoints, and hence requires an appropriate
    // decorator that e.g. maintains the SATB invariant in SATB collectors.
    RootAccess<IN_CONCURRENT_ROOT>::oop_store(ptr, oop(NULL));
  }
}

// Initialize dest with a handle for h's oop, but only if it has not been
// initialized already (checked under the metaspace lock).
void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  if (dest.resolve() != NULL) {
    return;
  } else {
    dest = _handles.add(h());
  }
}

// Add this metadata pointer to be freed when it's safe. This is only during
// class unloading because Handles might point to this metadata field.
void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
  // Metadata in shared region isn't deleted.
  if (!m->is_shared()) {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    if (_deallocate_list == NULL) {
      _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
    }
    _deallocate_list->append_if_missing(m);
  }
}

// Deallocate free metadata on the free list. How useful the PermGen was!
void ClassLoaderData::free_deallocate_list() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        ShouldNotReachHere();
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
    }
  }
}

// This is distinct from free_deallocate_list. For class loader data that are
// unloading, this frees the C heap memory for items on the list, and unlinks
// scratch or error classes so that unloading events aren't triggered for these
// classes. The metadata is removed with the unloading metaspace.
// There isn't C heap memory allocated for methods, so nothing is done for them.
void ClassLoaderData::unload_deallocate_list() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(is_unloading(), "only called for ClassLoaderData that are unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    assert (!m->on_stack(), "wouldn't be unloading if this were so");
    _deallocate_list->remove_at(i);
    if (m->is_constantPool()) {
      ((ConstantPool*)m)->release_C_heap_structures();
    } else if (m->is_klass()) {
      InstanceKlass* ik = (InstanceKlass*)m;
      // also releases ik->constants() C heap memory
      InstanceKlass::release_C_heap_structures(ik);
      // Remove the class so unloading events aren't triggered for
      // this class (scratch or error class) in do_unloading().
      remove_class(ik);
    }
  }
}

// These anonymous class loaders are to contain classes used for JSR292
ClassLoaderData* ClassLoaderData::anonymous_class_loader_data(Handle loader) {
  // Add a new class loader data to the graph.
  return ClassLoaderDataGraph::add(loader, true);
}

const char* ClassLoaderData::loader_name() const {
  // Handles null class loader
  return SystemDictionary::loader_name(class_loader());
}


void ClassLoaderData::print_value_on(outputStream* out) const {
  if (class_loader() != NULL) {
    out->print("loader data: " INTPTR_FORMAT " for instance ", p2i(this));
    class_loader()->print_value_on(out);  // includes loader_name() and address of class loader instance
  } else {
    // loader data: 0xsomeaddr of <bootloader>
    out->print("loader data: " INTPTR_FORMAT " of %s", p2i(this), loader_name());
  }
  if (is_anonymous()) {
    out->print(" anonymous");
  }
}

#ifndef PRODUCT
void ClassLoaderData::print_on(outputStream* out) const {
  out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: %s {",
             p2i(this), p2i((void *)class_loader()), loader_name());
  if (is_anonymous()) out->print(" anonymous");
  if (claimed()) out->print(" claimed");
  if (is_unloading()) out->print(" unloading");
  out->print(" metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));

  if (_jmethod_ids != NULL) {
    Method::print_jmethod_ids(this, out);
  }
  out->print(" handles count %d", _handles.count());
  out->print(" dependencies %d", _dependency_count);
  out->print_cr("}");
}
#endif // PRODUCT

// Sanity-check this CLD: the back-pointer from its class loader oop, its
// metaspace, and every klass on its list.
void ClassLoaderData::verify() {
  assert_locked_or_safepoint(_metaspace_lock);
  oop cl = class_loader();

  guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
  guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");

  // Verify the integrity of the allocated space.
  if (metaspace_or_null() != NULL) {
    metaspace_or_null()->verify();
  }

  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    guarantee(k->class_loader_data() == this, "Must be the same");
    k->verify();
    assert(k != k->next_link(), "no loops!");
  }
}

bool ClassLoaderData::contains_klass(Klass* klass) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k == klass) return true;
  }
  return false;
}


// GC root of class loader data created.
ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;

bool ClassLoaderDataGraph::_should_purge = false;
bool ClassLoaderDataGraph::_metaspace_oom = false;

// Add a new class loader data node to the list. Assign the newly created
// ClassLoaderData into the java/lang/ClassLoader object as a hidden field
ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous) {
  NoSafepointVerifier no_safepoints;  // we mustn't GC until we've installed the
                                      // ClassLoaderData in the graph since the CLD
                                      // contains unhandled oops

  ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous);


  if (!is_anonymous) {
    // First, Atomically set it
    ClassLoaderData* old = java_lang_ClassLoader::cmpxchg_loader_data(cld, loader(), NULL);
    if (old != NULL) {
      // Lost the race to install the CLD in the loader oop; discard ours.
      delete cld;
      // Returns the data.
      return old;
    }
  }

  // We won the race, and therefore the task of adding the data to the list of
  // class loader data
  ClassLoaderData** list_head = &_head;
  ClassLoaderData* next = _head;

  // Lock-free push onto the graph head; retry on CAS failure.
  do {
    cld->set_next(next);
    ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
    if (exchanged == next) {
      LogTarget(Debug, class, loader, data) lt;
      if (lt.is_enabled()) {
        ResourceMark rm;
        LogStream ls(lt);
        ls.print("create ");
        cld->print_value_on(&ls);
        ls.cr();
      }
      return cld;
    }
    next = exchanged;
  } while (true);
}

void ClassLoaderDataGraph::oops_do(OopClosure* f, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->oops_do(f, must_claim);
  }
}

// Visit only CLDs that are artificially kept alive (boot loader, anonymous
// classes being parsed).
void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    if (cld->keep_alive()) {
      cld->oops_do(f, must_claim);
    }
  }
}

void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, bool must_claim) {
  if (ClassUnloading) {
    keep_alive_oops_do(f, must_claim);
  } else {
    oops_do(f, must_claim);
  }
}

void ClassLoaderDataGraph::cld_do(CLDClosure* cl) {
  for (ClassLoaderData* cld = _head; cl != NULL && cld != NULL; cld = cld->next()) {
    cl->do_cld(cld);
  }
}

void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
1023 for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) { 1024 assert(cld->is_unloading(), "invariant"); 1025 cl->do_cld(cld); 1026 } 1027 } 1028 1029 void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) { 1030 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->_next) { 1031 CLDClosure* closure = cld->keep_alive() ? strong : weak; 1032 if (closure != NULL) { 1033 closure->do_cld(cld); 1034 } 1035 } 1036 } 1037 1038 void ClassLoaderDataGraph::keep_alive_cld_do(CLDClosure* cl) { 1039 roots_cld_do(cl, NULL); 1040 } 1041 1042 void ClassLoaderDataGraph::always_strong_cld_do(CLDClosure* cl) { 1043 if (ClassUnloading) { 1044 keep_alive_cld_do(cl); 1045 } else { 1046 cld_do(cl); 1047 } 1048 } 1049 1050 void ClassLoaderDataGraph::classes_do(KlassClosure* klass_closure) { 1051 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1052 cld->classes_do(klass_closure); 1053 } 1054 } 1055 1056 void ClassLoaderDataGraph::classes_do(void f(Klass* const)) { 1057 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1058 cld->classes_do(f); 1059 } 1060 } 1061 1062 void ClassLoaderDataGraph::methods_do(void f(Method*)) { 1063 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1064 cld->methods_do(f); 1065 } 1066 } 1067 1068 void ClassLoaderDataGraph::modules_do(void f(ModuleEntry*)) { 1069 assert_locked_or_safepoint(Module_lock); 1070 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1071 cld->modules_do(f); 1072 } 1073 } 1074 1075 void ClassLoaderDataGraph::modules_unloading_do(void f(ModuleEntry*)) { 1076 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1077 // Only walk the head until any clds not purged from prior unloading 1078 // (CMS doesn't purge right away). 
1079 for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) { 1080 assert(cld->is_unloading(), "invariant"); 1081 cld->modules_do(f); 1082 } 1083 } 1084 1085 void ClassLoaderDataGraph::packages_do(void f(PackageEntry*)) { 1086 assert_locked_or_safepoint(Module_lock); 1087 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1088 cld->packages_do(f); 1089 } 1090 } 1091 1092 void ClassLoaderDataGraph::packages_unloading_do(void f(PackageEntry*)) { 1093 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1094 // Only walk the head until any clds not purged from prior unloading 1095 // (CMS doesn't purge right away). 1096 for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) { 1097 assert(cld->is_unloading(), "invariant"); 1098 cld->packages_do(f); 1099 } 1100 } 1101 1102 void ClassLoaderDataGraph::loaded_classes_do(KlassClosure* klass_closure) { 1103 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1104 cld->loaded_classes_do(klass_closure); 1105 } 1106 } 1107 1108 void ClassLoaderDataGraph::classes_unloading_do(void f(Klass* const)) { 1109 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1110 // Only walk the head until any clds not purged from prior unloading 1111 // (CMS doesn't purge right away). 1112 for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) { 1113 assert(cld->is_unloading(), "invariant"); 1114 cld->classes_do(f); 1115 } 1116 } 1117 1118 #define FOR_ALL_DICTIONARY(X) for (ClassLoaderData* X = _head; X != NULL; X = X->next()) \ 1119 if (X->dictionary() != NULL) 1120 1121 // Walk classes in the loaded class dictionaries in various forms. 1122 // Only walks the classes defined in this class loader. 
// Applies f to every InstanceKlass defined by each loader (dictionary walk).
void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*)) {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->classes_do(f);
  }
}

// Only walks the classes defined in this class loader.
// TRAPS variant: f may throw; propagation aborts the walk via CHECK.
void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*, TRAPS), TRAPS) {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->classes_do(f, CHECK);
  }
}

// Walks all entries in the dictionary including entries initiated by this class loader.
void ClassLoaderDataGraph::dictionary_all_entries_do(void f(InstanceKlass*, ClassLoaderData*)) {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->all_entries_do(f);
  }
}

// Verifies every loader's dictionary.
void ClassLoaderDataGraph::verify_dictionary() {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->verify();
  }
}

// Prints each loader's dictionary, preceded by the CLD's one-line description.
void ClassLoaderDataGraph::print_dictionary(outputStream* st) {
  FOR_ALL_DICTIONARY(cld) {
    st->print("Dictionary for ");
    cld->print_value_on(st);
    st->cr();
    cld->dictionary()->print_on(st);
    st->cr();
  }
}

// Prints hash-table statistics for each loader's dictionary.
void ClassLoaderDataGraph::print_dictionary_statistics(outputStream* st) {
  FOR_ALL_DICTIONARY(cld) {
    ResourceMark rm;
    stringStream tempst;
    tempst.print("System Dictionary for %s", cld->loader_name());
    cld->dictionary()->print_table_statistics(st, tempst.as_string());
  }
}

// Returns the unclaimed CLDs added to the graph since the _saved_head
// snapshot was taken (by remember_new_clds(true)). Resource-allocated array.
GrowableArray<ClassLoaderData*>* ClassLoaderDataGraph::new_clds() {
  assert(_head == NULL || _saved_head != NULL, "remember_new_clds(true) not called?");

  GrowableArray<ClassLoaderData*>* array = new GrowableArray<ClassLoaderData*>();

  // The CLDs in [_head, _saved_head] were all added during last call to remember_new_clds(true);
  // (new CLDs are pushed at the head, so everything before _saved_head is new).
  ClassLoaderData* curr = _head;
  while (curr != _saved_head) {
    if (!curr->claimed()) {
      array->push(curr);
      LogTarget(Debug, class, loader, data) lt;
      if (lt.is_enabled()) {
        LogStream ls(lt);
        ls.print("found new CLD: ");
        curr->print_value_on(&ls);
        ls.cr();
      }
    }

    curr = curr->_next;
  }

  return array;
}

// Returns true if x lies in the metaspace of any CLD on the unloading list.
bool ClassLoaderDataGraph::unload_list_contains(const void* x) {
  assert(SafepointSynchronize::is_at_safepoint(), "only safe to call at safepoint");
  for (ClassLoaderData* cld = _unloading; cld != NULL; cld = cld->next()) {
    // Needs fixing, see JDK-8199007.
    // NOTE(review): Metaspace::contains(x) checks ALL metaspaces, not just this
    // CLD's — that is the known imprecision tracked by the bug above.
    if (cld->metaspace_or_null() != NULL && Metaspace::contains(x)) {
      return true;
    }
  }
  return false;
}

#ifndef PRODUCT
// Debug-only: returns true if loader_data is currently linked into the graph.
bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    if (loader_data == data) {
      return true;
    }
  }

  return false;
}
#endif // PRODUCT


// Move class loader data from main list to the unloaded list for unloading
// and deallocation later.
// Returns true if any dead loader was found; in that case live dictionaries,
// package exports, and module reads are also scrubbed of references to the
// dead loaders, and class-unload events are posted.
bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure,
                                        bool clean_previous_versions) {

  ClassLoaderData* data = _head;
  ClassLoaderData* prev = NULL;
  bool seen_dead_loader = false;

  // Mark metadata seen on the stack only so we can delete unneeded entries.
  // Only walk all metadata, including the expensive code cache walk, for Full GC
  // and only if class redefinition and if there's previous versions of
  // Klasses to delete.
  bool walk_all_metadata = clean_previous_versions &&
                           JvmtiExport::has_redefined_a_class() &&
                           InstanceKlass::has_previous_versions_and_reset();
  MetadataOnStackMark md_on_stack(walk_all_metadata);

  // Save previous _unloading pointer for CMS which may add to unloading list before
  // purging and we don't want to rewalk the previously unloaded class loader data.
  _saved_unloading = _unloading;

  // First pass: unlink dead CLDs from the live list and push them onto
  // _unloading; clean up deallocate lists of live CLDs as we go.
  data = _head;
  while (data != NULL) {
    if (data->is_alive(is_alive_closure)) {
      // clean metaspace
      if (walk_all_metadata) {
        data->classes_do(InstanceKlass::purge_previous_versions);
      }
      data->free_deallocate_list();
      prev = data;
      data = data->next();
      continue;
    }
    seen_dead_loader = true;
    ClassLoaderData* dead = data;
    dead->unload();
    data = data->next();
    // Remove from loader list.
    // This class loader data will no longer be found
    // in the ClassLoaderDataGraph.
    if (prev != NULL) {
      prev->set_next(data);
    } else {
      assert(dead == _head, "sanity check");
      _head = data;
    }
    dead->set_next(_unloading);
    _unloading = dead;
  }

  // Second pass (only if something died): purge references from the
  // surviving CLDs to the now-dead loaders.
  if (seen_dead_loader) {
    data = _head;
    while (data != NULL) {
      // Remove entries in the dictionary of live class loader that have
      // initiated loading classes in a dead class loader.
      if (data->dictionary() != NULL) {
        data->dictionary()->do_unloading(is_alive_closure);
      }
      // Walk a ModuleEntry's reads, and a PackageEntry's exports
      // lists to determine if there are modules on those lists that are now
      // dead and should be removed. A module's life cycle is equivalent
      // to its defining class loader's life cycle. Since a module is
      // considered dead if its class loader is dead, these walks must
      // occur after each class loader's aliveness is determined.
      if (data->packages() != NULL) {
        data->packages()->purge_all_package_exports();
      }
      if (data->modules_defined()) {
        data->modules()->purge_all_module_reads();
      }
      data = data->next();
    }

    post_class_unload_events();
  }

  return seen_dead_loader;
}

// Deletes every CLD on the unloading list and, if anything was deleted,
// purges the metaspace and clears the metaspace-OOM flag.
void ClassLoaderDataGraph::purge() {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  ClassLoaderData* list = _unloading;
  _unloading = NULL;
  ClassLoaderData* next = list;
  bool classes_unloaded = false;
  while (next != NULL) {
    ClassLoaderData* purge_me = next;
    next = purge_me->next();  // grab the link before deleting the node
    delete purge_me;
    classes_unloaded = true;
  }
  if (classes_unloaded) {
    Metaspace::purge();
    set_metaspace_oom(false);
  }
}

// Resizes any dictionaries that have requested it; returns how many were resized.
int ClassLoaderDataGraph::resize_if_needed() {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  int resized = 0;
  if (Dictionary::does_any_dictionary_needs_resizing()) {
    FOR_ALL_DICTIONARY(cld) {
      if (cld->dictionary()->resize_if_needed()) {
        resized++;
      }
    }
  }
  return resized;
}

// Posts a class-unload trace event for each klass on the unloading list
// (no-op unless tracing is compiled in and enabled).
void ClassLoaderDataGraph::post_class_unload_events() {
#if INCLUDE_TRACE
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  if (Tracing::enabled()) {
    if (Tracing::is_event_enabled(TraceClassUnloadEvent)) {
      assert(_unloading != NULL, "need class loader data unload list!");
      _class_unload_time = Ticks::now();
      classes_unloading_do(&class_unload_event);
    }
    Tracing::on_unloading_classes();
  }
#endif
}

// Positions the iterator at the first klass of the first CLD that has any.
ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic()
    : _next_klass(NULL) {
  ClassLoaderData* cld = ClassLoaderDataGraph::_head;
  Klass* klass = NULL;

  // Find the first klass in the CLDG.
  while (cld != NULL) {
    assert_locked_or_safepoint(cld->metaspace_lock());
    klass = cld->_klasses;
    if (klass != NULL) {
      _next_klass = klass;
      return;
    }
    cld = cld->next();
  }
}

// Returns the klass after `klass` in CLDG order: next in the same CLD's list,
// else the first klass of a subsequent CLD; NULL at the end of the graph.
Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) {
  Klass* next = klass->next_link();
  if (next != NULL) {
    return next;
  }

  // No more klasses in the current CLD. Time to find a new CLD.
  ClassLoaderData* cld = klass->class_loader_data();
  assert_locked_or_safepoint(cld->metaspace_lock());
  while (next == NULL) {
    cld = cld->next();
    if (cld == NULL) {
      break;
    }
    next = cld->_klasses;
  }

  return next;
}

// Hands out the next klass, atomically, so multiple threads can share the
// iterator: each thread claims the current head by CAS-ing in its successor.
Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
  Klass* head = _next_klass;

  while (head != NULL) {
    Klass* next = next_klass_in_cldg(head);

    Klass* old_head = Atomic::cmpxchg(next, &_next_klass, head);

    if (old_head == head) {
      return head; // Won the CAS.
    }

    head = old_head;  // another thread claimed `head`; retry from the new head
  }

  // Nothing more for the iterator to hand out.
  // NOTE(review): the message text "expected not null:" looks inverted — the
  // assert fires when head is NOT null, i.e. null is expected here. Verify
  // against upstream before changing the literal.
  assert(head == NULL, "head is " PTR_FORMAT ", expected not null:", p2i(head));
  return NULL;
}

// Iterator over metaspaces starts at the head of the CLD graph.
ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
  _data = ClassLoaderDataGraph::_head;
}

ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}

#ifndef PRODUCT
// callable from debugger
extern "C" int print_loader_data_graph() {
  ResourceMark rm;
  ClassLoaderDataGraph::print_on(tty);
  return 0;
}

// Debug-only: verifies every CLD in the graph.
void ClassLoaderDataGraph::verify() {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    data->verify();
  }
}

// Debug-only: prints every CLD in the graph.
void ClassLoaderDataGraph::print_on(outputStream * const out) {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    data->print_on(out);
  }
}
#endif // PRODUCT

#if INCLUDE_TRACE

// Timestamp shared by all ClassUnload events of one unloading pass.
Ticks ClassLoaderDataGraph::_class_unload_time;

// Emits one ClassUnload trace event for k, stamped with _class_unload_time.
void ClassLoaderDataGraph::class_unload_event(Klass* const k) {
  assert(k != NULL, "invariant");

  // post class unload event
  EventClassUnload event(UNTIMED);
  event.set_endtime(_class_unload_time);
  event.set_unloadedClass(k);
  event.set_definingClassLoader(k->class_loader_data());
  event.commit();
}

#endif // INCLUDE_TRACE