/*
 * Copyright (c) 2012, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// A ClassLoaderData identifies the full set of class types that a class
// loader's name resolution strategy produces for a given configuration of the
// class loader.
// Class types in the ClassLoaderData may be defined from class file binaries
// provided by the class loader, or by other class loaders it interacts with
// according to its name resolution strategy.
//
// Class loaders that implement a deterministic name resolution strategy
// (including with respect to their delegation behavior), such as the boot, the
// platform, and the system loaders of the JDK's built-in class loader
// hierarchy, always produce the same linkset for a given configuration.
//
// ClassLoaderData carries information related to a linkset (e.g.,
// metaspace holding its klass definitions).
39 // The System Dictionary and related data structures (e.g., placeholder table, 40 // loader constraints table) as well as the runtime representation of classes 41 // only reference ClassLoaderData. 42 // 43 // Instances of java.lang.ClassLoader holds a pointer to a ClassLoaderData that 44 // that represent the loader's "linking domain" in the JVM. 45 // 46 // The bootstrap loader (represented by NULL) also has a ClassLoaderData, 47 // the singleton class the_null_class_loader_data(). 48 49 #include "precompiled.hpp" 50 #include "classfile/classLoaderData.hpp" 51 #include "classfile/classLoaderData.inline.hpp" 52 #include "classfile/dictionary.hpp" 53 #include "classfile/javaClasses.hpp" 54 #include "classfile/metadataOnStackMark.hpp" 55 #include "classfile/moduleEntry.hpp" 56 #include "classfile/packageEntry.hpp" 57 #include "classfile/systemDictionary.hpp" 58 #include "code/codeCache.hpp" 59 #include "gc/shared/gcLocker.hpp" 60 #include "logging/log.hpp" 61 #include "logging/logStream.hpp" 62 #include "memory/metadataFactory.hpp" 63 #include "memory/metaspaceShared.hpp" 64 #include "memory/oopFactory.hpp" 65 #include "memory/resourceArea.hpp" 66 #include "oops/access.inline.hpp" 67 #include "oops/objArrayOop.inline.hpp" 68 #include "oops/oop.inline.hpp" 69 #include "runtime/atomic.hpp" 70 #include "runtime/javaCalls.hpp" 71 #include "runtime/jniHandles.hpp" 72 #include "runtime/mutex.hpp" 73 #include "runtime/orderAccess.hpp" 74 #include "runtime/safepoint.hpp" 75 #include "runtime/synchronizer.hpp" 76 #include "utilities/growableArray.hpp" 77 #include "utilities/macros.hpp" 78 #include "utilities/ostream.hpp" 79 #if INCLUDE_TRACE 80 #include "trace/tracing.hpp" 81 #endif 82 83 volatile size_t ClassLoaderDataGraph::_num_array_classes = 0; 84 volatile size_t ClassLoaderDataGraph::_num_instance_classes = 0; 85 86 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL; 87 88 void ClassLoaderData::init_null_class_loader_data() { 89 
assert(_the_null_class_loader_data == NULL, "cannot initialize twice"); 90 assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice"); 91 92 _the_null_class_loader_data = new ClassLoaderData(Handle(), false); 93 ClassLoaderDataGraph::_head = _the_null_class_loader_data; 94 assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be"); 95 96 LogTarget(Debug, class, loader, data) lt; 97 if (lt.is_enabled()) { 98 ResourceMark rm; 99 LogStream ls(lt); 100 ls.print("create "); 101 _the_null_class_loader_data->print_value_on(&ls); 102 ls.cr(); 103 } 104 } 105 106 ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous) : 107 _class_loader(h_class_loader()), 108 _is_anonymous(is_anonymous), 109 // An anonymous class loader data doesn't have anything to keep 110 // it from being unloaded during parsing of the anonymous class. 111 // The null-class-loader should always be kept alive. 112 _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0), 113 _metaspace(NULL), _unloading(false), _klasses(NULL), 114 _modules(NULL), _packages(NULL), 115 _claimed(0), _modified_oops(true), _accumulated_modified_oops(false), 116 _jmethod_ids(NULL), _handles(), _deallocate_list(NULL), 117 _next(NULL), 118 _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true, 119 Monitor::_safepoint_check_never)) { 120 121 // A ClassLoaderData created solely for an anonymous class should never have a 122 // ModuleEntryTable or PackageEntryTable created for it. The defining package 123 // and module for an anonymous class will be found in its host class. 
124 if (!is_anonymous) { 125 _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size); 126 if (h_class_loader.is_null()) { 127 // Create unnamed module for boot loader 128 _unnamed_module = ModuleEntry::create_boot_unnamed_module(this); 129 } else { 130 // Create unnamed module for all other loaders 131 _unnamed_module = ModuleEntry::create_unnamed_module(this); 132 } 133 _dictionary = create_dictionary(); 134 } else { 135 _packages = NULL; 136 _unnamed_module = NULL; 137 _dictionary = NULL; 138 } 139 140 NOT_PRODUCT(_dependency_count = 0); // number of class loader dependencies 141 142 TRACE_INIT_ID(this); 143 } 144 145 ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() { 146 Chunk* c = _head; 147 while (c != NULL) { 148 Chunk* next = c->_next; 149 delete c; 150 c = next; 151 } 152 } 153 154 oop* ClassLoaderData::ChunkedHandleList::add(oop o) { 155 if (_head == NULL || _head->_size == Chunk::CAPACITY) { 156 Chunk* next = new Chunk(_head); 157 OrderAccess::release_store(&_head, next); 158 } 159 oop* handle = &_head->_data[_head->_size]; 160 *handle = o; 161 OrderAccess::release_store(&_head->_size, _head->_size + 1); 162 return handle; 163 } 164 165 int ClassLoaderData::ChunkedHandleList::count() const { 166 int count = 0; 167 Chunk* chunk = _head; 168 while (chunk != NULL) { 169 count += chunk->_size; 170 chunk = chunk->_next; 171 } 172 return count; 173 } 174 175 inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) { 176 for (juint i = 0; i < size; i++) { 177 if (c->_data[i] != NULL) { 178 f->do_oop(&c->_data[i]); 179 } 180 } 181 } 182 183 void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) { 184 Chunk* head = OrderAccess::load_acquire(&_head); 185 if (head != NULL) { 186 // Must be careful when reading size of head 187 oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size)); 188 for (Chunk* c = head->_next; c != NULL; c = c->_next) { 189 oops_do_chunk(f, c, 
c->_size); 190 } 191 } 192 } 193 194 class VerifyContainsOopClosure : public OopClosure { 195 oop _target; 196 bool _found; 197 198 public: 199 VerifyContainsOopClosure(oop target) : _target(target), _found(false) {} 200 201 void do_oop(oop* p) { 202 if (p != NULL && *p == _target) { 203 _found = true; 204 } 205 } 206 207 void do_oop(narrowOop* p) { 208 // The ChunkedHandleList should not contain any narrowOop 209 ShouldNotReachHere(); 210 } 211 212 bool found() const { 213 return _found; 214 } 215 }; 216 217 bool ClassLoaderData::ChunkedHandleList::contains(oop p) { 218 VerifyContainsOopClosure cl(p); 219 oops_do(&cl); 220 return cl.found(); 221 } 222 223 bool ClassLoaderData::claim() { 224 if (_claimed == 1) { 225 return false; 226 } 227 228 return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0; 229 } 230 231 // Anonymous classes have their own ClassLoaderData that is marked to keep alive 232 // while the class is being parsed, and if the class appears on the module fixup list. 233 // Due to the uniqueness that no other class shares the anonymous class' name or 234 // ClassLoaderData, no other non-GC thread has knowledge of the anonymous class while 235 // it is being defined, therefore _keep_alive is not volatile or atomic. 236 void ClassLoaderData::inc_keep_alive() { 237 if (is_anonymous()) { 238 assert(_keep_alive >= 0, "Invalid keep alive increment count"); 239 _keep_alive++; 240 } 241 } 242 243 void ClassLoaderData::dec_keep_alive() { 244 if (is_anonymous()) { 245 assert(_keep_alive > 0, "Invalid keep alive decrement count"); 246 _keep_alive--; 247 } 248 } 249 250 void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) { 251 if (must_claim && !claim()) { 252 return; 253 } 254 255 // Only clear modified_oops after the ClassLoaderData is claimed. 
256 if (clear_mod_oops) { 257 clear_modified_oops(); 258 } 259 260 f->do_oop(&_class_loader); 261 _handles.oops_do(f); 262 } 263 264 void ClassLoaderData::classes_do(KlassClosure* klass_closure) { 265 // Lock-free access requires load_acquire 266 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) { 267 klass_closure->do_klass(k); 268 assert(k != k->next_link(), "no loops!"); 269 } 270 } 271 272 void ClassLoaderData::classes_do(void f(Klass * const)) { 273 // Lock-free access requires load_acquire 274 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) { 275 f(k); 276 assert(k != k->next_link(), "no loops!"); 277 } 278 } 279 280 void ClassLoaderData::methods_do(void f(Method*)) { 281 // Lock-free access requires load_acquire 282 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) { 283 if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) { 284 InstanceKlass::cast(k)->methods_do(f); 285 } 286 } 287 } 288 289 void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) { 290 // Lock-free access requires load_acquire 291 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) { 292 // Do not filter ArrayKlass oops here... 
293 if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) { 294 klass_closure->do_klass(k); 295 } 296 } 297 } 298 299 void ClassLoaderData::classes_do(void f(InstanceKlass*)) { 300 // Lock-free access requires load_acquire 301 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) { 302 if (k->is_instance_klass()) { 303 f(InstanceKlass::cast(k)); 304 } 305 assert(k != k->next_link(), "no loops!"); 306 } 307 } 308 309 void ClassLoaderData::modules_do(void f(ModuleEntry*)) { 310 assert_locked_or_safepoint(Module_lock); 311 if (_unnamed_module != NULL) { 312 f(_unnamed_module); 313 } 314 if (_modules != NULL) { 315 for (int i = 0; i < _modules->table_size(); i++) { 316 for (ModuleEntry* entry = _modules->bucket(i); 317 entry != NULL; 318 entry = entry->next()) { 319 f(entry); 320 } 321 } 322 } 323 } 324 325 void ClassLoaderData::packages_do(void f(PackageEntry*)) { 326 assert_locked_or_safepoint(Module_lock); 327 if (_packages != NULL) { 328 for (int i = 0; i < _packages->table_size(); i++) { 329 for (PackageEntry* entry = _packages->bucket(i); 330 entry != NULL; 331 entry = entry->next()) { 332 f(entry); 333 } 334 } 335 } 336 } 337 338 void ClassLoaderData::record_dependency(const Klass* k) { 339 assert(k != NULL, "invariant"); 340 341 ClassLoaderData * const from_cld = this; 342 ClassLoaderData * const to_cld = k->class_loader_data(); 343 344 // Do not need to record dependency if the dependency is to a class whose 345 // class loader data is never freed. (i.e. the dependency's class loader 346 // is one of the three builtin class loaders and the dependency is not 347 // anonymous.) 348 if (to_cld->is_permanent_class_loader_data()) { 349 return; 350 } 351 352 oop to; 353 if (to_cld->is_anonymous()) { 354 // Just return if an anonymous class is attempting to record a dependency 355 // to itself. (Note that every anonymous class has its own unique class 356 // loader data.) 
357 if (to_cld == from_cld) { 358 return; 359 } 360 // Anonymous class dependencies are through the mirror. 361 to = k->java_mirror(); 362 } else { 363 to = to_cld->class_loader(); 364 oop from = from_cld->class_loader(); 365 366 // Just return if this dependency is to a class with the same or a parent 367 // class_loader. 368 if (from == to || java_lang_ClassLoader::isAncestor(from, to)) { 369 return; // this class loader is in the parent list, no need to add it. 370 } 371 } 372 373 // It's a dependency we won't find through GC, add it. 374 if (!_handles.contains(to)) { 375 NOT_PRODUCT(Atomic::inc(&_dependency_count)); 376 LogTarget(Trace, class, loader, data) lt; 377 if (lt.is_enabled()) { 378 ResourceMark rm; 379 LogStream ls(lt); 380 ls.print("adding dependency from "); 381 print_value_on(&ls); 382 ls.print(" to "); 383 to_cld->print_value_on(&ls); 384 ls.cr(); 385 } 386 Handle dependency(Thread::current(), to); 387 add_handle(dependency); 388 // Added a potentially young gen oop to the ClassLoaderData 389 record_modified_oops(); 390 } 391 } 392 393 394 void ClassLoaderDataGraph::clear_claimed_marks() { 395 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 396 cld->clear_claimed(); 397 } 398 } 399 400 void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) { 401 { 402 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag); 403 Klass* old_value = _klasses; 404 k->set_next_link(old_value); 405 // Link the new item into the list, making sure the linked class is stable 406 // since the list can be walked without a lock 407 OrderAccess::release_store(&_klasses, k); 408 if (k->is_array_klass()) { 409 ClassLoaderDataGraph::inc_array_classes(1); 410 } else { 411 ClassLoaderDataGraph::inc_instance_classes(1); 412 } 413 } 414 415 if (publicize) { 416 LogTarget(Trace, class, loader, data) lt; 417 if (lt.is_enabled()) { 418 ResourceMark rm; 419 LogStream ls(lt); 420 ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), 
k->external_name()); 421 print_value_on(&ls); 422 ls.cr(); 423 } 424 } 425 } 426 427 // Class iterator used by the compiler. It gets some number of classes at 428 // a safepoint to decay invocation counters on the methods. 429 class ClassLoaderDataGraphKlassIteratorStatic { 430 ClassLoaderData* _current_loader_data; 431 Klass* _current_class_entry; 432 public: 433 434 ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {} 435 436 InstanceKlass* try_get_next_class() { 437 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint"); 438 size_t max_classes = ClassLoaderDataGraph::num_instance_classes(); 439 assert(max_classes > 0, "should not be called with no instance classes"); 440 for (size_t i = 0; i < max_classes; ) { 441 442 if (_current_class_entry != NULL) { 443 Klass* k = _current_class_entry; 444 _current_class_entry = _current_class_entry->next_link(); 445 446 if (k->is_instance_klass()) { 447 InstanceKlass* ik = InstanceKlass::cast(k); 448 i++; // count all instance classes found 449 // Not yet loaded classes are counted in max_classes 450 // but only return loaded classes. 451 if (ik->is_loaded()) { 452 return ik; 453 } 454 } 455 } else { 456 // Go to next CLD 457 if (_current_loader_data != NULL) { 458 _current_loader_data = _current_loader_data->next(); 459 } 460 // Start at the beginning 461 if (_current_loader_data == NULL) { 462 _current_loader_data = ClassLoaderDataGraph::_head; 463 } 464 465 _current_class_entry = _current_loader_data->klasses(); 466 } 467 } 468 // Should never be reached unless all instance classes have failed or are not fully loaded. 469 // Caller handles NULL. 470 return NULL; 471 } 472 473 // If the current class for the static iterator is a class being unloaded or 474 // deallocated, adjust the current class. 
475 void adjust_saved_class(ClassLoaderData* cld) { 476 if (_current_loader_data == cld) { 477 _current_loader_data = cld->next(); 478 if (_current_loader_data != NULL) { 479 _current_class_entry = _current_loader_data->klasses(); 480 } // else try_get_next_class will start at the head 481 } 482 } 483 484 void adjust_saved_class(Klass* klass) { 485 if (_current_class_entry == klass) { 486 _current_class_entry = klass->next_link(); 487 } 488 } 489 }; 490 491 static ClassLoaderDataGraphKlassIteratorStatic static_klass_iterator; 492 493 InstanceKlass* ClassLoaderDataGraph::try_get_next_class() { 494 return static_klass_iterator.try_get_next_class(); 495 } 496 497 498 // Remove a klass from the _klasses list for scratch_class during redefinition 499 // or parsed class in the case of an error. 500 void ClassLoaderData::remove_class(Klass* scratch_class) { 501 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint"); 502 503 // Adjust global class iterator. 504 static_klass_iterator.adjust_saved_class(scratch_class); 505 506 Klass* prev = NULL; 507 for (Klass* k = _klasses; k != NULL; k = k->next_link()) { 508 if (k == scratch_class) { 509 if (prev == NULL) { 510 _klasses = k->next_link(); 511 } else { 512 Klass* next = k->next_link(); 513 prev->set_next_link(next); 514 } 515 516 if (k->is_array_klass()) { 517 ClassLoaderDataGraph::dec_array_classes(1); 518 } else { 519 ClassLoaderDataGraph::dec_instance_classes(1); 520 } 521 522 return; 523 } 524 prev = k; 525 assert(k != k->next_link(), "no loops!"); 526 } 527 ShouldNotReachHere(); // should have found this class!! 
528 } 529 530 void ClassLoaderData::unload() { 531 _unloading = true; 532 533 // Tell serviceability tools these classes are unloading 534 classes_do(InstanceKlass::notify_unload_class); 535 536 LogTarget(Debug, class, loader, data) lt; 537 if (lt.is_enabled()) { 538 ResourceMark rm; 539 LogStream ls(lt); 540 ls.print("unload "); 541 print_value_on(&ls); 542 ls.cr(); 543 } 544 545 // Some items on the _deallocate_list need to free their C heap structures 546 // if they are not already on the _klasses list. 547 unload_deallocate_list(); 548 549 // Clean up global class iterator for compiler 550 static_klass_iterator.adjust_saved_class(this); 551 } 552 553 ModuleEntryTable* ClassLoaderData::modules() { 554 // Lazily create the module entry table at first request. 555 // Lock-free access requires load_acquire. 556 ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules); 557 if (modules == NULL) { 558 MutexLocker m1(Module_lock); 559 // Check if _modules got allocated while we were waiting for this lock. 
560 if ((modules = _modules) == NULL) { 561 modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size); 562 563 { 564 MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag); 565 // Ensure _modules is stable, since it is examined without a lock 566 OrderAccess::release_store(&_modules, modules); 567 } 568 } 569 } 570 return modules; 571 } 572 573 const int _boot_loader_dictionary_size = 1009; 574 const int _default_loader_dictionary_size = 107; 575 576 Dictionary* ClassLoaderData::create_dictionary() { 577 assert(!is_anonymous(), "anonymous class loader data do not have a dictionary"); 578 int size; 579 bool resizable = false; 580 if (_the_null_class_loader_data == NULL) { 581 size = _boot_loader_dictionary_size; 582 resizable = true; 583 } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) { 584 size = 1; // there's only one class in relection class loader and no initiated classes 585 } else if (is_system_class_loader_data()) { 586 size = _boot_loader_dictionary_size; 587 resizable = true; 588 } else { 589 size = _default_loader_dictionary_size; 590 resizable = true; 591 } 592 if (!DynamicallyResizeSystemDictionaries || DumpSharedSpaces || UseSharedSpaces) { 593 resizable = false; 594 } 595 return new Dictionary(this, size, resizable); 596 } 597 598 // Unloading support 599 oop ClassLoaderData::keep_alive_object() const { 600 assert_locked_or_safepoint(_metaspace_lock); 601 assert(!keep_alive(), "Don't use with CLDs that are artificially kept alive"); 602 return is_anonymous() ? _klasses->java_mirror() : class_loader(); 603 } 604 605 bool ClassLoaderData::is_alive(BoolObjectClosure* is_alive_closure) const { 606 bool alive = keep_alive() // null class loader and incomplete anonymous klasses. 
607 || is_alive_closure->do_object_b(keep_alive_object()); 608 609 return alive; 610 } 611 612 class ReleaseKlassClosure: public KlassClosure { 613 private: 614 size_t _instance_class_released; 615 size_t _array_class_released; 616 public: 617 ReleaseKlassClosure() : _instance_class_released(0), _array_class_released(0) { } 618 619 size_t instance_class_released() const { return _instance_class_released; } 620 size_t array_class_released() const { return _array_class_released; } 621 622 void do_klass(Klass* k) { 623 if (k->is_array_klass()) { 624 _array_class_released ++; 625 } else { 626 assert(k->is_instance_klass(), "Must be"); 627 _instance_class_released ++; 628 InstanceKlass::release_C_heap_structures(InstanceKlass::cast(k)); 629 } 630 } 631 }; 632 633 ClassLoaderData::~ClassLoaderData() { 634 // Release C heap structures for all the classes. 635 ReleaseKlassClosure cl; 636 classes_do(&cl); 637 638 ClassLoaderDataGraph::dec_array_classes(cl.array_class_released()); 639 ClassLoaderDataGraph::dec_instance_classes(cl.instance_class_released()); 640 641 // Release C heap allocated hashtable for all the packages. 642 if (_packages != NULL) { 643 // Destroy the table itself 644 delete _packages; 645 _packages = NULL; 646 } 647 648 // Release C heap allocated hashtable for all the modules. 
649 if (_modules != NULL) { 650 // Destroy the table itself 651 delete _modules; 652 _modules = NULL; 653 } 654 655 // Release C heap allocated hashtable for the dictionary 656 if (_dictionary != NULL) { 657 // Destroy the table itself 658 delete _dictionary; 659 _dictionary = NULL; 660 } 661 662 if (_unnamed_module != NULL) { 663 _unnamed_module->delete_unnamed_module(); 664 _unnamed_module = NULL; 665 } 666 667 // release the metaspace 668 Metaspace *m = _metaspace; 669 if (m != NULL) { 670 _metaspace = NULL; 671 delete m; 672 } 673 // Clear all the JNI handles for methods 674 // These aren't deallocated and are going to look like a leak, but that's 675 // needed because we can't really get rid of jmethodIDs because we don't 676 // know when native code is going to stop using them. The spec says that 677 // they're "invalid" but existing programs likely rely on their being 678 // NULL after class unloading. 679 if (_jmethod_ids != NULL) { 680 Method::clear_jmethod_ids(this); 681 } 682 // Delete lock 683 delete _metaspace_lock; 684 685 // Delete free list 686 if (_deallocate_list != NULL) { 687 delete _deallocate_list; 688 } 689 } 690 691 // Returns true if this class loader data is for the app class loader 692 // or a user defined system class loader. (Note that the class loader 693 // data may be anonymous.) 694 bool ClassLoaderData::is_system_class_loader_data() const { 695 return SystemDictionary::is_system_class_loader(class_loader()); 696 } 697 698 // Returns true if this class loader data is for the platform class loader. 699 // (Note that the class loader data may be anonymous.) 700 bool ClassLoaderData::is_platform_class_loader_data() const { 701 return SystemDictionary::is_platform_class_loader(class_loader()); 702 } 703 704 // Returns true if the class loader for this class loader data is one of 705 // the 3 builtin (boot application/system or platform) class loaders, 706 // including a user-defined system class loader. 
Note that if the class 707 // loader data is for an anonymous class then it may get freed by a GC 708 // even if its class loader is one of these loaders. 709 bool ClassLoaderData::is_builtin_class_loader_data() const { 710 return (is_boot_class_loader_data() || 711 SystemDictionary::is_system_class_loader(class_loader()) || 712 SystemDictionary::is_platform_class_loader(class_loader())); 713 } 714 715 // Returns true if this class loader data is a class loader data 716 // that is not ever freed by a GC. It must be one of the builtin 717 // class loaders and not anonymous. 718 bool ClassLoaderData::is_permanent_class_loader_data() const { 719 return is_builtin_class_loader_data() && !is_anonymous(); 720 } 721 722 Metaspace* ClassLoaderData::metaspace_non_null() { 723 // If the metaspace has not been allocated, create a new one. Might want 724 // to create smaller arena for Reflection class loaders also. 725 // The reason for the delayed allocation is because some class loaders are 726 // simply for delegating with no metadata of their own. 727 // Lock-free access requires load_acquire. 728 Metaspace* metaspace = OrderAccess::load_acquire(&_metaspace); 729 if (metaspace == NULL) { 730 MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag); 731 // Check if _metaspace got allocated while we were waiting for this lock. 
732 if ((metaspace = _metaspace) == NULL) { 733 if (this == the_null_class_loader_data()) { 734 assert (class_loader() == NULL, "Must be"); 735 metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType); 736 } else if (is_anonymous()) { 737 metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType); 738 } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) { 739 metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType); 740 } else { 741 metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType); 742 } 743 // Ensure _metaspace is stable, since it is examined without a lock 744 OrderAccess::release_store(&_metaspace, metaspace); 745 } 746 } 747 return metaspace; 748 } 749 750 OopHandle ClassLoaderData::add_handle(Handle h) { 751 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag); 752 record_modified_oops(); 753 return OopHandle(_handles.add(h())); 754 } 755 756 void ClassLoaderData::remove_handle(OopHandle h) { 757 assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading"); 758 oop* ptr = h.ptr_raw(); 759 if (ptr != NULL) { 760 assert(_handles.contains(*ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr)); 761 // This root is not walked in safepoints, and hence requires an appropriate 762 // decorator that e.g. maintains the SATB invariant in SATB collectors. 763 RootAccess<IN_CONCURRENT_ROOT>::oop_store(ptr, oop(NULL)); 764 } 765 } 766 767 void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) { 768 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag); 769 if (dest.resolve() != NULL) { 770 return; 771 } else { 772 dest = _handles.add(h()); 773 } 774 } 775 776 // Add this metadata pointer to be freed when it's safe. This is only during 777 // class unloading because Handles might point to this metadata field. 
778 void ClassLoaderData::add_to_deallocate_list(Metadata* m) { 779 // Metadata in shared region isn't deleted. 780 if (!m->is_shared()) { 781 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag); 782 if (_deallocate_list == NULL) { 783 _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true); 784 } 785 _deallocate_list->append_if_missing(m); 786 } 787 } 788 789 // Deallocate free metadata on the free list. How useful the PermGen was! 790 void ClassLoaderData::free_deallocate_list() { 791 // Don't need lock, at safepoint 792 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint"); 793 assert(!is_unloading(), "only called for ClassLoaderData that are not unloading"); 794 if (_deallocate_list == NULL) { 795 return; 796 } 797 // Go backwards because this removes entries that are freed. 798 for (int i = _deallocate_list->length() - 1; i >= 0; i--) { 799 Metadata* m = _deallocate_list->at(i); 800 if (!m->on_stack()) { 801 _deallocate_list->remove_at(i); 802 // There are only three types of metadata that we deallocate directly. 803 // Cast them so they can be used by the template function. 804 if (m->is_method()) { 805 MetadataFactory::free_metadata(this, (Method*)m); 806 } else if (m->is_constantPool()) { 807 MetadataFactory::free_metadata(this, (ConstantPool*)m); 808 } else if (m->is_klass()) { 809 MetadataFactory::free_metadata(this, (InstanceKlass*)m); 810 } else { 811 ShouldNotReachHere(); 812 } 813 } else { 814 // Metadata is alive. 815 // If scratch_class is on stack then it shouldn't be on this list! 816 assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(), 817 "scratch classes on this list should be dead"); 818 // Also should assert that other metadata on the list was found in handles. 819 } 820 } 821 } 822 823 // This is distinct from free_deallocate_list. 
// For class loader data that are unloading, this frees the C heap memory for
// items on the deallocate list, and unlinks scratch or error classes so that
// unloading events aren't triggered for these classes. The metadata itself is
// removed with the unloading metaspace.
// There isn't C heap memory allocated for methods, so nothing is done for them.
void ClassLoaderData::unload_deallocate_list() {
  // Don't need the metaspace lock: only called at a safepoint, so no
  // concurrent mutation of _deallocate_list is possible.
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(is_unloading(), "only called for ClassLoaderData that are unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    assert(!m->on_stack(), "wouldn't be unloading if this were so");
    _deallocate_list->remove_at(i);
    if (m->is_constantPool()) {
      ((ConstantPool*)m)->release_C_heap_structures();
    } else if (m->is_klass()) {
      InstanceKlass* ik = (InstanceKlass*)m;
      // also releases ik->constants() C heap memory
      InstanceKlass::release_C_heap_structures(ik);
      // Remove the class so unloading events aren't triggered for
      // this class (scratch or error class) in do_unloading().
      remove_class(ik);
    }
  }
}

// These anonymous class loaders are to contain classes used for JSR292
ClassLoaderData* ClassLoaderData::anonymous_class_loader_data(Handle loader) {
  // Add a new class loader data to the graph.
  return ClassLoaderDataGraph::add(loader, true);
}

// Returns a printable name for this loader's class loader oop.
// Handles the NULL (bootstrap) class loader.
const char* ClassLoaderData::loader_name() const {
  return SystemDictionary::loader_name(class_loader());
}


void ClassLoaderData::print_value_on(outputStream* out) const {
  if (class_loader() != NULL) {
    out->print("loader data: " INTPTR_FORMAT " for instance ", p2i(this));
    class_loader()->print_value_on(out);  // includes loader_name() and address of class loader instance
  } else {
    // loader data: 0xsomeaddr of <bootloader>
    out->print("loader data: " INTPTR_FORMAT " of %s", p2i(this), loader_name());
  }
  if (is_anonymous()) {
    out->print(" anonymous");
  }
}

#ifndef PRODUCT
void ClassLoaderData::print_on(outputStream* out) const {
  out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: %s {",
      p2i(this), p2i((void *)class_loader()), loader_name());
  if (is_anonymous()) out->print(" anonymous");
  if (claimed()) out->print(" claimed");
  if (is_unloading()) out->print(" unloading");
  out->print(" metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));

  if (_jmethod_ids != NULL) {
    Method::print_jmethod_ids(this, out);
  }
  out->print(" handles count %d", _handles.count());
  out->print(" dependencies %d", _dependency_count);
  out->print_cr("}");
}
#endif // PRODUCT

void ClassLoaderData::verify() {
  assert_locked_or_safepoint(_metaspace_lock);
  oop cl = class_loader();

  guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
  guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");

  // Verify the integrity of the allocated space.
  if (metaspace_or_null() != NULL) {
    metaspace_or_null()->verify();
  }

  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    guarantee(k->class_loader_data() == this, "Must be the same");
    k->verify();
    assert(k != k->next_link(), "no loops!");
  }
}

bool ClassLoaderData::contains_klass(Klass* klass) {
  // Lock-free access requires load_acquire: pairs with the releasing store
  // done when a klass is published on the _klasses list.
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k == klass) return true;
  }
  return false;
}


// GC root of class loader data created.
ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;

bool ClassLoaderDataGraph::_should_purge = false;
bool ClassLoaderDataGraph::_metaspace_oom = false;

// Add a new class loader data node to the list.  Assign the newly created
// ClassLoaderData into the java/lang/ClassLoader object as a hidden field
ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous) {
  NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
                                     // ClassLoaderData in the graph since the CLD
                                     // contains unhandled oops

  ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous);


  if (!is_anonymous) {
    // First, atomically claim the java.lang.ClassLoader's loader_data field.
    // If another thread beat us to it, discard our CLD and use the winner's.
    ClassLoaderData* old = java_lang_ClassLoader::cmpxchg_loader_data(cld, loader(), NULL);
    if (old != NULL) {
      delete cld;
      // Returns the data.
      return old;
    }
  }

  // We won the race, and therefore the task of adding the data to the list of
  // class loader data
  ClassLoaderData** list_head = &_head;
  ClassLoaderData* next = _head;

  // Lock-free push onto the head of the graph list via CAS; retry on contention.
  do {
    cld->set_next(next);
    ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
    if (exchanged == next) {
      LogTarget(Debug, class, loader, data) lt;
      if (lt.is_enabled()) {
        ResourceMark rm;
        LogStream ls(lt);
        ls.print("create ");
        cld->print_value_on(&ls);
        ls.cr();
      }
      return cld;
    }
    next = exchanged;
  } while (true);
}

void ClassLoaderDataGraph::oops_do(OopClosure* f, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->oops_do(f, must_claim);
  }
}

// Like oops_do, but only visits CLDs that must be kept alive
// (e.g. the bootstrap loader and anonymous classes' holders).
void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    if (cld->keep_alive()) {
      cld->oops_do(f, must_claim);
    }
  }
}

// If class unloading is disabled, every CLD is strongly reachable.
void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, bool must_claim) {
  if (ClassUnloading) {
    keep_alive_oops_do(f, must_claim);
  } else {
    oops_do(f, must_claim);
  }
}

void ClassLoaderDataGraph::cld_do(CLDClosure* cl) {
  for (ClassLoaderData* cld = _head; cl != NULL && cld != NULL; cld = cld->next()) {
    cl->do_cld(cld);
  }
}

void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cl->do_cld(cld);
  }
}

// Apply 'strong' to CLDs that must stay alive and 'weak' to the rest;
// either closure may be NULL to skip that category.
void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) {
  for (ClassLoaderData* cld = _head;  cld != NULL; cld = cld->_next) {
    CLDClosure* closure = cld->keep_alive() ? strong : weak;
    if (closure != NULL) {
      closure->do_cld(cld);
    }
  }
}

void ClassLoaderDataGraph::keep_alive_cld_do(CLDClosure* cl) {
  roots_cld_do(cl, NULL);
}

void ClassLoaderDataGraph::always_strong_cld_do(CLDClosure* cl) {
  if (ClassUnloading) {
    keep_alive_cld_do(cl);
  } else {
    cld_do(cl);
  }
}

void ClassLoaderDataGraph::classes_do(KlassClosure* klass_closure) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->classes_do(klass_closure);
  }
}

void ClassLoaderDataGraph::classes_do(void f(Klass* const)) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->classes_do(f);
  }
}

void ClassLoaderDataGraph::methods_do(void f(Method*)) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->methods_do(f);
  }
}

void ClassLoaderDataGraph::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->modules_do(f);
  }
}

void ClassLoaderDataGraph::modules_unloading_do(void f(ModuleEntry*)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->modules_do(f);
  }
}

void ClassLoaderDataGraph::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->packages_do(f);
  }
}

void ClassLoaderDataGraph::packages_unloading_do(void f(PackageEntry*)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->packages_do(f);
  }
}

void ClassLoaderDataGraph::loaded_classes_do(KlassClosure* klass_closure) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->loaded_classes_do(klass_closure);
  }
}

void ClassLoaderDataGraph::classes_unloading_do(void f(Klass* const)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->classes_do(f);
  }
}

// Iterate over all CLDs in the graph that have a dictionary (i.e. define classes).
#define FOR_ALL_DICTIONARY(X) for (ClassLoaderData* X = _head; X != NULL; X = X->next()) \
                                if (X->dictionary() != NULL)

// Walk classes in the loaded class dictionaries in various forms.
// Only walks the classes defined in this class loader.
1108 void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*)) { 1109 FOR_ALL_DICTIONARY(cld) { 1110 cld->dictionary()->classes_do(f); 1111 } 1112 } 1113 1114 // Only walks the classes defined in this class loader. 1115 void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*, TRAPS), TRAPS) { 1116 FOR_ALL_DICTIONARY(cld) { 1117 cld->dictionary()->classes_do(f, CHECK); 1118 } 1119 } 1120 1121 // Walks all entries in the dictionary including entries initiated by this class loader. 1122 void ClassLoaderDataGraph::dictionary_all_entries_do(void f(InstanceKlass*, ClassLoaderData*)) { 1123 FOR_ALL_DICTIONARY(cld) { 1124 cld->dictionary()->all_entries_do(f); 1125 } 1126 } 1127 1128 void ClassLoaderDataGraph::verify_dictionary() { 1129 FOR_ALL_DICTIONARY(cld) { 1130 cld->dictionary()->verify(); 1131 } 1132 } 1133 1134 void ClassLoaderDataGraph::print_dictionary(outputStream* st) { 1135 FOR_ALL_DICTIONARY(cld) { 1136 st->print("Dictionary for "); 1137 cld->print_value_on(st); 1138 st->cr(); 1139 cld->dictionary()->print_on(st); 1140 st->cr(); 1141 } 1142 } 1143 1144 void ClassLoaderDataGraph::print_dictionary_statistics(outputStream* st) { 1145 FOR_ALL_DICTIONARY(cld) { 1146 ResourceMark rm; 1147 stringStream tempst; 1148 tempst.print("System Dictionary for %s", cld->loader_name()); 1149 cld->dictionary()->print_table_statistics(st, tempst.as_string()); 1150 } 1151 } 1152 1153 GrowableArray<ClassLoaderData*>* ClassLoaderDataGraph::new_clds() { 1154 assert(_head == NULL || _saved_head != NULL, "remember_new_clds(true) not called?"); 1155 1156 GrowableArray<ClassLoaderData*>* array = new GrowableArray<ClassLoaderData*>(); 1157 1158 // The CLDs in [_head, _saved_head] were all added during last call to remember_new_clds(true); 1159 ClassLoaderData* curr = _head; 1160 while (curr != _saved_head) { 1161 if (!curr->claimed()) { 1162 array->push(curr); 1163 LogTarget(Debug, class, loader, data) lt; 1164 if (lt.is_enabled()) { 1165 LogStream ls(lt); 
1166 ls.print("found new CLD: "); 1167 curr->print_value_on(&ls); 1168 ls.cr(); 1169 } 1170 } 1171 1172 curr = curr->_next; 1173 } 1174 1175 return array; 1176 } 1177 1178 bool ClassLoaderDataGraph::unload_list_contains(const void* x) { 1179 assert(SafepointSynchronize::is_at_safepoint(), "only safe to call at safepoint"); 1180 for (ClassLoaderData* cld = _unloading; cld != NULL; cld = cld->next()) { 1181 if (cld->metaspace_or_null() != NULL && cld->metaspace_or_null()->contains(x)) { 1182 return true; 1183 } 1184 } 1185 return false; 1186 } 1187 1188 #ifndef PRODUCT 1189 bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) { 1190 for (ClassLoaderData* data = _head; data != NULL; data = data->next()) { 1191 if (loader_data == data) { 1192 return true; 1193 } 1194 } 1195 1196 return false; 1197 } 1198 #endif // PRODUCT 1199 1200 1201 // Move class loader data from main list to the unloaded list for unloading 1202 // and deallocation later. 1203 bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure, 1204 bool clean_previous_versions) { 1205 1206 ClassLoaderData* data = _head; 1207 ClassLoaderData* prev = NULL; 1208 bool seen_dead_loader = false; 1209 1210 // Mark metadata seen on the stack only so we can delete unneeded entries. 1211 // Only walk all metadata, including the expensive code cache walk, for Full GC 1212 // and only if class redefinition and if there's previous versions of 1213 // Klasses to delete. 1214 bool walk_all_metadata = clean_previous_versions && 1215 JvmtiExport::has_redefined_a_class() && 1216 InstanceKlass::has_previous_versions_and_reset(); 1217 MetadataOnStackMark md_on_stack(walk_all_metadata); 1218 1219 // Save previous _unloading pointer for CMS which may add to unloading list before 1220 // purging and we don't want to rewalk the previously unloaded class loader data. 
1221 _saved_unloading = _unloading; 1222 1223 data = _head; 1224 while (data != NULL) { 1225 if (data->is_alive(is_alive_closure)) { 1226 // clean metaspace 1227 if (walk_all_metadata) { 1228 data->classes_do(InstanceKlass::purge_previous_versions); 1229 } 1230 data->free_deallocate_list(); 1231 prev = data; 1232 data = data->next(); 1233 continue; 1234 } 1235 seen_dead_loader = true; 1236 ClassLoaderData* dead = data; 1237 dead->unload(); 1238 data = data->next(); 1239 // Remove from loader list. 1240 // This class loader data will no longer be found 1241 // in the ClassLoaderDataGraph. 1242 if (prev != NULL) { 1243 prev->set_next(data); 1244 } else { 1245 assert(dead == _head, "sanity check"); 1246 _head = data; 1247 } 1248 dead->set_next(_unloading); 1249 _unloading = dead; 1250 } 1251 1252 if (seen_dead_loader) { 1253 data = _head; 1254 while (data != NULL) { 1255 // Remove entries in the dictionary of live class loader that have 1256 // initiated loading classes in a dead class loader. 1257 if (data->dictionary() != NULL) { 1258 data->dictionary()->do_unloading(is_alive_closure); 1259 } 1260 // Walk a ModuleEntry's reads, and a PackageEntry's exports 1261 // lists to determine if there are modules on those lists that are now 1262 // dead and should be removed. A module's life cycle is equivalent 1263 // to its defining class loader's life cycle. Since a module is 1264 // considered dead if its class loader is dead, these walks must 1265 // occur after each class loader's aliveness is determined. 
1266 if (data->packages() != NULL) { 1267 data->packages()->purge_all_package_exports(); 1268 } 1269 if (data->modules_defined()) { 1270 data->modules()->purge_all_module_reads(); 1271 } 1272 data = data->next(); 1273 } 1274 1275 post_class_unload_events(); 1276 } 1277 1278 return seen_dead_loader; 1279 } 1280 1281 void ClassLoaderDataGraph::purge() { 1282 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1283 ClassLoaderData* list = _unloading; 1284 _unloading = NULL; 1285 ClassLoaderData* next = list; 1286 bool classes_unloaded = false; 1287 while (next != NULL) { 1288 ClassLoaderData* purge_me = next; 1289 next = purge_me->next(); 1290 delete purge_me; 1291 classes_unloaded = true; 1292 } 1293 if (classes_unloaded) { 1294 Metaspace::purge(); 1295 set_metaspace_oom(false); 1296 } 1297 } 1298 1299 int ClassLoaderDataGraph::resize_if_needed() { 1300 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1301 int resized = 0; 1302 if (Dictionary::does_any_dictionary_needs_resizing()) { 1303 FOR_ALL_DICTIONARY(cld) { 1304 if (cld->dictionary()->resize_if_needed()) { 1305 resized++; 1306 } 1307 } 1308 } 1309 return resized; 1310 } 1311 1312 void ClassLoaderDataGraph::post_class_unload_events() { 1313 #if INCLUDE_TRACE 1314 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1315 if (Tracing::enabled()) { 1316 if (Tracing::is_event_enabled(TraceClassUnloadEvent)) { 1317 assert(_unloading != NULL, "need class loader data unload list!"); 1318 _class_unload_time = Ticks::now(); 1319 classes_unloading_do(&class_unload_event); 1320 } 1321 Tracing::on_unloading_classes(); 1322 } 1323 #endif 1324 } 1325 1326 ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic() 1327 : _next_klass(NULL) { 1328 ClassLoaderData* cld = ClassLoaderDataGraph::_head; 1329 Klass* klass = NULL; 1330 1331 // Find the first klass in the CLDG. 
1332 while (cld != NULL) { 1333 assert_locked_or_safepoint(cld->metaspace_lock()); 1334 klass = cld->_klasses; 1335 if (klass != NULL) { 1336 _next_klass = klass; 1337 return; 1338 } 1339 cld = cld->next(); 1340 } 1341 } 1342 1343 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) { 1344 Klass* next = klass->next_link(); 1345 if (next != NULL) { 1346 return next; 1347 } 1348 1349 // No more klasses in the current CLD. Time to find a new CLD. 1350 ClassLoaderData* cld = klass->class_loader_data(); 1351 assert_locked_or_safepoint(cld->metaspace_lock()); 1352 while (next == NULL) { 1353 cld = cld->next(); 1354 if (cld == NULL) { 1355 break; 1356 } 1357 next = cld->_klasses; 1358 } 1359 1360 return next; 1361 } 1362 1363 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() { 1364 Klass* head = _next_klass; 1365 1366 while (head != NULL) { 1367 Klass* next = next_klass_in_cldg(head); 1368 1369 Klass* old_head = Atomic::cmpxchg(next, &_next_klass, head); 1370 1371 if (old_head == head) { 1372 return head; // Won the CAS. 1373 } 1374 1375 head = old_head; 1376 } 1377 1378 // Nothing more for the iterator to hand out. 
1379 assert(head == NULL, "head is " PTR_FORMAT ", expected not null:", p2i(head)); 1380 return NULL; 1381 } 1382 1383 ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() { 1384 _data = ClassLoaderDataGraph::_head; 1385 } 1386 1387 ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {} 1388 1389 #ifndef PRODUCT 1390 // callable from debugger 1391 extern "C" int print_loader_data_graph() { 1392 ResourceMark rm; 1393 ClassLoaderDataGraph::print_on(tty); 1394 return 0; 1395 } 1396 1397 void ClassLoaderDataGraph::verify() { 1398 for (ClassLoaderData* data = _head; data != NULL; data = data->next()) { 1399 data->verify(); 1400 } 1401 } 1402 1403 void ClassLoaderDataGraph::print_on(outputStream * const out) { 1404 for (ClassLoaderData* data = _head; data != NULL; data = data->next()) { 1405 data->print_on(out); 1406 } 1407 } 1408 #endif // PRODUCT 1409 1410 #if INCLUDE_TRACE 1411 1412 Ticks ClassLoaderDataGraph::_class_unload_time; 1413 1414 void ClassLoaderDataGraph::class_unload_event(Klass* const k) { 1415 assert(k != NULL, "invariant"); 1416 1417 // post class unload event 1418 EventClassUnload event(UNTIMED); 1419 event.set_endtime(_class_unload_time); 1420 event.set_unloadedClass(k); 1421 event.set_definingClassLoader(k->class_loader_data()); 1422 event.commit(); 1423 } 1424 1425 #endif // INCLUDE_TRACE