/*
 * Copyright (c) 2012, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// A ClassLoaderData identifies the full set of class types that a class
// loader's name resolution strategy produces for a given configuration of the
// class loader.
// Class types in the ClassLoaderData may be defined from class file binaries
// provided by the class loader, or from other class loaders it interacts with
// according to its name resolution strategy.
//
// Class loaders that implement a deterministic name resolution strategy
// (including with respect to their delegation behavior), such as the boot, the
// platform, and the system loaders of the JDK's built-in class loader
// hierarchy, always produce the same linkset for a given configuration.
//
// ClassLoaderData carries information related to a linkset (e.g.,
// metaspace holding its klass definitions).
// The System Dictionary and related data structures (e.g., placeholder table,
// loader constraints table) as well as the runtime representation of classes
// only reference ClassLoaderData.
//
// Instances of java.lang.ClassLoader hold a pointer to the ClassLoaderData
// that represents the loader's "linking domain" in the JVM.
//
// The bootstrap loader (represented by NULL) also has a ClassLoaderData,
// the singleton class the_null_class_loader_data().

#include "precompiled.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/dictionary.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/metadataOnStackMark.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/packageEntry.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/access.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/oopHandle.inline.hpp"
#include "oops/weakHandle.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/mutex.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/safepoint.hpp"
#include "runtime/safepointVerifiers.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
#include "utilities/ostream.hpp"
#include "utilities/ticks.hpp"
#if INCLUDE_JFR
#include "jfr/jfr.hpp"
#include "jfr/jfrEvents.hpp"
#endif

// Running counts of array/instance classes across all ClassLoaderData,
// maintained via the inc_*/dec_* helpers (see add_class/remove_class).
volatile size_t ClassLoaderDataGraph::_num_array_classes = 0;
volatile size_t ClassLoaderDataGraph::_num_instance_classes = 0;

// Singleton CLD for the bootstrap (NULL) class loader; set exactly once in
// init_null_class_loader_data().
ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
// One-time creation of the bootstrap loader's ClassLoaderData. Also seeds the
// head of the ClassLoaderDataGraph list with this singleton.
void ClassLoaderData::init_null_class_loader_data() {
  assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");

  _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");

  LogTarget(Debug, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("create ");
    _the_null_class_loader_data->print_value_on(&ls);
    ls.cr();
  }
}

// JFR and logging support so that the name and klass are available after the
// class_loader oop is no longer alive, during unloading.
void ClassLoaderData::initialize_name_and_klass(Handle class_loader) {
  _class_loader_klass = class_loader->klass();
  oop class_loader_name = java_lang_ClassLoader::name(class_loader());
  if (class_loader_name != NULL) {
    Thread* THREAD = Thread::current();
    ResourceMark rm(THREAD);
    const char* class_loader_instance_name =
      java_lang_String::as_utf8_string(class_loader_name);

    if (class_loader_instance_name != NULL && class_loader_instance_name[0] != '\0') {
      // Can't throw InternalError and SymbolTable doesn't throw OOM anymore.
      _class_loader_name = SymbolTable::new_symbol(class_loader_instance_name, CATCH);
    }
  }
}

ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous) :
  _is_anonymous(is_anonymous),
  // An anonymous class loader data doesn't have anything to keep
  // it from being unloaded during parsing of the anonymous class.
  // The null-class-loader should always be kept alive.
  _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
  _metaspace(NULL), _unloading(false), _klasses(NULL),
  _modules(NULL), _packages(NULL), _unnamed_module(NULL), _dictionary(NULL),
  _claimed(0), _modified_oops(true), _accumulated_modified_oops(false),
  _jmethod_ids(NULL), _handles(), _deallocate_list(NULL),
  _next(NULL),
  _class_loader_klass(NULL), _class_loader_name(NULL),
  _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
                            Monitor::_safepoint_check_never)) {

  if (!h_class_loader.is_null()) {
    _class_loader = _handles.add(h_class_loader());
  }

  if (!is_anonymous) {
    // The holder is initialized later for anonymous classes, and before calling anything
    // that call class_loader().
    initialize_holder(h_class_loader);

    // A ClassLoaderData created solely for an anonymous class should never have a
    // ModuleEntryTable or PackageEntryTable created for it. The defining package
    // and module for an anonymous class will be found in its host class.
    _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
    if (h_class_loader.is_null()) {
      // Create unnamed module for boot loader
      _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
    } else {
      // Create unnamed module for all other loaders
      _unnamed_module = ModuleEntry::create_unnamed_module(this);
    }
    _dictionary = create_dictionary();
  }

  NOT_PRODUCT(_dependency_count = 0); // number of class loader dependencies

  JFR_ONLY(INIT_ID(this);)
}

// Free every chunk in the handle list. Only run when the CLD itself dies, so
// no concurrent readers can exist at this point.
ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
  Chunk* c = _head;
  while (c != NULL) {
    Chunk* next = c->_next;
    delete c;
    c = next;
  }
}

// Append an oop handle. Single writer; concurrent lock-free readers rely on
// the release_store ordering below (publish chunk before bumping size).
oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
  if (_head == NULL || _head->_size == Chunk::CAPACITY) {
    Chunk* next = new Chunk(_head);
    OrderAccess::release_store(&_head, next);
  }
  oop* handle = &_head->_data[_head->_size];
  *handle = o;
  OrderAccess::release_store(&_head->_size, _head->_size + 1);
  return handle;
}

// Total number of handles across all chunks (not synchronized; diagnostic use
// in print_on).
int ClassLoaderData::ChunkedHandleList::count() const {
  int count = 0;
  Chunk* chunk = _head;
  while (chunk != NULL) {
    count += chunk->_size;
    chunk = chunk->_next;
  }
  return count;
}

// Apply f to the first `size` slots of chunk c, skipping cleared (NULL) slots.
inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
  for (juint i = 0; i < size; i++) {
    if (c->_data[i] != NULL) {
      f->do_oop(&c->_data[i]);
    }
  }
}

// Lock-free walk of all handles. Only the head chunk can grow concurrently,
// hence the acquire-load of its _size; older chunks are immutable.
void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
  Chunk* head = OrderAccess::load_acquire(&_head);
  if (head != NULL) {
    // Must be careful when reading size of head
    oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
    for (Chunk* c = head->_next; c != NULL; c = c->_next) {
      oops_do_chunk(f, c, c->_size);
    }
  }
}

// Closure used by ChunkedHandleList::contains to search for a specific oop.
class VerifyContainsOopClosure : public OopClosure {
  oop  _target;
  bool _found;

 public:
  VerifyContainsOopClosure(oop target) : _target(target), _found(false) {}

  void do_oop(oop* p) {
    if (p != NULL && oopDesc::equals(RawAccess<>::oop_load(p), _target)) {
      _found = true;
    }
  }

  void do_oop(narrowOop* p) {
    // The ChunkedHandleList should not contain any narrowOop
    ShouldNotReachHere();
  }

  bool found() const {
    return _found;
  }
};

// Linear search for oop p among the handles (used by record_dependency to
// avoid adding duplicates).
bool ClassLoaderData::ChunkedHandleList::contains(oop p) {
  VerifyContainsOopClosure cl(p);
  oops_do(&cl);
  return cl.found();
}

#ifndef PRODUCT
// Debug-only: true if oop_handle points into one of this list's chunks.
bool ClassLoaderData::ChunkedHandleList::owner_of(oop* oop_handle) {
  Chunk* chunk = _head;
  while (chunk != NULL) {
    if (&(chunk->_data[0]) <= oop_handle && oop_handle < &(chunk->_data[chunk->_size])) {
      return true;
    }
    chunk = chunk->_next;
  }
  return false;
}
#endif // PRODUCT

// Atomically claim this CLD for the current GC/iteration pass. Returns true
// exactly once per clear_claimed_marks() cycle; the fast-path read avoids the
// cmpxchg when already claimed.
bool ClassLoaderData::claim() {
  if (_claimed == 1) {
    return false;
  }

  return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0;
}

// Anonymous classes have their own ClassLoaderData that is marked to keep alive
// while the class is being parsed, and if the class appears on the module fixup list.
// Due to the uniqueness that no other class shares the anonymous class' name or
// ClassLoaderData, no other non-GC thread has knowledge of the anonymous class while
// it is being defined, therefore _keep_alive is not volatile or atomic.
// Bump the parse-time keep-alive count (no-op for non-anonymous CLDs; see the
// comment above about why this is neither volatile nor atomic).
void ClassLoaderData::inc_keep_alive() {
  if (is_anonymous()) {
    assert(_keep_alive >= 0, "Invalid keep alive increment count");
    _keep_alive++;
  }
}

void ClassLoaderData::dec_keep_alive() {
  if (is_anonymous()) {
    assert(_keep_alive > 0, "Invalid keep alive decrement count");
    _keep_alive--;
  }
}

// Walk all oop handles owned by this CLD. When must_claim is set, only the
// first caller per claim cycle proceeds (supports parallel GC workers).
void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) {
  if (must_claim && !claim()) {
    return;
  }

  // Only clear modified_oops after the ClassLoaderData is claimed.
  if (clear_mod_oops) {
    clear_modified_oops();
  }

  _handles.oops_do(f);
}

// Apply the closure to every klass on the _klasses list (loaded or not).
void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    klass_closure->do_klass(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::classes_do(void f(Klass * const)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    f(k);
    assert(k != k->next_link(), "no loops!");
  }
}

// Apply f to every method of every loaded InstanceKlass in this CLD.
void ClassLoaderData::methods_do(void f(Method*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
      InstanceKlass::cast(k)->methods_do(f);
    }
  }
}

// Like classes_do, but skips instance klasses that are not fully loaded.
void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    // Do not filter ArrayKlass oops here...
    if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
      klass_closure->do_klass(k);
    }
  }
}

// Apply f to every InstanceKlass (array klasses are skipped).
void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass()) {
      f(InstanceKlass::cast(k));
    }
    assert(k != k->next_link(), "no loops!");
  }
}

// Apply f to the unnamed module and every entry in the module table.
// Caller must hold Module_lock or be at a safepoint.
void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_unnamed_module != NULL) {
    f(_unnamed_module);
  }
  if (_modules != NULL) {
    for (int i = 0; i < _modules->table_size(); i++) {
      for (ModuleEntry* entry = _modules->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

// Apply f to every entry in the package table.
// Caller must hold Module_lock or be at a safepoint.
void ClassLoaderData::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_packages != NULL) {
    for (int i = 0; i < _packages->table_size(); i++) {
      for (PackageEntry* entry = _packages->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

// Record that this CLD depends on klass k's CLD, by stashing an oop that keeps
// the target CLD alive in our handle list (so unloading of k's loader cannot
// happen while we still reference it).
void ClassLoaderData::record_dependency(const Klass* k) {
  assert(k != NULL, "invariant");

  ClassLoaderData * const from_cld = this;
  ClassLoaderData * const to_cld = k->class_loader_data();

  // Do not need to record dependency if the dependency is to a class whose
  // class loader data is never freed. (i.e. the dependency's class loader
  // is one of the three builtin class loaders and the dependency is not
  // anonymous.)
  if (to_cld->is_permanent_class_loader_data()) {
    return;
  }

  oop to;
  if (to_cld->is_anonymous()) {
    // Just return if an anonymous class is attempting to record a dependency
    // to itself. (Note that every anonymous class has its own unique class
    // loader data.)
    if (to_cld == from_cld) {
      return;
    }
    // Anonymous class dependencies are through the mirror.
    to = k->java_mirror();
  } else {
    to = to_cld->class_loader();
    oop from = from_cld->class_loader();

    // Just return if this dependency is to a class with the same or a parent
    // class_loader.
    if (oopDesc::equals(from, to) || java_lang_ClassLoader::isAncestor(from, to)) {
      return; // this class loader is in the parent list, no need to add it.
    }
  }

  // It's a dependency we won't find through GC, add it.
  if (!_handles.contains(to)) {
    NOT_PRODUCT(Atomic::inc(&_dependency_count));
    LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("adding dependency from ");
      print_value_on(&ls);
      ls.print(" to ");
      to_cld->print_value_on(&ls);
      ls.cr();
    }
    Handle dependency(Thread::current(), to);
    add_handle(dependency);
    // Added a potentially young gen oop to the ClassLoaderData
    record_modified_oops();
  }
}


// Reset the claimed mark on every CLD so the next GC/iteration cycle can
// claim them again (see ClassLoaderData::claim()).
void ClassLoaderDataGraph::clear_claimed_marks() {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->clear_claimed();
  }
}

// Prepend klass k to this CLD's _klasses list and update the global class
// counters. The list is walked lock-free, hence the release_store publish.
void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
  {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    Klass* old_value = _klasses;
    k->set_next_link(old_value);
    // Link the new item into the list, making sure the linked class is stable
    // since the list can be walked without a lock
    OrderAccess::release_store(&_klasses, k);
    if (k->is_array_klass()) {
      ClassLoaderDataGraph::inc_array_classes(1);
    } else {
      ClassLoaderDataGraph::inc_instance_classes(1);
    }
  }

  if (publicize) {
    LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), k->external_name());
      print_value_on(&ls);
      ls.cr();
    }
  }
}

// Class iterator used by the compiler. It gets some number of classes at
// a safepoint to decay invocation counters on the methods.
class ClassLoaderDataGraphKlassIteratorStatic {
  ClassLoaderData* _current_loader_data;  // CLD the iterator is positioned in
  Klass*           _current_class_entry;  // next klass to visit within that CLD
 public:

  ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {}

  // Return the next loaded InstanceKlass, wrapping around the CLD graph.
  // Returns NULL only if no loaded instance class is found after examining
  // num_instance_classes() entries. Safepoint-only; caller handles NULL.
  InstanceKlass* try_get_next_class() {
    assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
    size_t max_classes = ClassLoaderDataGraph::num_instance_classes();
    assert(max_classes > 0, "should not be called with no instance classes");
    for (size_t i = 0; i < max_classes; ) {

      if (_current_class_entry != NULL) {
        Klass* k = _current_class_entry;
        _current_class_entry = _current_class_entry->next_link();

        if (k->is_instance_klass()) {
          InstanceKlass* ik = InstanceKlass::cast(k);
          i++;  // count all instance classes found
          // Not yet loaded classes are counted in max_classes
          // but only return loaded classes.
          if (ik->is_loaded()) {
            return ik;
          }
        }
      } else {
        // Go to next CLD
        if (_current_loader_data != NULL) {
          _current_loader_data = _current_loader_data->next();
        }
        // Start at the beginning
        if (_current_loader_data == NULL) {
          _current_loader_data = ClassLoaderDataGraph::_head;
        }

        _current_class_entry = _current_loader_data->klasses();
      }
    }
    // Should never be reached unless all instance classes have failed or are not fully loaded.
    // Caller handles NULL.
    return NULL;
  }

  // If the current class for the static iterator is a class being unloaded or
  // deallocated, adjust the current class.
  // Skip past a CLD that is about to be unloaded so the iterator never
  // dereferences freed loader data.
  void adjust_saved_class(ClassLoaderData* cld) {
    if (_current_loader_data == cld) {
      _current_loader_data = cld->next();
      if (_current_loader_data != NULL) {
        _current_class_entry = _current_loader_data->klasses();
      }  // else try_get_next_class will start at the head
    }
  }

  // Skip past a klass that is being removed from its CLD's _klasses list.
  void adjust_saved_class(Klass* klass) {
    if (_current_class_entry == klass) {
      _current_class_entry = klass->next_link();
    }
  }
};

static ClassLoaderDataGraphKlassIteratorStatic static_klass_iterator;

InstanceKlass* ClassLoaderDataGraph::try_get_next_class() {
  return static_klass_iterator.try_get_next_class();
}


// Install the weak "holder" handle that keeps track of whether the loader (or,
// for anonymous classes, the mirror) is still reachable. Set at most once.
void ClassLoaderData::initialize_holder(Handle loader_or_mirror) {
  if (loader_or_mirror() != NULL) {
    assert(_holder.is_null(), "never replace holders");
    _holder = WeakHandle<vm_class_loader_data>::create(loader_or_mirror);
  }
}

// Remove a klass from the _klasses list for scratch_class during redefinition
// or parsed class in the case of an error.
void ClassLoaderData::remove_class(Klass* scratch_class) {
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");

  // Adjust global class iterator.
  static_klass_iterator.adjust_saved_class(scratch_class);

  Klass* prev = NULL;
  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    if (k == scratch_class) {
      if (prev == NULL) {
        _klasses = k->next_link();
      } else {
        Klass* next = k->next_link();
        prev->set_next_link(next);
      }

      if (k->is_array_klass()) {
        ClassLoaderDataGraph::dec_array_classes(1);
      } else {
        ClassLoaderDataGraph::dec_instance_classes(1);
      }

      return;
    }
    prev = k;
    assert(k != k->next_link(), "no loops!");
  }
  ShouldNotReachHere();   // should have found this class!!
}

// Mark this CLD as unloading and perform the work that must happen before the
// metaspace is freed (deallocate-list cleanup, serviceability notification,
// iterator fix-up).
void ClassLoaderData::unload() {
  _unloading = true;

  LogTarget(Debug, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("unload");
    print_value_on(&ls);
    ls.cr();
  }

  // Some items on the _deallocate_list need to free their C heap structures
  // if they are not already on the _klasses list.
  unload_deallocate_list();

  // Tell serviceability tools these classes are unloading
  // after erroneous classes are released.
  classes_do(InstanceKlass::notify_unload_class);

  // Clean up global class iterator for compiler
  static_klass_iterator.adjust_saved_class(this);
}

ModuleEntryTable* ClassLoaderData::modules() {
  // Lazily create the module entry table at first request.
  // Lock-free access requires load_acquire.
  ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
  if (modules == NULL) {
    MutexLocker m1(Module_lock);
    // Check if _modules got allocated while we were waiting for this lock.
    if ((modules = _modules) == NULL) {
      modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);

      {
        MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
        // Ensure _modules is stable, since it is examined without a lock
        OrderAccess::release_store(&_modules, modules);
      }
    }
  }
  return modules;
}

// Initial bucket counts for the per-loader dictionaries created below.
const int _boot_loader_dictionary_size    = 1009;
const int _default_loader_dictionary_size = 107;

// Create the per-loader dictionary, sized by loader kind. Resizing is
// disabled when the flag is off or when CDS dump/use is active.
Dictionary* ClassLoaderData::create_dictionary() {
  assert(!is_anonymous(), "anonymous class loader data do not have a dictionary");
  int size;
  bool resizable = false;
  if (_the_null_class_loader_data == NULL) {
    size = _boot_loader_dictionary_size;
    resizable = true;
  } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
    size = 1;  // there's only one class in reflection class loader and no initiated classes
  } else if (is_system_class_loader_data()) {
    size = _boot_loader_dictionary_size;
    resizable = true;
  } else {
    size = _default_loader_dictionary_size;
    resizable = true;
  }
  if (!DynamicallyResizeSystemDictionaries || DumpSharedSpaces || UseSharedSpaces) {
    resizable = false;
  }
  return new Dictionary(this, size, resizable);
}

// Tell the GC to keep this klass alive while iterating ClassLoaderDataGraph
oop ClassLoaderData::holder_phantom() const {
  // A klass that was previously considered dead can be looked up in the
  // CLD/SD, and its _java_mirror or _class_loader can be stored in a root
  // or a reachable object making it alive again. The SATB part of G1 needs
  // to get notified about this potential resurrection, otherwise the marking
  // might not find the object.
  if (!_holder.is_null()) {  // NULL class_loader
    return _holder.resolve();
  } else {
    return NULL;
  }
}

// Unloading support
bool ClassLoaderData::is_alive() const {
  bool alive = keep_alive()         // null class loader and incomplete anonymous klasses.
      || (_holder.peek() != NULL);  // and not cleaned by the GC weak handle processing.

  return alive;
}

// Releases per-klass C heap structures and tallies what was released so the
// CLD destructor can decrement the global class counters.
class ReleaseKlassClosure: public KlassClosure {
 private:
  size_t  _instance_class_released;
  size_t  _array_class_released;
 public:
  ReleaseKlassClosure() : _instance_class_released(0), _array_class_released(0) { }

  size_t instance_class_released() const { return _instance_class_released; }
  size_t array_class_released()    const { return _array_class_released; }

  void do_klass(Klass* k) {
    if (k->is_array_klass()) {
      _array_class_released ++;
    } else {
      assert(k->is_instance_klass(), "Must be");
      _instance_class_released ++;
      InstanceKlass::release_C_heap_structures(InstanceKlass::cast(k));
    }
  }
};

ClassLoaderData::~ClassLoaderData() {
  // Release C heap structures for all the classes.
  ReleaseKlassClosure cl;
  classes_do(&cl);

  ClassLoaderDataGraph::dec_array_classes(cl.array_class_released());
  ClassLoaderDataGraph::dec_instance_classes(cl.instance_class_released());

  // Release the WeakHandle
  _holder.release();

  // Release C heap allocated hashtable for all the packages.
  if (_packages != NULL) {
    // Destroy the table itself
    delete _packages;
    _packages = NULL;
  }

  // Release C heap allocated hashtable for all the modules.
  if (_modules != NULL) {
    // Destroy the table itself
    delete _modules;
    _modules = NULL;
  }

  // Release C heap allocated hashtable for the dictionary
  if (_dictionary != NULL) {
    // Destroy the table itself
    delete _dictionary;
    _dictionary = NULL;
  }

  if (_unnamed_module != NULL) {
    _unnamed_module->delete_unnamed_module();
    _unnamed_module = NULL;
  }

  // release the metaspace
  ClassLoaderMetaspace *m = _metaspace;
  if (m != NULL) {
    _metaspace = NULL;
    delete m;
  }
  // Clear all the JNI handles for methods
  // These aren't deallocated and are going to look like a leak, but that's
  // needed because we can't really get rid of jmethodIDs because we don't
  // know when native code is going to stop using them. The spec says that
  // they're "invalid" but existing programs likely rely on their being
  // NULL after class unloading.
  if (_jmethod_ids != NULL) {
    Method::clear_jmethod_ids(this);
  }
  // Delete lock
  delete _metaspace_lock;

  // Delete free list
  if (_deallocate_list != NULL) {
    delete _deallocate_list;
  }
}

// Returns true if this class loader data is for the app class loader
// or a user defined system class loader. (Note that the class loader
// data may be anonymous.)
bool ClassLoaderData::is_system_class_loader_data() const {
  return SystemDictionary::is_system_class_loader(class_loader());
}

// Returns true if this class loader data is for the platform class loader.
// (Note that the class loader data may be anonymous.)
bool ClassLoaderData::is_platform_class_loader_data() const {
  return SystemDictionary::is_platform_class_loader(class_loader());
}

// Returns true if the class loader for this class loader data is one of
// the 3 builtin (boot, application/system or platform) class loaders,
// including a user-defined system class loader.  Note that if the class
// loader data is for an anonymous class then it may get freed by a GC
// even if its class loader is one of these loaders.
bool ClassLoaderData::is_builtin_class_loader_data() const {
  return (is_boot_class_loader_data() ||
          SystemDictionary::is_system_class_loader(class_loader()) ||
          SystemDictionary::is_platform_class_loader(class_loader()));
}

// Returns true if this class loader data is a class loader data
// that is not ever freed by a GC.  It must be one of the builtin
// class loaders and not anonymous.
bool ClassLoaderData::is_permanent_class_loader_data() const {
  return is_builtin_class_loader_data() && !is_anonymous();
}

ClassLoaderMetaspace* ClassLoaderData::metaspace_non_null() {
  // If the metaspace has not been allocated, create a new one.  Might want
  // to create smaller arena for Reflection class loaders also.
  // The reason for the delayed allocation is because some class loaders are
  // simply for delegating with no metadata of their own.
  // Lock-free access requires load_acquire.
  ClassLoaderMetaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
  if (metaspace == NULL) {
    MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
    // Check if _metaspace got allocated while we were waiting for this lock.
    if ((metaspace = _metaspace) == NULL) {
      // Pick the metaspace type matching the loader kind.
      if (this == the_null_class_loader_data()) {
        assert (class_loader() == NULL, "Must be");
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
      } else if (is_anonymous()) {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
      } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
      } else {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
      }
      // Ensure _metaspace is stable, since it is examined without a lock
      OrderAccess::release_store(&_metaspace, metaspace);
    }
  }
  return metaspace;
}

// Add an oop to this CLD's handle list and note that the CLD's oops were
// modified (for remembered-set style processing by the GC).
OopHandle ClassLoaderData::add_handle(Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  record_modified_oops();
  return OopHandle(_handles.add(h()));
}

// Clear (NULL out) a handle previously returned by add_handle; the slot itself
// is not reclaimed.
void ClassLoaderData::remove_handle(OopHandle h) {
  assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
  oop* ptr = h.ptr_raw();
  if (ptr != NULL) {
    assert(_handles.owner_of(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
    // This root is not walked in safepoints, and hence requires an appropriate
    // decorator that e.g. maintains the SATB invariant in SATB collectors.
    NativeAccess<IN_CONCURRENT_ROOT>::oop_store(ptr, oop(NULL));
  }
}

// Initialize dest to a new handle for h, unless another thread already did so
// (first writer under the metaspace lock wins).
void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  if (dest.resolve() != NULL) {
    return;
  } else {
    dest = _handles.add(h());
  }
}

// Add this metadata pointer to be freed when it's safe.  This is only during
// class unloading because Handles might point to this metadata field.
void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
  // Metadata in shared region isn't deleted.
  if (!m->is_shared()) {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    if (_deallocate_list == NULL) {
      _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
    }
    _deallocate_list->append_if_missing(m);
  }
}

// Deallocate free metadata on the free list.  How useful the PermGen was!
void ClassLoaderData::free_deallocate_list() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        ShouldNotReachHere();
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
    }
  }
}

// This is distinct from free_deallocate_list.  For class loader data that are
// unloading, this frees the C heap memory for items on the list, and unlinks
// scratch or error classes so that unloading events aren't triggered for these
// classes. The metadata is removed with the unloading metaspace.
// There isn't C heap memory allocated for methods, so nothing is done for them.
void ClassLoaderData::unload_deallocate_list() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(is_unloading(), "only called for ClassLoaderData that are unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    assert (!m->on_stack(), "wouldn't be unloading if this were so");
    _deallocate_list->remove_at(i);
    if (m->is_constantPool()) {
      ((ConstantPool*)m)->release_C_heap_structures();
    } else if (m->is_klass()) {
      InstanceKlass* ik = (InstanceKlass*)m;
      // also releases ik->constants() C heap memory
      InstanceKlass::release_C_heap_structures(ik);
      // Remove the class so unloading events aren't triggered for
      // this class (scratch or error class) in do_unloading().
      remove_class(ik);
    }
  }
}

// These anonymous class loaders are to contain classes used for JSR292
ClassLoaderData* ClassLoaderData::anonymous_class_loader_data(Handle loader) {
  // Add a new class loader data to the graph.
911 return ClassLoaderDataGraph::add(loader, true); 912 } 913 914 const char* ClassLoaderData::loader_name() const { 915 if (is_unloading()) { 916 if (_class_loader_klass == NULL) { 917 return "<bootloader>"; 918 } else if (_class_loader_name != NULL) { 919 return _class_loader_name->as_C_string(); 920 } else { 921 return _class_loader_klass->name()->as_C_string(); 922 } 923 } else { 924 // Handles null class loader 925 return SystemDictionary::loader_name(class_loader()); 926 } 927 } 928 929 930 void ClassLoaderData::print_value_on(outputStream* out) const { 931 if (!is_unloading() && class_loader() != NULL) { 932 out->print("loader data: " INTPTR_FORMAT " for instance ", p2i(this)); 933 class_loader()->print_value_on(out); // includes loader_name() and address of class loader instance 934 } else { 935 // loader data: 0xsomeaddr of <bootloader> 936 out->print("loader data: " INTPTR_FORMAT " of %s", p2i(this), loader_name()); 937 } 938 if (is_anonymous()) { 939 out->print(" anonymous"); 940 } 941 } 942 943 #ifndef PRODUCT 944 void ClassLoaderData::print_on(outputStream* out) const { 945 out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: %s {", 946 p2i(this), p2i(_class_loader.ptr_raw()), loader_name()); 947 if (is_anonymous()) out->print(" anonymous"); 948 if (claimed()) out->print(" claimed"); 949 if (is_unloading()) out->print(" unloading"); 950 out->print(" metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null())); 951 952 if (_jmethod_ids != NULL) { 953 Method::print_jmethod_ids(this, out); 954 } 955 out->print(" handles count %d", _handles.count()); 956 out->print(" dependencies %d", _dependency_count); 957 out->print_cr("}"); 958 } 959 #endif // PRODUCT 960 961 void ClassLoaderData::verify() { 962 assert_locked_or_safepoint(_metaspace_lock); 963 oop cl = class_loader(); 964 965 guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same"); 966 guarantee(cl != NULL || this == 
ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be"); 967 968 // Verify the integrity of the allocated space. 969 if (metaspace_or_null() != NULL) { 970 metaspace_or_null()->verify(); 971 } 972 973 for (Klass* k = _klasses; k != NULL; k = k->next_link()) { 974 guarantee(k->class_loader_data() == this, "Must be the same"); 975 k->verify(); 976 assert(k != k->next_link(), "no loops!"); 977 } 978 } 979 980 bool ClassLoaderData::contains_klass(Klass* klass) { 981 // Lock-free access requires load_acquire 982 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) { 983 if (k == klass) return true; 984 } 985 return false; 986 } 987 988 989 // GC root of class loader data created. 990 ClassLoaderData* ClassLoaderDataGraph::_head = NULL; 991 ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL; 992 ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL; 993 ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL; 994 995 bool ClassLoaderDataGraph::_should_purge = false; 996 bool ClassLoaderDataGraph::_metaspace_oom = false; 997 998 // Add a new class loader data node to the list. Assign the newly created 999 // ClassLoaderData into the java/lang/ClassLoader object as a hidden field 1000 ClassLoaderData* ClassLoaderDataGraph::add_to_graph(Handle loader, bool is_anonymous) { 1001 NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the 1002 // ClassLoaderData in the graph since the CLD 1003 // contains oops in _handles that must be walked. 1004 1005 ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous); 1006 1007 if (!is_anonymous) { 1008 // First, Atomically set it 1009 ClassLoaderData* old = java_lang_ClassLoader::cmpxchg_loader_data(cld, loader(), NULL); 1010 if (old != NULL) { 1011 delete cld; 1012 // Returns the data. 
1013 return old; 1014 } 1015 } 1016 1017 // We won the race, and therefore the task of adding the data to the list of 1018 // class loader data 1019 ClassLoaderData** list_head = &_head; 1020 ClassLoaderData* next = _head; 1021 1022 do { 1023 cld->set_next(next); 1024 ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next); 1025 if (exchanged == next) { 1026 LogTarget(Debug, class, loader, data) lt; 1027 if (lt.is_enabled()) { 1028 ResourceMark rm; 1029 LogStream ls(lt); 1030 ls.print("create "); 1031 cld->print_value_on(&ls); 1032 ls.cr(); 1033 } 1034 return cld; 1035 } 1036 next = exchanged; 1037 } while (true); 1038 } 1039 1040 ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous) { 1041 ClassLoaderData* loader_data = add_to_graph(loader, is_anonymous); 1042 // Initialize name and class after the loader data is added to the CLDG 1043 // because adding the Symbol for the name might safepoint. 1044 if (loader.not_null()) { 1045 loader_data->initialize_name_and_klass(loader); 1046 } 1047 return loader_data; 1048 } 1049 1050 void ClassLoaderDataGraph::oops_do(OopClosure* f, bool must_claim) { 1051 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1052 cld->oops_do(f, must_claim); 1053 } 1054 } 1055 1056 void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, bool must_claim) { 1057 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1058 if (cld->keep_alive()) { 1059 cld->oops_do(f, must_claim); 1060 } 1061 } 1062 } 1063 1064 void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, bool must_claim) { 1065 if (ClassUnloading) { 1066 keep_alive_oops_do(f, must_claim); 1067 } else { 1068 oops_do(f, must_claim); 1069 } 1070 } 1071 1072 void ClassLoaderDataGraph::cld_do(CLDClosure* cl) { 1073 for (ClassLoaderData* cld = _head; cl != NULL && cld != NULL; cld = cld->next()) { 1074 cl->do_cld(cld); 1075 } 1076 } 1077 1078 void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) { 1079 
assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1080 // Only walk the head until any clds not purged from prior unloading 1081 // (CMS doesn't purge right away). 1082 for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) { 1083 assert(cld->is_unloading(), "invariant"); 1084 cl->do_cld(cld); 1085 } 1086 } 1087 1088 void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) { 1089 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->_next) { 1090 CLDClosure* closure = cld->keep_alive() ? strong : weak; 1091 if (closure != NULL) { 1092 closure->do_cld(cld); 1093 } 1094 } 1095 } 1096 1097 void ClassLoaderDataGraph::keep_alive_cld_do(CLDClosure* cl) { 1098 roots_cld_do(cl, NULL); 1099 } 1100 1101 void ClassLoaderDataGraph::always_strong_cld_do(CLDClosure* cl) { 1102 if (ClassUnloading) { 1103 keep_alive_cld_do(cl); 1104 } else { 1105 cld_do(cl); 1106 } 1107 } 1108 1109 void ClassLoaderDataGraph::classes_do(KlassClosure* klass_closure) { 1110 Thread* thread = Thread::current(); 1111 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1112 Handle holder(thread, cld->holder_phantom()); 1113 cld->classes_do(klass_closure); 1114 } 1115 } 1116 1117 void ClassLoaderDataGraph::classes_do(void f(Klass* const)) { 1118 Thread* thread = Thread::current(); 1119 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1120 Handle holder(thread, cld->holder_phantom()); 1121 cld->classes_do(f); 1122 } 1123 } 1124 1125 void ClassLoaderDataGraph::methods_do(void f(Method*)) { 1126 Thread* thread = Thread::current(); 1127 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1128 Handle holder(thread, cld->holder_phantom()); 1129 cld->methods_do(f); 1130 } 1131 } 1132 1133 void ClassLoaderDataGraph::modules_do(void f(ModuleEntry*)) { 1134 assert_locked_or_safepoint(Module_lock); 1135 Thread* thread = Thread::current(); 1136 for (ClassLoaderData* cld = _head; cld != 
NULL; cld = cld->next()) { 1137 Handle holder(thread, cld->holder_phantom()); 1138 cld->modules_do(f); 1139 } 1140 } 1141 1142 void ClassLoaderDataGraph::modules_unloading_do(void f(ModuleEntry*)) { 1143 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1144 // Only walk the head until any clds not purged from prior unloading 1145 // (CMS doesn't purge right away). 1146 for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) { 1147 assert(cld->is_unloading(), "invariant"); 1148 cld->modules_do(f); 1149 } 1150 } 1151 1152 void ClassLoaderDataGraph::packages_do(void f(PackageEntry*)) { 1153 assert_locked_or_safepoint(Module_lock); 1154 Thread* thread = Thread::current(); 1155 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1156 Handle holder(thread, cld->holder_phantom()); 1157 cld->packages_do(f); 1158 } 1159 } 1160 1161 void ClassLoaderDataGraph::packages_unloading_do(void f(PackageEntry*)) { 1162 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1163 // Only walk the head until any clds not purged from prior unloading 1164 // (CMS doesn't purge right away). 1165 for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) { 1166 assert(cld->is_unloading(), "invariant"); 1167 cld->packages_do(f); 1168 } 1169 } 1170 1171 void ClassLoaderDataGraph::loaded_classes_do(KlassClosure* klass_closure) { 1172 Thread* thread = Thread::current(); 1173 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 1174 Handle holder(thread, cld->holder_phantom()); 1175 cld->loaded_classes_do(klass_closure); 1176 } 1177 } 1178 1179 void ClassLoaderDataGraph::classes_unloading_do(void f(Klass* const)) { 1180 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1181 // Only walk the head until any clds not purged from prior unloading 1182 // (CMS doesn't purge right away). 
1183 for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) { 1184 assert(cld->is_unloading(), "invariant"); 1185 cld->classes_do(f); 1186 } 1187 } 1188 1189 #define FOR_ALL_DICTIONARY(X) for (ClassLoaderData* X = _head; X != NULL; X = X->next()) \ 1190 if (X->dictionary() != NULL) 1191 1192 // Walk classes in the loaded class dictionaries in various forms. 1193 // Only walks the classes defined in this class loader. 1194 void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*)) { 1195 Thread* thread = Thread::current(); 1196 FOR_ALL_DICTIONARY(cld) { 1197 Handle holder(thread, cld->holder_phantom()); 1198 cld->dictionary()->classes_do(f); 1199 } 1200 } 1201 1202 // Only walks the classes defined in this class loader. 1203 void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*, TRAPS), TRAPS) { 1204 Thread* thread = Thread::current(); 1205 FOR_ALL_DICTIONARY(cld) { 1206 Handle holder(thread, cld->holder_phantom()); 1207 cld->dictionary()->classes_do(f, CHECK); 1208 } 1209 } 1210 1211 // Walks all entries in the dictionary including entries initiated by this class loader. 
1212 void ClassLoaderDataGraph::dictionary_all_entries_do(void f(InstanceKlass*, ClassLoaderData*)) { 1213 Thread* thread = Thread::current(); 1214 FOR_ALL_DICTIONARY(cld) { 1215 Handle holder(thread, cld->holder_phantom()); 1216 cld->dictionary()->all_entries_do(f); 1217 } 1218 } 1219 1220 void ClassLoaderDataGraph::verify_dictionary() { 1221 FOR_ALL_DICTIONARY(cld) { 1222 cld->dictionary()->verify(); 1223 } 1224 } 1225 1226 void ClassLoaderDataGraph::print_dictionary(outputStream* st) { 1227 FOR_ALL_DICTIONARY(cld) { 1228 st->print("Dictionary for "); 1229 cld->print_value_on(st); 1230 st->cr(); 1231 cld->dictionary()->print_on(st); 1232 st->cr(); 1233 } 1234 } 1235 1236 void ClassLoaderDataGraph::print_dictionary_statistics(outputStream* st) { 1237 FOR_ALL_DICTIONARY(cld) { 1238 ResourceMark rm; 1239 stringStream tempst; 1240 tempst.print("System Dictionary for %s", cld->loader_name()); 1241 cld->dictionary()->print_table_statistics(st, tempst.as_string()); 1242 } 1243 } 1244 1245 GrowableArray<ClassLoaderData*>* ClassLoaderDataGraph::new_clds() { 1246 assert(_head == NULL || _saved_head != NULL, "remember_new_clds(true) not called?"); 1247 1248 GrowableArray<ClassLoaderData*>* array = new GrowableArray<ClassLoaderData*>(); 1249 1250 // The CLDs in [_head, _saved_head] were all added during last call to remember_new_clds(true); 1251 ClassLoaderData* curr = _head; 1252 while (curr != _saved_head) { 1253 if (!curr->claimed()) { 1254 array->push(curr); 1255 LogTarget(Debug, class, loader, data) lt; 1256 if (lt.is_enabled()) { 1257 LogStream ls(lt); 1258 ls.print("found new CLD: "); 1259 curr->print_value_on(&ls); 1260 ls.cr(); 1261 } 1262 } 1263 1264 curr = curr->_next; 1265 } 1266 1267 return array; 1268 } 1269 1270 #ifndef PRODUCT 1271 bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) { 1272 for (ClassLoaderData* data = _head; data != NULL; data = data->next()) { 1273 if (loader_data == data) { 1274 return true; 1275 } 1276 } 1277 1278 
return false; 1279 } 1280 #endif // PRODUCT 1281 1282 #if INCLUDE_JFR 1283 static Ticks class_unload_time; 1284 static void post_class_unload_event(Klass* const k) { 1285 assert(k != NULL, "invariant"); 1286 EventClassUnload event(UNTIMED); 1287 event.set_endtime(class_unload_time); 1288 event.set_unloadedClass(k); 1289 event.set_definingClassLoader(k->class_loader_data()); 1290 event.commit(); 1291 } 1292 1293 static void post_class_unload_events() { 1294 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1295 if (Jfr::is_enabled()) { 1296 if (EventClassUnload::is_enabled()) { 1297 class_unload_time = Ticks::now(); 1298 ClassLoaderDataGraph::classes_unloading_do(&post_class_unload_event); 1299 } 1300 Jfr::on_unloading_classes(); 1301 } 1302 } 1303 #endif // INCLUDE_JFR 1304 1305 // Move class loader data from main list to the unloaded list for unloading 1306 // and deallocation later. 1307 bool ClassLoaderDataGraph::do_unloading(bool clean_previous_versions) { 1308 1309 ClassLoaderData* data = _head; 1310 ClassLoaderData* prev = NULL; 1311 bool seen_dead_loader = false; 1312 uint loaders_processed = 0; 1313 uint loaders_removed = 0; 1314 1315 // Mark metadata seen on the stack only so we can delete unneeded entries. 1316 // Only walk all metadata, including the expensive code cache walk, for Full GC 1317 // and only if class redefinition and if there's previous versions of 1318 // Klasses to delete. 1319 bool walk_all_metadata = clean_previous_versions && 1320 JvmtiExport::has_redefined_a_class() && 1321 InstanceKlass::has_previous_versions_and_reset(); 1322 MetadataOnStackMark md_on_stack(walk_all_metadata); 1323 1324 // Save previous _unloading pointer for CMS which may add to unloading list before 1325 // purging and we don't want to rewalk the previously unloaded class loader data. 
1326 _saved_unloading = _unloading; 1327 1328 data = _head; 1329 while (data != NULL) { 1330 if (data->is_alive()) { 1331 // clean metaspace 1332 if (walk_all_metadata) { 1333 data->classes_do(InstanceKlass::purge_previous_versions); 1334 } 1335 data->free_deallocate_list(); 1336 prev = data; 1337 data = data->next(); 1338 loaders_processed++; 1339 continue; 1340 } 1341 seen_dead_loader = true; 1342 loaders_removed++; 1343 ClassLoaderData* dead = data; 1344 dead->unload(); 1345 data = data->next(); 1346 // Remove from loader list. 1347 // This class loader data will no longer be found 1348 // in the ClassLoaderDataGraph. 1349 if (prev != NULL) { 1350 prev->set_next(data); 1351 } else { 1352 assert(dead == _head, "sanity check"); 1353 _head = data; 1354 } 1355 dead->set_next(_unloading); 1356 _unloading = dead; 1357 } 1358 1359 if (seen_dead_loader) { 1360 data = _head; 1361 while (data != NULL) { 1362 // Remove entries in the dictionary of live class loader that have 1363 // initiated loading classes in a dead class loader. 1364 if (data->dictionary() != NULL) { 1365 data->dictionary()->do_unloading(); 1366 } 1367 // Walk a ModuleEntry's reads, and a PackageEntry's exports 1368 // lists to determine if there are modules on those lists that are now 1369 // dead and should be removed. A module's life cycle is equivalent 1370 // to its defining class loader's life cycle. Since a module is 1371 // considered dead if its class loader is dead, these walks must 1372 // occur after each class loader's aliveness is determined. 
1373 if (data->packages() != NULL) { 1374 data->packages()->purge_all_package_exports(); 1375 } 1376 if (data->modules_defined()) { 1377 data->modules()->purge_all_module_reads(); 1378 } 1379 data = data->next(); 1380 } 1381 JFR_ONLY(post_class_unload_events();) 1382 } 1383 1384 log_debug(class, loader, data)("do_unloading: loaders processed %u, loaders removed %u", loaders_processed, loaders_removed); 1385 1386 return seen_dead_loader; 1387 } 1388 1389 void ClassLoaderDataGraph::purge() { 1390 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1391 ClassLoaderData* list = _unloading; 1392 _unloading = NULL; 1393 ClassLoaderData* next = list; 1394 bool classes_unloaded = false; 1395 while (next != NULL) { 1396 ClassLoaderData* purge_me = next; 1397 next = purge_me->next(); 1398 delete purge_me; 1399 classes_unloaded = true; 1400 } 1401 if (classes_unloaded) { 1402 Metaspace::purge(); 1403 set_metaspace_oom(false); 1404 } 1405 } 1406 1407 int ClassLoaderDataGraph::resize_if_needed() { 1408 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); 1409 int resized = 0; 1410 if (Dictionary::does_any_dictionary_needs_resizing()) { 1411 FOR_ALL_DICTIONARY(cld) { 1412 if (cld->dictionary()->resize_if_needed()) { 1413 resized++; 1414 } 1415 } 1416 } 1417 return resized; 1418 } 1419 1420 ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic() 1421 : _next_klass(NULL) { 1422 ClassLoaderData* cld = ClassLoaderDataGraph::_head; 1423 Klass* klass = NULL; 1424 1425 // Find the first klass in the CLDG. 
1426 while (cld != NULL) { 1427 assert_locked_or_safepoint(cld->metaspace_lock()); 1428 klass = cld->_klasses; 1429 if (klass != NULL) { 1430 _next_klass = klass; 1431 return; 1432 } 1433 cld = cld->next(); 1434 } 1435 } 1436 1437 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) { 1438 Klass* next = klass->next_link(); 1439 if (next != NULL) { 1440 return next; 1441 } 1442 1443 // No more klasses in the current CLD. Time to find a new CLD. 1444 ClassLoaderData* cld = klass->class_loader_data(); 1445 assert_locked_or_safepoint(cld->metaspace_lock()); 1446 while (next == NULL) { 1447 cld = cld->next(); 1448 if (cld == NULL) { 1449 break; 1450 } 1451 next = cld->_klasses; 1452 } 1453 1454 return next; 1455 } 1456 1457 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() { 1458 Klass* head = _next_klass; 1459 1460 while (head != NULL) { 1461 Klass* next = next_klass_in_cldg(head); 1462 1463 Klass* old_head = Atomic::cmpxchg(next, &_next_klass, head); 1464 1465 if (old_head == head) { 1466 return head; // Won the CAS. 1467 } 1468 1469 head = old_head; 1470 } 1471 1472 // Nothing more for the iterator to hand out. 
1473 assert(head == NULL, "head is " PTR_FORMAT ", expected not null:", p2i(head)); 1474 return NULL; 1475 } 1476 1477 ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() { 1478 _data = ClassLoaderDataGraph::_head; 1479 } 1480 1481 ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {} 1482 1483 #ifndef PRODUCT 1484 // callable from debugger 1485 extern "C" int print_loader_data_graph() { 1486 ResourceMark rm; 1487 ClassLoaderDataGraph::print_on(tty); 1488 return 0; 1489 } 1490 1491 void ClassLoaderDataGraph::verify() { 1492 for (ClassLoaderData* data = _head; data != NULL; data = data->next()) { 1493 data->verify(); 1494 } 1495 } 1496 1497 void ClassLoaderDataGraph::print_on(outputStream * const out) { 1498 for (ClassLoaderData* data = _head; data != NULL; data = data->next()) { 1499 data->print_on(out); 1500 } 1501 } 1502 #endif // PRODUCT