/*
 * Copyright (c) 2012, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// A ClassLoaderData identifies the full set of class types that a class
// loader's name resolution strategy produces for a given configuration of the
// class loader.
// Class types in the ClassLoaderData may be defined from class file binaries
// provided by the class loader, or by other class loaders it interacts with
// according to its name resolution strategy.
//
// Class loaders that implement a deterministic name resolution strategy
// (including with respect to their delegation behavior), such as the boot, the
// platform, and the system loaders of the JDK's built-in class loader
// hierarchy, always produce the same linkset for a given configuration.
//
// ClassLoaderData carries information related to a linkset (e.g.,
// metaspace holding its klass definitions).
// The System Dictionary and related data structures (e.g., placeholder table,
// loader constraints table) as well as the runtime representation of classes
// only reference ClassLoaderData.
//
// Each instance of java.lang.ClassLoader holds a pointer to the ClassLoaderData
// that represents the loader's "linking domain" in the JVM.
//
// The bootstrap loader (represented by NULL) also has a ClassLoaderData,
// the singleton class the_null_class_loader_data().

#include "precompiled.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/dictionary.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/metadataOnStackMark.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/packageEntry.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/access.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/oopHandle.inline.hpp"
#include "oops/weakHandle.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/mutex.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/safepoint.hpp"
#include "runtime/safepointVerifiers.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
#include "utilities/ostream.hpp"
#include "utilities/ticks.hpp"
#if INCLUDE_JFR
#include "jfr/jfr.hpp"
#include "jfr/jfrEvents.hpp"
#endif

volatile size_t ClassLoaderDataGraph::_num_array_classes = 0;
volatile size_t ClassLoaderDataGraph::_num_instance_classes = 0;

ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
89 90 void ClassLoaderData::init_null_class_loader_data() { 91 assert(_the_null_class_loader_data == NULL, "cannot initialize twice"); 92 assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice"); 93 94 _the_null_class_loader_data = new ClassLoaderData(Handle(), false); 95 ClassLoaderDataGraph::_head = _the_null_class_loader_data; 96 assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be"); 97 98 LogTarget(Debug, class, loader, data) lt; 99 if (lt.is_enabled()) { 100 ResourceMark rm; 101 LogStream ls(lt); 102 ls.print("create "); 103 _the_null_class_loader_data->print_value_on(&ls); 104 ls.cr(); 105 } 106 } 107 108 // Obtain and set the class loader's name within the ClassLoaderData so 109 // it will be available for error messages, logging, JFR, etc. The name 110 // and klass are available after the class_loader oop is no longer alive, 111 // during unloading. 112 void ClassLoaderData::initialize_name(Handle class_loader) { 113 Thread* THREAD = Thread::current(); 114 ResourceMark rm(THREAD); 115 116 // Obtain the class loader's name. If the class loader's name was not 117 // explicitly set during construction, the CLD's _name field will be null. 118 oop cl_name = java_lang_ClassLoader::name(class_loader()); 119 if (cl_name != NULL) { 120 const char* cl_instance_name = java_lang_String::as_utf8_string(cl_name); 121 122 if (cl_instance_name != NULL && cl_instance_name[0] != '\0') { 123 // Can't throw InternalError and SymbolTable doesn't throw OOM anymore. 124 _name = SymbolTable::new_symbol(cl_instance_name, CATCH); 125 } 126 } 127 128 // Obtain the class loader's name and identity hash. If the class loader's 129 // name was not explicitly set during construction, the class loader's name and id 130 // will be set to the qualified class name of the class loader along with its 131 // identity hash. 
132 // If for some reason the ClassLoader's constructor has not been run, instead of 133 // leaving the _name_and_id field null, fall back to the external qualified class 134 // name. Thus CLD's _name_and_id field should never have a null value. 135 oop cl_name_and_id = java_lang_ClassLoader::nameAndId(class_loader()); 136 const char* cl_instance_name_and_id = 137 (cl_name_and_id == NULL) ? _class_loader_klass->external_name() : 138 java_lang_String::as_utf8_string(cl_name_and_id); 139 assert(cl_instance_name_and_id != NULL && cl_instance_name_and_id[0] != '\0', "class loader has no name and id"); 140 // Can't throw InternalError and SymbolTable doesn't throw OOM anymore. 141 _name_and_id = SymbolTable::new_symbol(cl_instance_name_and_id, CATCH); 142 } 143 144 ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous) : 145 _metaspace(NULL), 146 _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true, 147 Monitor::_safepoint_check_never)), 148 _unloading(false), _is_anonymous(is_anonymous), 149 _modified_oops(true), _accumulated_modified_oops(false), 150 // An anonymous class loader data doesn't have anything to keep 151 // it from being unloaded during parsing of the anonymous class. 152 // The null-class-loader should always be kept alive. 153 _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0), 154 _claimed(0), 155 _handles(), 156 _klasses(NULL), _packages(NULL), _modules(NULL), _unnamed_module(NULL), _dictionary(NULL), 157 _jmethod_ids(NULL), 158 _deallocate_list(NULL), 159 _next(NULL), 160 _class_loader_klass(NULL), _name(NULL), _name_and_id(NULL) { 161 162 if (!h_class_loader.is_null()) { 163 _class_loader = _handles.add(h_class_loader()); 164 _class_loader_klass = h_class_loader->klass(); 165 } 166 167 if (!is_anonymous) { 168 // The holder is initialized later for anonymous classes, and before calling anything 169 // that call class_loader(). 
170 initialize_holder(h_class_loader); 171 172 // A ClassLoaderData created solely for an anonymous class should never have a 173 // ModuleEntryTable or PackageEntryTable created for it. The defining package 174 // and module for an anonymous class will be found in its host class. 175 _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size); 176 if (h_class_loader.is_null()) { 177 // Create unnamed module for boot loader 178 _unnamed_module = ModuleEntry::create_boot_unnamed_module(this); 179 } else { 180 // Create unnamed module for all other loaders 181 _unnamed_module = ModuleEntry::create_unnamed_module(this); 182 } 183 _dictionary = create_dictionary(); 184 } 185 186 NOT_PRODUCT(_dependency_count = 0); // number of class loader dependencies 187 188 JFR_ONLY(INIT_ID(this);) 189 } 190 191 ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() { 192 Chunk* c = _head; 193 while (c != NULL) { 194 Chunk* next = c->_next; 195 delete c; 196 c = next; 197 } 198 } 199 200 oop* ClassLoaderData::ChunkedHandleList::add(oop o) { 201 if (_head == NULL || _head->_size == Chunk::CAPACITY) { 202 Chunk* next = new Chunk(_head); 203 OrderAccess::release_store(&_head, next); 204 } 205 oop* handle = &_head->_data[_head->_size]; 206 *handle = o; 207 OrderAccess::release_store(&_head->_size, _head->_size + 1); 208 return handle; 209 } 210 211 int ClassLoaderData::ChunkedHandleList::count() const { 212 int count = 0; 213 Chunk* chunk = _head; 214 while (chunk != NULL) { 215 count += chunk->_size; 216 chunk = chunk->_next; 217 } 218 return count; 219 } 220 221 inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) { 222 for (juint i = 0; i < size; i++) { 223 if (c->_data[i] != NULL) { 224 f->do_oop(&c->_data[i]); 225 } 226 } 227 } 228 229 void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) { 230 Chunk* head = OrderAccess::load_acquire(&_head); 231 if (head != NULL) { 232 // Must be careful when 
reading size of head 233 oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size)); 234 for (Chunk* c = head->_next; c != NULL; c = c->_next) { 235 oops_do_chunk(f, c, c->_size); 236 } 237 } 238 } 239 240 class VerifyContainsOopClosure : public OopClosure { 241 oop _target; 242 bool _found; 243 244 public: 245 VerifyContainsOopClosure(oop target) : _target(target), _found(false) {} 246 247 void do_oop(oop* p) { 248 if (p != NULL && oopDesc::equals(RawAccess<>::oop_load(p), _target)) { 249 _found = true; 250 } 251 } 252 253 void do_oop(narrowOop* p) { 254 // The ChunkedHandleList should not contain any narrowOop 255 ShouldNotReachHere(); 256 } 257 258 bool found() const { 259 return _found; 260 } 261 }; 262 263 bool ClassLoaderData::ChunkedHandleList::contains(oop p) { 264 VerifyContainsOopClosure cl(p); 265 oops_do(&cl); 266 return cl.found(); 267 } 268 269 #ifndef PRODUCT 270 bool ClassLoaderData::ChunkedHandleList::owner_of(oop* oop_handle) { 271 Chunk* chunk = _head; 272 while (chunk != NULL) { 273 if (&(chunk->_data[0]) <= oop_handle && oop_handle < &(chunk->_data[chunk->_size])) { 274 return true; 275 } 276 chunk = chunk->_next; 277 } 278 return false; 279 } 280 #endif // PRODUCT 281 282 bool ClassLoaderData::claim() { 283 if (_claimed == 1) { 284 return false; 285 } 286 287 return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0; 288 } 289 290 // Anonymous classes have their own ClassLoaderData that is marked to keep alive 291 // while the class is being parsed, and if the class appears on the module fixup list. 292 // Due to the uniqueness that no other class shares the anonymous class' name or 293 // ClassLoaderData, no other non-GC thread has knowledge of the anonymous class while 294 // it is being defined, therefore _keep_alive is not volatile or atomic. 
295 void ClassLoaderData::inc_keep_alive() { 296 if (is_anonymous()) { 297 assert(_keep_alive >= 0, "Invalid keep alive increment count"); 298 _keep_alive++; 299 } 300 } 301 302 void ClassLoaderData::dec_keep_alive() { 303 if (is_anonymous()) { 304 assert(_keep_alive > 0, "Invalid keep alive decrement count"); 305 _keep_alive--; 306 } 307 } 308 309 void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) { 310 if (must_claim && !claim()) { 311 return; 312 } 313 314 // Only clear modified_oops after the ClassLoaderData is claimed. 315 if (clear_mod_oops) { 316 clear_modified_oops(); 317 } 318 319 _handles.oops_do(f); 320 } 321 322 void ClassLoaderData::classes_do(KlassClosure* klass_closure) { 323 // Lock-free access requires load_acquire 324 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) { 325 klass_closure->do_klass(k); 326 assert(k != k->next_link(), "no loops!"); 327 } 328 } 329 330 void ClassLoaderData::classes_do(void f(Klass * const)) { 331 // Lock-free access requires load_acquire 332 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) { 333 f(k); 334 assert(k != k->next_link(), "no loops!"); 335 } 336 } 337 338 void ClassLoaderData::methods_do(void f(Method*)) { 339 // Lock-free access requires load_acquire 340 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) { 341 if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) { 342 InstanceKlass::cast(k)->methods_do(f); 343 } 344 } 345 } 346 347 void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) { 348 // Lock-free access requires load_acquire 349 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) { 350 // Do not filter ArrayKlass oops here... 
351 if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) { 352 klass_closure->do_klass(k); 353 } 354 } 355 } 356 357 void ClassLoaderData::classes_do(void f(InstanceKlass*)) { 358 // Lock-free access requires load_acquire 359 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) { 360 if (k->is_instance_klass()) { 361 f(InstanceKlass::cast(k)); 362 } 363 assert(k != k->next_link(), "no loops!"); 364 } 365 } 366 367 void ClassLoaderData::modules_do(void f(ModuleEntry*)) { 368 assert_locked_or_safepoint(Module_lock); 369 if (_unnamed_module != NULL) { 370 f(_unnamed_module); 371 } 372 if (_modules != NULL) { 373 for (int i = 0; i < _modules->table_size(); i++) { 374 for (ModuleEntry* entry = _modules->bucket(i); 375 entry != NULL; 376 entry = entry->next()) { 377 f(entry); 378 } 379 } 380 } 381 } 382 383 void ClassLoaderData::packages_do(void f(PackageEntry*)) { 384 assert_locked_or_safepoint(Module_lock); 385 if (_packages != NULL) { 386 for (int i = 0; i < _packages->table_size(); i++) { 387 for (PackageEntry* entry = _packages->bucket(i); 388 entry != NULL; 389 entry = entry->next()) { 390 f(entry); 391 } 392 } 393 } 394 } 395 396 void ClassLoaderData::record_dependency(const Klass* k) { 397 assert(k != NULL, "invariant"); 398 399 ClassLoaderData * const from_cld = this; 400 ClassLoaderData * const to_cld = k->class_loader_data(); 401 402 // Do not need to record dependency if the dependency is to a class whose 403 // class loader data is never freed. (i.e. the dependency's class loader 404 // is one of the three builtin class loaders and the dependency is not 405 // anonymous.) 406 if (to_cld->is_permanent_class_loader_data()) { 407 return; 408 } 409 410 oop to; 411 if (to_cld->is_anonymous()) { 412 // Just return if an anonymous class is attempting to record a dependency 413 // to itself. (Note that every anonymous class has its own unique class 414 // loader data.) 
415 if (to_cld == from_cld) { 416 return; 417 } 418 // Anonymous class dependencies are through the mirror. 419 to = k->java_mirror(); 420 } else { 421 to = to_cld->class_loader(); 422 oop from = from_cld->class_loader(); 423 424 // Just return if this dependency is to a class with the same or a parent 425 // class_loader. 426 if (oopDesc::equals(from, to) || java_lang_ClassLoader::isAncestor(from, to)) { 427 return; // this class loader is in the parent list, no need to add it. 428 } 429 } 430 431 // It's a dependency we won't find through GC, add it. 432 if (!_handles.contains(to)) { 433 NOT_PRODUCT(Atomic::inc(&_dependency_count)); 434 LogTarget(Trace, class, loader, data) lt; 435 if (lt.is_enabled()) { 436 ResourceMark rm; 437 LogStream ls(lt); 438 ls.print("adding dependency from "); 439 print_value_on(&ls); 440 ls.print(" to "); 441 to_cld->print_value_on(&ls); 442 ls.cr(); 443 } 444 Handle dependency(Thread::current(), to); 445 add_handle(dependency); 446 // Added a potentially young gen oop to the ClassLoaderData 447 record_modified_oops(); 448 } 449 } 450 451 452 void ClassLoaderDataGraph::clear_claimed_marks() { 453 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { 454 cld->clear_claimed(); 455 } 456 } 457 458 void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) { 459 { 460 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag); 461 Klass* old_value = _klasses; 462 k->set_next_link(old_value); 463 // Link the new item into the list, making sure the linked class is stable 464 // since the list can be walked without a lock 465 OrderAccess::release_store(&_klasses, k); 466 if (k->is_array_klass()) { 467 ClassLoaderDataGraph::inc_array_classes(1); 468 } else { 469 ClassLoaderDataGraph::inc_instance_classes(1); 470 } 471 } 472 473 if (publicize) { 474 LogTarget(Trace, class, loader, data) lt; 475 if (lt.is_enabled()) { 476 ResourceMark rm; 477 LogStream ls(lt); 478 ls.print("Adding k: " PTR_FORMAT " %s to ", 
p2i(k), k->external_name()); 479 print_value_on(&ls); 480 ls.cr(); 481 } 482 } 483 } 484 485 // Class iterator used by the compiler. It gets some number of classes at 486 // a safepoint to decay invocation counters on the methods. 487 class ClassLoaderDataGraphKlassIteratorStatic { 488 ClassLoaderData* _current_loader_data; 489 Klass* _current_class_entry; 490 public: 491 492 ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {} 493 494 InstanceKlass* try_get_next_class() { 495 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint"); 496 size_t max_classes = ClassLoaderDataGraph::num_instance_classes(); 497 assert(max_classes > 0, "should not be called with no instance classes"); 498 for (size_t i = 0; i < max_classes; ) { 499 500 if (_current_class_entry != NULL) { 501 Klass* k = _current_class_entry; 502 _current_class_entry = _current_class_entry->next_link(); 503 504 if (k->is_instance_klass()) { 505 InstanceKlass* ik = InstanceKlass::cast(k); 506 i++; // count all instance classes found 507 // Not yet loaded classes are counted in max_classes 508 // but only return loaded classes. 509 if (ik->is_loaded()) { 510 return ik; 511 } 512 } 513 } else { 514 // Go to next CLD 515 if (_current_loader_data != NULL) { 516 _current_loader_data = _current_loader_data->next(); 517 } 518 // Start at the beginning 519 if (_current_loader_data == NULL) { 520 _current_loader_data = ClassLoaderDataGraph::_head; 521 } 522 523 _current_class_entry = _current_loader_data->klasses(); 524 } 525 } 526 // Should never be reached unless all instance classes have failed or are not fully loaded. 527 // Caller handles NULL. 528 return NULL; 529 } 530 531 // If the current class for the static iterator is a class being unloaded or 532 // deallocated, adjust the current class. 
533 void adjust_saved_class(ClassLoaderData* cld) { 534 if (_current_loader_data == cld) { 535 _current_loader_data = cld->next(); 536 if (_current_loader_data != NULL) { 537 _current_class_entry = _current_loader_data->klasses(); 538 } // else try_get_next_class will start at the head 539 } 540 } 541 542 void adjust_saved_class(Klass* klass) { 543 if (_current_class_entry == klass) { 544 _current_class_entry = klass->next_link(); 545 } 546 } 547 }; 548 549 static ClassLoaderDataGraphKlassIteratorStatic static_klass_iterator; 550 551 InstanceKlass* ClassLoaderDataGraph::try_get_next_class() { 552 return static_klass_iterator.try_get_next_class(); 553 } 554 555 556 void ClassLoaderData::initialize_holder(Handle loader_or_mirror) { 557 if (loader_or_mirror() != NULL) { 558 assert(_holder.is_null(), "never replace holders"); 559 _holder = WeakHandle<vm_class_loader_data>::create(loader_or_mirror); 560 } 561 } 562 563 // Remove a klass from the _klasses list for scratch_class during redefinition 564 // or parsed class in the case of an error. 565 void ClassLoaderData::remove_class(Klass* scratch_class) { 566 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint"); 567 568 // Adjust global class iterator. 569 static_klass_iterator.adjust_saved_class(scratch_class); 570 571 Klass* prev = NULL; 572 for (Klass* k = _klasses; k != NULL; k = k->next_link()) { 573 if (k == scratch_class) { 574 if (prev == NULL) { 575 _klasses = k->next_link(); 576 } else { 577 Klass* next = k->next_link(); 578 prev->set_next_link(next); 579 } 580 581 if (k->is_array_klass()) { 582 ClassLoaderDataGraph::dec_array_classes(1); 583 } else { 584 ClassLoaderDataGraph::dec_instance_classes(1); 585 } 586 587 return; 588 } 589 prev = k; 590 assert(k != k->next_link(), "no loops!"); 591 } 592 ShouldNotReachHere(); // should have found this class!! 
593 } 594 595 void ClassLoaderData::unload() { 596 _unloading = true; 597 598 LogTarget(Debug, class, loader, data) lt; 599 if (lt.is_enabled()) { 600 ResourceMark rm; 601 LogStream ls(lt); 602 ls.print("unload"); 603 print_value_on(&ls); 604 ls.cr(); 605 } 606 607 // Some items on the _deallocate_list need to free their C heap structures 608 // if they are not already on the _klasses list. 609 unload_deallocate_list(); 610 611 // Tell serviceability tools these classes are unloading 612 // after erroneous classes are released. 613 classes_do(InstanceKlass::notify_unload_class); 614 615 // Clean up global class iterator for compiler 616 static_klass_iterator.adjust_saved_class(this); 617 } 618 619 ModuleEntryTable* ClassLoaderData::modules() { 620 // Lazily create the module entry table at first request. 621 // Lock-free access requires load_acquire. 622 ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules); 623 if (modules == NULL) { 624 MutexLocker m1(Module_lock); 625 // Check if _modules got allocated while we were waiting for this lock. 
626 if ((modules = _modules) == NULL) { 627 modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size); 628 629 { 630 MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag); 631 // Ensure _modules is stable, since it is examined without a lock 632 OrderAccess::release_store(&_modules, modules); 633 } 634 } 635 } 636 return modules; 637 } 638 639 const int _boot_loader_dictionary_size = 1009; 640 const int _default_loader_dictionary_size = 107; 641 642 Dictionary* ClassLoaderData::create_dictionary() { 643 assert(!is_anonymous(), "anonymous class loader data do not have a dictionary"); 644 int size; 645 bool resizable = false; 646 if (_the_null_class_loader_data == NULL) { 647 size = _boot_loader_dictionary_size; 648 resizable = true; 649 } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) { 650 size = 1; // there's only one class in relection class loader and no initiated classes 651 } else if (is_system_class_loader_data()) { 652 size = _boot_loader_dictionary_size; 653 resizable = true; 654 } else { 655 size = _default_loader_dictionary_size; 656 resizable = true; 657 } 658 if (!DynamicallyResizeSystemDictionaries || DumpSharedSpaces || UseSharedSpaces) { 659 resizable = false; 660 } 661 return new Dictionary(this, size, resizable); 662 } 663 664 // Tell the GC to keep this klass alive while iterating ClassLoaderDataGraph 665 oop ClassLoaderData::holder_phantom() const { 666 // A klass that was previously considered dead can be looked up in the 667 // CLD/SD, and its _java_mirror or _class_loader can be stored in a root 668 // or a reachable object making it alive again. The SATB part of G1 needs 669 // to get notified about this potential resurrection, otherwise the marking 670 // might not find the object. 
671 if (!_holder.is_null()) { // NULL class_loader 672 return _holder.resolve(); 673 } else { 674 return NULL; 675 } 676 } 677 678 // Unloading support 679 bool ClassLoaderData::is_alive() const { 680 bool alive = keep_alive() // null class loader and incomplete anonymous klasses. 681 || (_holder.peek() != NULL); // and not cleaned by the GC weak handle processing. 682 683 return alive; 684 } 685 686 class ReleaseKlassClosure: public KlassClosure { 687 private: 688 size_t _instance_class_released; 689 size_t _array_class_released; 690 public: 691 ReleaseKlassClosure() : _instance_class_released(0), _array_class_released(0) { } 692 693 size_t instance_class_released() const { return _instance_class_released; } 694 size_t array_class_released() const { return _array_class_released; } 695 696 void do_klass(Klass* k) { 697 if (k->is_array_klass()) { 698 _array_class_released ++; 699 } else { 700 assert(k->is_instance_klass(), "Must be"); 701 _instance_class_released ++; 702 InstanceKlass::release_C_heap_structures(InstanceKlass::cast(k)); 703 } 704 } 705 }; 706 707 ClassLoaderData::~ClassLoaderData() { 708 // Release C heap structures for all the classes. 709 ReleaseKlassClosure cl; 710 classes_do(&cl); 711 712 ClassLoaderDataGraph::dec_array_classes(cl.array_class_released()); 713 ClassLoaderDataGraph::dec_instance_classes(cl.instance_class_released()); 714 715 // Release the WeakHandle 716 _holder.release(); 717 718 // Release C heap allocated hashtable for all the packages. 719 if (_packages != NULL) { 720 // Destroy the table itself 721 delete _packages; 722 _packages = NULL; 723 } 724 725 // Release C heap allocated hashtable for all the modules. 
726 if (_modules != NULL) { 727 // Destroy the table itself 728 delete _modules; 729 _modules = NULL; 730 } 731 732 // Release C heap allocated hashtable for the dictionary 733 if (_dictionary != NULL) { 734 // Destroy the table itself 735 delete _dictionary; 736 _dictionary = NULL; 737 } 738 739 if (_unnamed_module != NULL) { 740 _unnamed_module->delete_unnamed_module(); 741 _unnamed_module = NULL; 742 } 743 744 // release the metaspace 745 ClassLoaderMetaspace *m = _metaspace; 746 if (m != NULL) { 747 _metaspace = NULL; 748 delete m; 749 } 750 // Clear all the JNI handles for methods 751 // These aren't deallocated and are going to look like a leak, but that's 752 // needed because we can't really get rid of jmethodIDs because we don't 753 // know when native code is going to stop using them. The spec says that 754 // they're "invalid" but existing programs likely rely on their being 755 // NULL after class unloading. 756 if (_jmethod_ids != NULL) { 757 Method::clear_jmethod_ids(this); 758 } 759 // Delete lock 760 delete _metaspace_lock; 761 762 // Delete free list 763 if (_deallocate_list != NULL) { 764 delete _deallocate_list; 765 } 766 } 767 768 // Returns true if this class loader data is for the app class loader 769 // or a user defined system class loader. (Note that the class loader 770 // data may be anonymous.) 771 bool ClassLoaderData::is_system_class_loader_data() const { 772 return SystemDictionary::is_system_class_loader(class_loader()); 773 } 774 775 // Returns true if this class loader data is for the platform class loader. 776 // (Note that the class loader data may be anonymous.) 777 bool ClassLoaderData::is_platform_class_loader_data() const { 778 return SystemDictionary::is_platform_class_loader(class_loader()); 779 } 780 781 // Returns true if the class loader for this class loader data is one of 782 // the 3 builtin (boot application/system or platform) class loaders, 783 // including a user-defined system class loader. 
Note that if the class 784 // loader data is for an anonymous class then it may get freed by a GC 785 // even if its class loader is one of these loaders. 786 bool ClassLoaderData::is_builtin_class_loader_data() const { 787 return (is_boot_class_loader_data() || 788 SystemDictionary::is_system_class_loader(class_loader()) || 789 SystemDictionary::is_platform_class_loader(class_loader())); 790 } 791 792 // Returns true if this class loader data is a class loader data 793 // that is not ever freed by a GC. It must be one of the builtin 794 // class loaders and not anonymous. 795 bool ClassLoaderData::is_permanent_class_loader_data() const { 796 return is_builtin_class_loader_data() && !is_anonymous(); 797 } 798 799 ClassLoaderMetaspace* ClassLoaderData::metaspace_non_null() { 800 // If the metaspace has not been allocated, create a new one. Might want 801 // to create smaller arena for Reflection class loaders also. 802 // The reason for the delayed allocation is because some class loaders are 803 // simply for delegating with no metadata of their own. 804 // Lock-free access requires load_acquire. 805 ClassLoaderMetaspace* metaspace = OrderAccess::load_acquire(&_metaspace); 806 if (metaspace == NULL) { 807 MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag); 808 // Check if _metaspace got allocated while we were waiting for this lock. 
// NOTE(review): this is the tail of the lazily-initializing metaspace accessor;
// its header (and the enclosing lock scope) precede this chunk.  The re-test of
// _metaspace here is the second half of a double-checked-locking pattern.
  if ((metaspace = _metaspace) == NULL) {
    // Choose a metaspace type sized for the kind of loader this CLD represents.
    if (this == the_null_class_loader_data()) {
      assert (class_loader() == NULL, "Must be");
      metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
    } else if (is_anonymous()) {
      metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
    } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
      metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
    } else {
      metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
    }
    // Ensure _metaspace is stable, since it is examined without a lock
    OrderAccess::release_store(&_metaspace, metaspace);
  }
  }
  return metaspace;
}

// Root a new oop in this CLD's handle area and return an OopHandle for it.
// The metaspace lock guards the handle area; record_modified_oops() notifies
// a concurrent collector that this CLD's oop set changed.
OopHandle ClassLoaderData::add_handle(Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  record_modified_oops();
  return OopHandle(_handles.add(h()));
}

// Clear the oop stored in a handle previously returned by add_handle().
// Only the stored oop is NULLed; the handle slot itself is not reclaimed.
void ClassLoaderData::remove_handle(OopHandle h) {
  assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
  oop* ptr = h.ptr_raw();
  if (ptr != NULL) {
    assert(_handles.owner_of(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
    NativeAccess<>::oop_store(ptr, oop(NULL));
  }
}

// Initialize 'dest' at most once: if another thread already installed a
// handle (checked under the metaspace lock), keep the existing one.
void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  if (dest.resolve() != NULL) {
    return;
  } else {
    dest = _handles.add(h());
  }
}

// Add this metadata pointer to be freed when it's safe.  This is only during
// class unloading because Handles might point to this metadata field.
void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
  // Metadata in shared region isn't deleted.
  if (!m->is_shared()) {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    if (_deallocate_list == NULL) {
      // Lazily allocate the C-heap-backed list on first use.
      _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
    }
    _deallocate_list->append_if_missing(m);
  }
}

// Deallocate free metadata on the free list.  How useful the PermGen was!
void ClassLoaderData::free_deallocate_list() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        ShouldNotReachHere();
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
    }
  }
}

// This is distinct from free_deallocate_list.  For class loader data that are
// unloading, this frees the C heap memory for items on the list, and unlinks
// scratch or error classes so that unloading events aren't triggered for these
// classes.  The metadata is removed with the unloading metaspace.
// There isn't C heap memory allocated for methods, so nothing is done for them.
void ClassLoaderData::unload_deallocate_list() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(is_unloading(), "only called for ClassLoaderData that are unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    assert (!m->on_stack(), "wouldn't be unloading if this were so");
    _deallocate_list->remove_at(i);
    if (m->is_constantPool()) {
      ((ConstantPool*)m)->release_C_heap_structures();
    } else if (m->is_klass()) {
      InstanceKlass* ik = (InstanceKlass*)m;
      // also releases ik->constants() C heap memory
      InstanceKlass::release_C_heap_structures(ik);
      // Remove the class so unloading events aren't triggered for
      // this class (scratch or error class) in do_unloading().
      remove_class(ik);
    }
  }
}

// These anonymous class loaders are to contain classes used for JSR292
ClassLoaderData* ClassLoaderData::anonymous_class_loader_data(Handle loader) {
  // Add a new class loader data to the graph.
  return ClassLoaderDataGraph::add(loader, true);
}

// Caller needs ResourceMark.
// If the class loader's _name has not been explicitly set, the class loader's
// qualified class name is returned.
const char* ClassLoaderData::loader_name() const {
  if (_class_loader_klass == NULL) {
    // Bootstrap loader: there is no java.lang.ClassLoader instance.
    return BOOTSTRAP_LOADER_NAME;
  } else if (_name != NULL) {
    // Name was explicitly set on the loader.
    return _name->as_C_string();
  } else {
    // Fall back to the loader's qualified class name.
    return _class_loader_klass->external_name();
  }
}

// Caller needs ResourceMark.
// Format of the _name_and_id is as follows:
//   If the defining loader has a name explicitly set then '<loader-name>' @<id>
//   If the defining loader has no name then <qualified-class-name> @<id>
//   If built-in loader, then omit '@<id>' as there is only one instance.
const char* ClassLoaderData::loader_name_and_id() const {
  if (_class_loader_klass == NULL) {
    return "'" BOOTSTRAP_LOADER_NAME "'";
  } else if (_name_and_id != NULL) {
    return _name_and_id->as_C_string();
  } else {
    // May be called in a race before _name_and_id is initialized.
    return _class_loader_klass->external_name();
  }
}

// One-line summary of this CLD, suitable for log messages.
void ClassLoaderData::print_value_on(outputStream* out) const {
  if (!is_unloading() && class_loader() != NULL) {
    out->print("loader data: " INTPTR_FORMAT " for instance ", p2i(this));
    class_loader()->print_value_on(out);  // includes loader_name_and_id() and address of class loader instance
  } else {
    // loader data: 0xsomeaddr of 'bootstrap'
    out->print("loader data: " INTPTR_FORMAT " of %s", p2i(this), loader_name_and_id());
  }
  if (is_anonymous()) {
    out->print(" anonymous");
  }
}

#ifndef PRODUCT
// Debug-only detailed dump of this CLD's state and bookkeeping counters.
void ClassLoaderData::print_on(outputStream* out) const {
  out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: %s {",
             p2i(this), p2i(_class_loader.ptr_raw()), loader_name_and_id());
  if (is_anonymous()) out->print(" anonymous");
  if (claimed()) out->print(" claimed");
  if (is_unloading()) out->print(" unloading");
  out->print(" metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));

  if (_jmethod_ids != NULL) {
    Method::print_jmethod_ids(this, out);
  }
  out->print(" handles count %d", _handles.count());
  out->print(" dependencies %d", _dependency_count);
  out->print_cr("}");
}
#endif // PRODUCT

// Check invariants linking this CLD, its loader oop, its metaspace and its
// klass list.  Requires the metaspace lock or a safepoint.
void ClassLoaderData::verify() {
  assert_locked_or_safepoint(_metaspace_lock);
  oop cl = class_loader();

  guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
  guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");

  // Verify the integrity of the allocated space.
  if (metaspace_or_null() != NULL) {
    metaspace_or_null()->verify();
  }

  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    guarantee(k->class_loader_data() == this, "Must be the same");
    k->verify();
    assert(k != k->next_link(), "no loops!");
  }
}

bool ClassLoaderData::contains_klass(Klass* klass) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k == klass) return true;
  }
  return false;
}


// GC root of class loader data created.
ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;

bool ClassLoaderDataGraph::_should_purge = false;
bool ClassLoaderDataGraph::_metaspace_oom = false;

// Add a new class loader data node to the list.
// Assign the newly created
// ClassLoaderData into the java/lang/ClassLoader object as a hidden field.
ClassLoaderData* ClassLoaderDataGraph::add_to_graph(Handle loader, bool is_anonymous) {
  NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
                                     // ClassLoaderData in the graph since the CLD
                                     // contains oops in _handles that must be walked.

  ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous);

  if (!is_anonymous) {
    // First, atomically set it.  If another thread already installed a CLD in
    // the loader oop, we lost the race: discard ours and use the winner's.
    ClassLoaderData* old = java_lang_ClassLoader::cmpxchg_loader_data(cld, loader(), NULL);
    if (old != NULL) {
      delete cld;
      // Returns the data.
      return old;
    }
  }

  // We won the race, and therefore the task of adding the data to the list of
  // class loader data.  Lock-free push onto the head of the graph list: retry
  // the CAS until no other thread has changed _head under us.
  ClassLoaderData** list_head = &_head;
  ClassLoaderData* next = _head;

  do {
    cld->set_next(next);
    ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
    if (exchanged == next) {
      LogTarget(Debug, class, loader, data) lt;
      if (lt.is_enabled()) {
        ResourceMark rm;
        LogStream ls(lt);
        ls.print("create ");
        cld->print_value_on(&ls);
        ls.cr();
      }
      return cld;
    }
    next = exchanged;
  } while (true);
}

ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous) {
  ClassLoaderData* loader_data = add_to_graph(loader, is_anonymous);
  // Initialize _name and _name_and_id after the loader data is added to the
  // CLDG because adding the Symbol for _name and _name_and_id might safepoint.
  if (loader.not_null()) {
    loader_data->initialize_name(loader);
  }
  return loader_data;
}

// Apply 'f' to the oops of every CLD in the graph.
void ClassLoaderDataGraph::oops_do(OopClosure* f, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->oops_do(f, must_claim);
  }
}

// Same as oops_do, but only for CLDs that must be kept alive.
void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    if (cld->keep_alive()) {
      cld->oops_do(f, must_claim);
    }
  }
}

// Strong-root variant: with class unloading enabled only keep-alive CLDs are
// strong roots; otherwise every CLD is.
void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, bool must_claim) {
  if (ClassUnloading) {
    keep_alive_oops_do(f, must_claim);
  } else {
    oops_do(f, must_claim);
  }
}

void ClassLoaderDataGraph::cld_do(CLDClosure* cl) {
  for (ClassLoaderData* cld = _head; cl != NULL && cld != NULL; cld = cld->next()) {
    cl->do_cld(cld);
  }
}

void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cl->do_cld(cld);
  }
}

// Dispatch each CLD to the 'strong' closure if it must be kept alive,
// otherwise to 'weak'; a NULL closure means "skip this category".
void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->_next) {
    CLDClosure* closure = cld->keep_alive() ? strong : weak;
    if (closure != NULL) {
      closure->do_cld(cld);
    }
  }
}

void ClassLoaderDataGraph::keep_alive_cld_do(CLDClosure* cl) {
  roots_cld_do(cl, NULL);
}

void ClassLoaderDataGraph::always_strong_cld_do(CLDClosure* cl) {
  if (ClassUnloading) {
    keep_alive_cld_do(cl);
  } else {
    cld_do(cl);
  }
}

// For each CLD, apply the closure to its classes.  The holder Handle keeps
// the CLD's holder object reachable across the iteration so the CLD cannot
// be concurrently unloaded while in use.
void ClassLoaderDataGraph::classes_do(KlassClosure* klass_closure) {
  Thread* thread = Thread::current();
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    Handle holder(thread, cld->holder_phantom());
    cld->classes_do(klass_closure);
  }
}

void ClassLoaderDataGraph::classes_do(void f(Klass* const)) {
  Thread* thread = Thread::current();
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    Handle holder(thread, cld->holder_phantom());
    cld->classes_do(f);
  }
}

void ClassLoaderDataGraph::methods_do(void f(Method*)) {
  Thread* thread = Thread::current();
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    Handle holder(thread, cld->holder_phantom());
    cld->methods_do(f);
  }
}

void ClassLoaderDataGraph::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  Thread* thread = Thread::current();
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    Handle holder(thread, cld->holder_phantom());
    cld->modules_do(f);
  }
}

void ClassLoaderDataGraph::modules_unloading_do(void f(ModuleEntry*)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->modules_do(f);
  }
}

void ClassLoaderDataGraph::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  Thread* thread = Thread::current();
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    Handle holder(thread, cld->holder_phantom());
    cld->packages_do(f);
  }
}

void ClassLoaderDataGraph::packages_unloading_do(void f(PackageEntry*)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->packages_do(f);
  }
}

void ClassLoaderDataGraph::loaded_classes_do(KlassClosure* klass_closure) {
  Thread* thread = Thread::current();
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    Handle holder(thread, cld->holder_phantom());
    cld->loaded_classes_do(klass_closure);
  }
}

void ClassLoaderDataGraph::classes_unloading_do(void f(Klass* const)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->classes_do(f);
  }
}

// Iterate over every CLD that has a dictionary (skips anonymous CLDs, which
// have none).
#define FOR_ALL_DICTIONARY(X) for (ClassLoaderData* X = _head; X != NULL; X = X->next()) \
                                if (X->dictionary() != NULL)

// Walk classes in the loaded class dictionaries in various forms.
// Only walks the classes defined in this class loader.
void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*)) {
  Thread* thread = Thread::current();
  FOR_ALL_DICTIONARY(cld) {
    // Keep the CLD's holder reachable across the walk.
    Handle holder(thread, cld->holder_phantom());
    cld->dictionary()->classes_do(f);
  }
}

// Only walks the classes defined in this class loader.
void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*, TRAPS), TRAPS) {
  Thread* thread = Thread::current();
  FOR_ALL_DICTIONARY(cld) {
    Handle holder(thread, cld->holder_phantom());
    cld->dictionary()->classes_do(f, CHECK);
  }
}

// Walks all entries in the dictionary including entries initiated by this class loader.
void ClassLoaderDataGraph::dictionary_all_entries_do(void f(InstanceKlass*, ClassLoaderData*)) {
  Thread* thread = Thread::current();
  FOR_ALL_DICTIONARY(cld) {
    Handle holder(thread, cld->holder_phantom());
    cld->dictionary()->all_entries_do(f);
  }
}

void ClassLoaderDataGraph::verify_dictionary() {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->verify();
  }
}

void ClassLoaderDataGraph::print_dictionary(outputStream* st) {
  FOR_ALL_DICTIONARY(cld) {
    st->print("Dictionary for ");
    cld->print_value_on(st);
    st->cr();
    cld->dictionary()->print_on(st);
    st->cr();
  }
}

void ClassLoaderDataGraph::print_dictionary_statistics(outputStream* st) {
  FOR_ALL_DICTIONARY(cld) {
    ResourceMark rm;
    stringStream tempst;
    tempst.print("System Dictionary for %s class loader", cld->loader_name_and_id());
    cld->dictionary()->print_table_statistics(st, tempst.as_string());
  }
}

// Collect the unclaimed CLDs that were added since the last snapshot taken by
// remember_new_clds(true).  Returns a resource-allocated array.
GrowableArray<ClassLoaderData*>* ClassLoaderDataGraph::new_clds() {
  assert(_head == NULL || _saved_head != NULL, "remember_new_clds(true) not called?");

  GrowableArray<ClassLoaderData*>* array = new GrowableArray<ClassLoaderData*>();

  // The CLDs in [_head, _saved_head] were all added during last call to remember_new_clds(true);
  ClassLoaderData* curr = _head;
  while (curr != _saved_head) {
    if (!curr->claimed()) {
      array->push(curr);
      LogTarget(Debug, class, loader, data) lt;
      if (lt.is_enabled()) {
        LogStream ls(lt);
        ls.print("found new CLD: ");
        curr->print_value_on(&ls);
        ls.cr();
      }
    }

    curr = curr->_next;
  }

  return array;
}

#ifndef PRODUCT
// Debug-only membership test for the live CLD list.
bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    if (loader_data == data) {
      return true;
    }
  }

  return false;
}
#endif // PRODUCT

#if INCLUDE_JFR
// Shared timestamp so every class-unload event in one batch gets the same end time.
static Ticks class_unload_time;
static void post_class_unload_event(Klass* const k) {
  assert(k != NULL, "invariant");
  EventClassUnload event(UNTIMED);
  event.set_endtime(class_unload_time);
  event.set_unloadedClass(k);
  event.set_definingClassLoader(k->class_loader_data());
  event.commit();
}

// Emit a JFR ClassUnload event for every class in the unloading CLDs.
static void post_class_unload_events() {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  if (Jfr::is_enabled()) {
    if (EventClassUnload::is_enabled()) {
      class_unload_time = Ticks::now();
      ClassLoaderDataGraph::classes_unloading_do(&post_class_unload_event);
    }
    Jfr::on_unloading_classes();
  }
}
#endif // INCLUDE_JFR

// Move class loader data from main list to the unloaded list for unloading
// and deallocation later.  Returns true if any dead loader was found.
bool ClassLoaderDataGraph::do_unloading(bool clean_previous_versions) {

  ClassLoaderData* data = _head;
  ClassLoaderData* prev = NULL;
  bool seen_dead_loader = false;
  uint loaders_processed = 0;
  uint loaders_removed = 0;

  // Mark metadata seen on the stack only so we can delete unneeded entries.
  // Only walk all metadata, including the expensive code cache walk, for Full GC
  // and only if class redefinition has occurred and there are previous versions
  // of Klasses to delete.
  bool walk_all_metadata = clean_previous_versions &&
                           JvmtiExport::has_redefined_a_class() &&
                           InstanceKlass::has_previous_versions_and_reset();
  MetadataOnStackMark md_on_stack(walk_all_metadata);

  // Save previous _unloading pointer for CMS which may add to unloading list before
  // purging and we don't want to rewalk the previously unloaded class loader data.
  _saved_unloading = _unloading;

  data = _head;
  while (data != NULL) {
    if (data->is_alive()) {
      // clean metaspace
      if (walk_all_metadata) {
        data->classes_do(InstanceKlass::purge_previous_versions);
      }
      data->free_deallocate_list();
      prev = data;
      data = data->next();
      loaders_processed++;
      continue;
    }
    // Dead loader: unlink it from the live list and push it on _unloading.
    seen_dead_loader = true;
    loaders_removed++;
    ClassLoaderData* dead = data;
    dead->unload();
    data = data->next();
    // Remove from loader list.
    // This class loader data will no longer be found
    // in the ClassLoaderDataGraph.
    if (prev != NULL) {
      prev->set_next(data);
    } else {
      assert(dead == _head, "sanity check");
      _head = data;
    }
    dead->set_next(_unloading);
    _unloading = dead;
  }

  if (seen_dead_loader) {
    data = _head;
    while (data != NULL) {
      // Remove entries in the dictionary of live class loader that have
      // initiated loading classes in a dead class loader.
      if (data->dictionary() != NULL) {
        data->dictionary()->do_unloading();
      }
      // Walk a ModuleEntry's reads, and a PackageEntry's exports
      // lists to determine if there are modules on those lists that are now
      // dead and should be removed.  A module's life cycle is equivalent
      // to its defining class loader's life cycle.  Since a module is
      // considered dead if its class loader is dead, these walks must
      // occur after each class loader's aliveness is determined.
      if (data->packages() != NULL) {
        data->packages()->purge_all_package_exports();
      }
      if (data->modules_defined()) {
        data->modules()->purge_all_module_reads();
      }
      data = data->next();
    }
    JFR_ONLY(post_class_unload_events();)
  }

  log_debug(class, loader, data)("do_unloading: loaders processed %u, loaders removed %u", loaders_processed, loaders_removed);

  return seen_dead_loader;
}

// Delete the CLDs moved to _unloading by do_unloading(), and purge the
// metaspace if anything was actually freed.
void ClassLoaderDataGraph::purge() {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  ClassLoaderData* list = _unloading;
  _unloading = NULL;
  ClassLoaderData* next = list;
  bool classes_unloaded = false;
  while (next != NULL) {
    ClassLoaderData* purge_me = next;
    next = purge_me->next();
    delete purge_me;
    classes_unloaded = true;
  }
  if (classes_unloaded) {
    Metaspace::purge();
    set_metaspace_oom(false);
  }
}

// Resize any dictionary that requested it; returns the number resized.
int ClassLoaderDataGraph::resize_if_needed() {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  int resized = 0;
  if (Dictionary::does_any_dictionary_needs_resizing()) {
    FOR_ALL_DICTIONARY(cld) {
      if (cld->dictionary()->resize_if_needed()) {
        resized++;
      }
    }
  }
  return resized;
}

ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic()
    : _next_klass(NULL) {
  ClassLoaderData* cld = ClassLoaderDataGraph::_head;
  Klass* klass = NULL;

  // Find the first klass in the CLDG.
  while (cld != NULL) {
    assert_locked_or_safepoint(cld->metaspace_lock());
    klass = cld->_klasses;
    if (klass != NULL) {
      _next_klass = klass;
      return;
    }
    cld = cld->next();
  }
}

// Successor of 'klass' in a flattened walk of all klasses in the CLD graph,
// or NULL when 'klass' is the last one.
Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) {
  Klass* next = klass->next_link();
  if (next != NULL) {
    return next;
  }

  // No more klasses in the current CLD.  Time to find a new CLD.
  ClassLoaderData* cld = klass->class_loader_data();
  assert_locked_or_safepoint(cld->metaspace_lock());
  while (next == NULL) {
    cld = cld->next();
    if (cld == NULL) {
      break;
    }
    next = cld->_klasses;
  }

  return next;
}

// Claim the current klass with a CAS so concurrent callers each get a
// distinct klass; retry when another thread advanced _next_klass first.
Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
  Klass* head = _next_klass;

  while (head != NULL) {
    Klass* next = next_klass_in_cldg(head);

    Klass* old_head = Atomic::cmpxchg(next, &_next_klass, head);

    if (old_head == head) {
      return head; // Won the CAS.
    }

    head = old_head;
  }

  // Nothing more for the iterator to hand out.
  // NOTE(review): the message text says "expected not null" although the
  // condition asserts head IS null — message looks inverted; confirm upstream.
  assert(head == NULL, "head is " PTR_FORMAT ", expected not null:", p2i(head));
  return NULL;
}

ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
  _data = ClassLoaderDataGraph::_head;
}

ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}

#ifndef PRODUCT
// callable from debugger
extern "C" int print_loader_data_graph() {
  ResourceMark rm;
  ClassLoaderDataGraph::print_on(tty);
  return 0;
}

// Verify every CLD in the graph.
void ClassLoaderDataGraph::verify() {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    data->verify();
  }
}

// Print every CLD in the graph.
void ClassLoaderDataGraph::print_on(outputStream * const out) {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    data->print_on(out);
  }
}
#endif // PRODUCT