/*
 * Copyright (c) 2012, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// A ClassLoaderData identifies the full set of class types that a class
// loader's name resolution strategy produces for a given configuration of the
// class loader.
// Class types in the ClassLoaderData may be defined from class file binaries
// provided by the class loader, or from other class loaders it interacts with
// according to its name resolution strategy.
//
// Class loaders that implement a deterministic name resolution strategy
// (including with respect to their delegation behavior), such as the boot, the
// platform, and the system loaders of the JDK's built-in class loader
// hierarchy, always produce the same linkset for a given configuration.
//
// ClassLoaderData carries information related to a linkset (e.g.,
// metaspace holding its klass definitions).
// The System Dictionary and related data structures (e.g., placeholder table,
// loader constraints table) as well as the runtime representation of classes
// only reference ClassLoaderData.
//
// Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData that
// represents the loader's "linking domain" in the JVM.
//
// The bootstrap loader (represented by NULL) also has a ClassLoaderData,
// the singleton class the_null_class_loader_data().

#include "precompiled.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/dictionary.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/metadataOnStackMark.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/packageEntry.hpp"
#include "classfile/systemDictionary.hpp"
#include "code/codeCache.hpp"
#include "gc/shared/gcLocker.hpp"
#include "logging/log.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/mutex.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/safepoint.hpp"
#include "runtime/synchronizer.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
#include "utilities/ostream.hpp"
#if INCLUDE_TRACE
#include "trace/tracing.hpp"
#endif

// Helper function to avoid in-line casts when reading fields that are
// published with OrderAccess::release_store_ptr and examined lock-free.
template <typename T> static T* load_ptr_acquire(T* volatile *p) {
  return static_cast<T*>(OrderAccess::load_ptr_acquire(p));
}

// Singleton CLD for the bootstrap (NULL) class loader; set up during VM init.
ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;

ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous, Dependencies dependencies) :
  _class_loader(h_class_loader()),
  _is_anonymous(is_anonymous),
  // An anonymous class loader data doesn't have anything to keep
  // it from being unloaded during parsing of the anonymous class.
  // The null-class-loader should always be kept alive.
  _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
  _metaspace(NULL), _unloading(false), _klasses(NULL),
  _modules(NULL), _packages(NULL), _dictionary(NULL),
  _claimed(0), _jmethod_ids(NULL), _handles(), _deallocate_list(NULL),
  _next(NULL), _dependencies(dependencies),
  _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
                            Monitor::_safepoint_check_never)) {

  // A ClassLoaderData created solely for an anonymous class should never have a
  // ModuleEntryTable or PackageEntryTable created for it. The defining package
  // and module for an anonymous class will be found in its host class.
  if (!is_anonymous) {
    _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
    if (h_class_loader.is_null()) {
      // Create unnamed module for boot loader
      _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
    } else {
      // Create unnamed module for all other loaders
      _unnamed_module = ModuleEntry::create_unnamed_module(this);
    }
  } else {
    _unnamed_module = NULL;
  }
  TRACE_INIT_ID(this);
}

// Boot-time only: allocate the dependency list for the null CLD, which is
// constructed before the heap can hand out oops.
void ClassLoaderData::init_dependencies(TRAPS) {
  assert(!Universe::is_fully_initialized(), "should only be called when initializing");
  assert(is_the_null_class_loader_data(), "should only call this for the null class loader");
  _dependencies.init(CHECK);
}

void ClassLoaderData::Dependencies::init(TRAPS) {
  // Create empty dependencies array to add to. CMS requires this to be
  // an oop so that it can track additions via card marks.  We think.
  _list_head = oopFactory::new_objectArray(2, CHECK);
}

// Free all handle chunks. Only safe once no concurrent readers remain
// (i.e. when the owning CLD itself is being destroyed).
ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
  Chunk* c = _head;
  while (c != NULL) {
    Chunk* next = c->_next;
    delete c;
    c = next;
  }
}

// Append an oop to the chunked list and return a pointer to its slot.
// Writers are serialized externally (metaspace_lock); readers are lock-free,
// so both the new chunk and the grown size are published with release stores
// after the slot contents are written.
oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
  if (_head == NULL || _head->_size == Chunk::CAPACITY) {
    Chunk* next = new Chunk(_head);
    OrderAccess::release_store_ptr(&_head, next);
  }
  oop* handle = &_head->_data[_head->_size];
  *handle = o;
  OrderAccess::release_store(&_head->_size, _head->_size + 1);
  return handle;
}

// Apply the closure to the first 'size' slots of one chunk, skipping slots
// cleared by remove_handle_unsafe.
inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
  for (juint i = 0; i < size; i++) {
    if (c->_data[i] != NULL) {
      f->do_oop(&c->_data[i]);
    }
  }
}

// Lock-free walk over all handles. Pairs with the release stores in add():
// the head pointer and the head chunk's size are loaded with acquire so the
// slot contents written before publication are visible.
void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
  Chunk* head = (Chunk*) OrderAccess::load_ptr_acquire(&_head);
  if (head != NULL) {
    // Must be careful when reading size of head
    oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
    for (Chunk* c = head->_next; c != NULL; c = c->_next) {
      // Non-head chunks are full and immutable, so a plain read of _size is safe.
      oops_do_chunk(f, c, c->_size);
    }
  }
}

#ifdef ASSERT
// Debug-only closure used by contains() to check whether a given slot
// address belongs to this handle list.
class VerifyContainsOopClosure : public OopClosure {
  oop* _target;
  bool _found;

 public:
  VerifyContainsOopClosure(oop* target) : _target(target), _found(false) {}

  void do_oop(oop* p) {
    if (p == _target) {
      _found = true;
    }
  }

  void do_oop(narrowOop* p) {
    // The ChunkedHandleList should not contain any narrowOop
    ShouldNotReachHere();
  }

  bool found() const {
    return _found;
  }
};

bool ClassLoaderData::ChunkedHandleList::contains(oop* p) {
  VerifyContainsOopClosure cl(p);
  oops_do(&cl);
  return cl.found();
}
#endif

// One-shot claim of this CLD for the current GC traversal; returns true for
// exactly one claimant. Reset via clear_claimed_marks() between traversals.
bool ClassLoaderData::claim() {
  if (_claimed == 1) {
    return false;
  }

  return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0;
}

// Anonymous classes have their own ClassLoaderData that is marked to keep alive
// while the class is being parsed, and if the class appears on the module fixup list.
// Due to the uniqueness that no other class shares the anonymous class' name or
// ClassLoaderData, no other non-GC thread has knowledge of the anonymous class while
// it is being defined, therefore _keep_alive is not volatile or atomic.
void ClassLoaderData::inc_keep_alive() {
  if (is_anonymous()) {
    assert(_keep_alive >= 0, "Invalid keep alive increment count");
    _keep_alive++;
  }
}

void ClassLoaderData::dec_keep_alive() {
  if (is_anonymous()) {
    assert(_keep_alive > 0, "Invalid keep alive decrement count");
    _keep_alive--;
  }
}

// Visit all oops owned by this CLD (loader oop, dependency list, handles) and
// optionally its klasses. When must_claim is set, only the first claimant of
// this traversal does the work.
void ClassLoaderData::oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
  if (must_claim && !claim()) {
    return;
  }

  f->do_oop(&_class_loader);
  _dependencies.oops_do(f);

  _handles.oops_do(f);

  if (klass_closure != NULL) {
    classes_do(klass_closure);
  }
}

void ClassLoaderData::Dependencies::oops_do(OopClosure* f) {
  f->do_oop((oop*)&_list_head);
}

void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_ptr_acquire
  for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
    klass_closure->do_klass(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::classes_do(void f(Klass * const)) {
  // Lock-free access requires load_ptr_acquire
  for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
    f(k);
    assert(k != k->next_link(), "no loops!");
  }
}

// Apply f to every method of every loaded InstanceKlass in this CLD.
void ClassLoaderData::methods_do(void f(Method*)) {
  // Lock-free access requires load_ptr_acquire
  for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
      InstanceKlass::cast(k)->methods_do(f);
    }
  }
}

void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_ptr_acquire
  for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
    // Do not filter ArrayKlass oops here...
    if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
      klass_closure->do_klass(k);
    }
  }
}

void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
  // Lock-free access requires load_ptr_acquire
  for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass()) {
      f(InstanceKlass::cast(k));
    }
    assert(k != k->next_link(), "no loops!");
  }
}

// Apply f to the unnamed module and every named module of this CLD.
void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_unnamed_module != NULL) {
    f(_unnamed_module);
  }
  if (_modules != NULL) {
    for (int i = 0; i < _modules->table_size(); i++) {
      for (ModuleEntry* entry = _modules->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

// Apply f to every package entry of this CLD.
void ClassLoaderData::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_packages != NULL) {
    for (int i = 0; i < _packages->table_size(); i++) {
      for (PackageEntry* entry = _packages->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

// Record that this CLD depends on k's CLD, so k's loader (or mirror) is kept
// alive as long as this CLD is alive. Dependencies already reachable through
// the parent-loader chain need not be recorded.
void ClassLoaderData::record_dependency(const Klass* k, TRAPS) {
  assert(k != NULL, "invariant");

  ClassLoaderData * const from_cld = this;
  ClassLoaderData * const to_cld = k->class_loader_data();

  // Dependency to the null class loader data doesn't need to be recorded
  // because the null class loader data never goes away.
  if (to_cld->is_the_null_class_loader_data()) {
    return;
  }

  oop to;
  if (to_cld->is_anonymous()) {
    // Anonymous class dependencies are through the mirror.
    to = k->java_mirror();
  } else {
    to = to_cld->class_loader();

    // If from_cld is anonymous, even if its class_loader is a parent of 'to'
    // we still have to add it.  The class_loader won't keep from_cld alive.
    if (!from_cld->is_anonymous()) {
      // Check that this dependency isn't from the same or parent class_loader
      oop from = from_cld->class_loader();

      oop curr = from;
      while (curr != NULL) {
        if (curr == to) {
          return; // this class loader is in the parent list, no need to add it.
        }
        curr = java_lang_ClassLoader::parent(curr);
      }
    }
  }

  // It's a dependency we won't find through GC, add it. This is relatively rare.
  // Must handle over GC point.
  Handle dependency(THREAD, to);
  from_cld->_dependencies.add(dependency, CHECK);
}


// Append 'dependency' to this CLD's singly-linked list of (oop, next) object
// array nodes, unless it is already present. Allocation happens outside the
// lock; insertion is done under lock in locked_add().
void ClassLoaderData::Dependencies::add(Handle dependency, TRAPS) {
  // Check first if this dependency is already in the list.
  // Save a pointer to the last to add to under the lock.
  objArrayOop ok = _list_head;
  objArrayOop last = NULL;
  while (ok != NULL) {
    last = ok;
    if (ok->obj_at(0) == dependency()) {
      // Don't need to add it
      return;
    }
    ok = (objArrayOop)ok->obj_at(1);
  }

  // Must handle over GC points
  assert (last != NULL, "dependencies should be initialized");
  objArrayHandle last_handle(THREAD, last);

  // Create a new dependency node with fields for (class_loader or mirror, next)
  objArrayOop deps = oopFactory::new_objectArray(2, CHECK);
  deps->obj_at_put(0, dependency());

  // Must handle over GC points
  objArrayHandle new_dependency(THREAD, deps);

  // Add the dependency under lock
  locked_add(last_handle, new_dependency, THREAD);
}

void ClassLoaderData::Dependencies::locked_add(objArrayHandle last_handle,
                                               objArrayHandle new_dependency,
                                               Thread* THREAD) {

  // Have to lock and put the new dependency on the end of the dependency
  // array so the card mark for CMS sees that this dependency is new.
  // Can probably do this lock free with some effort.
  ObjectLocker ol(Handle(THREAD, _list_head), THREAD);

  oop loader_or_mirror = new_dependency->obj_at(0);

  // Since the dependencies are only added, add to the end.
  objArrayOop end = last_handle();
  objArrayOop last = NULL;
  while (end != NULL) {
    last = end;
    // check again if another thread added it to the end.
    if (end->obj_at(0) == loader_or_mirror) {
      // Don't need to add it
      return;
    }
    end = (objArrayOop)end->obj_at(1);
  }
  assert (last != NULL, "dependencies should be initialized");
  // fill in the first element with the oop in new_dependency.
  if (last->obj_at(0) == NULL) {
    last->obj_at_put(0, new_dependency->obj_at(0));
  } else {
    last->obj_at_put(1, new_dependency());
  }
}

// Reset the claim marks on all CLDs so the next GC traversal can claim anew.
void ClassLoaderDataGraph::clear_claimed_marks() {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->clear_claimed();
  }
}

// Prepend k to this CLD's klass list. The list is walked lock-free, so the
// new head is published with a release store after its next_link is set.
void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
  {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    Klass* old_value = _klasses;
    k->set_next_link(old_value);
    // Link the new item into the list, making sure the linked class is stable
    // since the list can be walked without a lock
    OrderAccess::release_store_ptr(&_klasses, k);
  }

  if (publicize && k->class_loader_data() != NULL) {
    ResourceMark rm;
    log_trace(class, loader, data)("Adding k: " PTR_FORMAT " %s to CLD: "
                  PTR_FORMAT " loader: " PTR_FORMAT " %s",
                  p2i(k),
                  k->external_name(),
                  p2i(k->class_loader_data()),
                  p2i((void *)k->class_loader()),
                  loader_name());
  }
}

// Class iterator used by the compiler.  It gets some number of classes at
// a safepoint to decay invocation counters on the methods.
// Stateful iterator over all InstanceKlasses in the CLD graph, resumed across
// safepoints. Wraps around to the head of the graph when it runs off the end,
// so it never terminates on its own; callers rely on there always being at
// least one loaded InstanceKlass (the graph always contains the boot CLD).
class ClassLoaderDataGraphKlassIteratorStatic {
  ClassLoaderData* _current_loader_data = NULL;  // CLD currently being walked
  Klass* _current_class_entry = NULL;            // next klass within that CLD
public:

  // Return the next loaded InstanceKlass, advancing through CLDs as needed.
  InstanceKlass* try_get_next_class() {
    assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
    while (true) {

      if (_current_class_entry != NULL) {
        Klass* k = _current_class_entry;
        _current_class_entry = _current_class_entry->next_link();

        if (k->is_instance_klass()) {
          InstanceKlass* ik = InstanceKlass::cast(k);
          // Only return loaded classes
          if (ik->is_loaded()) {
            return ik;
          }
        }
      } else {
        // Go to next CLD
        if (_current_loader_data != NULL) {
          _current_loader_data = _current_loader_data->next();
        }
        // Start at the beginning
        if (_current_loader_data == NULL) {
          _current_loader_data = ClassLoaderDataGraph::_head;
        }

        _current_class_entry = _current_loader_data->klasses();
      }
    }
    // never reached: an InstanceKlass should be returned above
  }

  // If the current class for the static iterator is a class being unloaded or
  // deallocated, adjust the current class.
  void adjust_saved_class(ClassLoaderData* cld) {
    if (_current_loader_data == cld) {
      _current_loader_data = cld->next();
      if (_current_loader_data != NULL) {
        _current_class_entry = _current_loader_data->klasses();
      } // else try_get_next_class will start at the head
    }
  }

  // Skip over a single klass that is about to be removed from its CLD's list.
  void adjust_saved_class(Klass* klass) {
    if (_current_class_entry == klass) {
      _current_class_entry = klass->next_link();
    }
  }
};

static ClassLoaderDataGraphKlassIteratorStatic static_klass_iterator;

InstanceKlass* ClassLoaderDataGraph::try_get_next_class() {
  return static_klass_iterator.try_get_next_class();
}


// Remove a klass from the _klasses list for scratch_class during redefinition
// or parsed class in the case of an error.
void ClassLoaderData::remove_class(Klass* scratch_class) {
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");

  // Adjust global class iterator.
  static_klass_iterator.adjust_saved_class(scratch_class);

  // Standard singly-linked-list unlink; safe without ordering because we are
  // at a safepoint and no concurrent readers exist.
  Klass* prev = NULL;
  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    if (k == scratch_class) {
      if (prev == NULL) {
        _klasses = k->next_link();
      } else {
        Klass* next = k->next_link();
        prev->set_next_link(next);
      }
      return;
    }
    prev = k;
    assert(k != k->next_link(), "no loops!");
  }
  ShouldNotReachHere();   // should have found this class!!
}

// Mark this CLD as unloading, notify tools, and release metadata that would
// otherwise only be freed when the CLD itself is deleted.
void ClassLoaderData::unload() {
  _unloading = true;

  // Tell serviceability tools these classes are unloading
  classes_do(InstanceKlass::notify_unload_class);

  if (log_is_enabled(Debug, class, loader, data)) {
    ResourceMark rm;
    outputStream* log = Log(class, loader, data)::debug_stream();
    log->print(": unload loader data " INTPTR_FORMAT, p2i(this));
    log->print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)class_loader()),
               loader_name());
    if (is_anonymous()) {
      log->print(" for anonymous class  " INTPTR_FORMAT " ", p2i(_klasses));
    }
    log->cr();
  }

  // In some rare cases items added to this list will not be freed elsewhere.
  // To keep it simple, just free everything in it here.
  free_deallocate_list();

  // Clean up global class iterator for compiler
  static_klass_iterator.adjust_saved_class(this);
}

ModuleEntryTable* ClassLoaderData::modules() {
  // Lazily create the module entry table at first request.
  // Lock-free access requires load_ptr_acquire.
  ModuleEntryTable* modules = load_ptr_acquire(&_modules);
  if (modules == NULL) {
    MutexLocker m1(Module_lock);
    // Check if _modules got allocated while we were waiting for this lock.
    if ((modules = _modules) == NULL) {
      modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);

      {
        MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
        // Ensure _modules is stable, since it is examined without a lock
        OrderAccess::release_store_ptr(&_modules, modules);
      }
    }
  }
  return modules;
}

const int _boot_loader_dictionary_size = 1009;
const int _prime_array_size = 8;                       // array of primes for system dictionary size
const int _average_depth_goal = 3;                     // goal for lookup length
const int _primelist[_prime_array_size] = {107, 1009, 2017, 4049, 5051, 10103, 20201, 40423};

// Calculate a "good" dictionary size based
// on predicted or current loaded classes count *per class loader*
// This size will be used for all class loaders if specified,
// except boot loader and reflection class loaders
static int calculate_dictionary_size(int classcount) {
  static int newsize = 0;   // only calculate once
  if (newsize != 0) {
    return newsize;
  }
  newsize = _primelist[0];
  if (classcount > 0 && !DumpSharedSpaces) {
    int index = 0;
    int desiredsize = classcount/_average_depth_goal;
    // Pick the smallest prime that meets the desired size, capped at the
    // largest entry in _primelist.
    for (newsize = _primelist[index]; index < _prime_array_size -1;
         newsize = _primelist[++index]) {
      if (desiredsize <= newsize) {
        break;
      }
    }
  }
  return newsize;
}

Dictionary* ClassLoaderData::dictionary_or_null() {
  return load_ptr_acquire(&_dictionary);
}

Dictionary* ClassLoaderData::dictionary() {
  assert(!is_anonymous(), "anonymous class loader data do not have a dictionary");
  // Lazily create the dictionary, in the same way of lazily creating modules.
  // Lock-free access requires load_ptr_acquire.
  Dictionary* dictionary = load_ptr_acquire(&_dictionary);
  if (dictionary == NULL) {
    MutexLocker m1(SystemDictionary_lock);
    // Check if _dictionary got allocated while we were waiting for this lock.
    if ((dictionary = _dictionary) == NULL) {
      int size;
      if (this == the_null_class_loader_data()) {
        size = _boot_loader_dictionary_size;
      } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
        size = 1;  // there's only one class in reflection class loader and no initiated classes
      } else {
        size = calculate_dictionary_size(PredictedLoadedClassCount);
      }
      dictionary = new Dictionary(this, size);
      // Ensure _dictionary is stable, since it is examined without a lock.
      // Don't need metaspace_lock since SystemDictionary_lock is held.
      OrderAccess::release_store_ptr(&_dictionary, dictionary);
    }
  }
  return dictionary;
}

// Unloading support
// The oop whose reachability decides whether this CLD is alive: the mirror of
// the single klass for anonymous CLDs, the loader instance otherwise.
oop ClassLoaderData::keep_alive_object() const {
  assert_locked_or_safepoint(_metaspace_lock);
  assert(!keep_alive(), "Don't use with CLDs that are artificially kept alive");
  return is_anonymous() ? _klasses->java_mirror() : class_loader();
}

bool ClassLoaderData::is_alive(BoolObjectClosure* is_alive_closure) const {
  bool alive = keep_alive()         // null class loader and incomplete anonymous klasses.
      || is_alive_closure->do_object_b(keep_alive_object());

  return alive;
}

ClassLoaderData::~ClassLoaderData() {
  // Release C heap structures for all the classes.
  classes_do(InstanceKlass::release_C_heap_structures);

  // Release C heap allocated hashtable for all the packages.
  if (_packages != NULL) {
    // Destroy the table itself
    delete _packages;
    _packages = NULL;
  }

  // Release C heap allocated hashtable for all the modules.
  if (_modules != NULL) {
    // Destroy the table itself
    delete _modules;
    _modules = NULL;
  }

  // Release C heap allocated hashtable for the dictionary
  if (_dictionary != NULL) {
    // Destroy the table itself
    delete _dictionary;
    _dictionary = NULL;
  }

  if (_unnamed_module != NULL) {
    _unnamed_module->delete_unnamed_module();
    _unnamed_module = NULL;
  }

  // release the metaspace
  Metaspace *m = _metaspace;
  if (m != NULL) {
    _metaspace = NULL;
    delete m;
  }
  // Clear all the JNI handles for methods
  // These aren't deallocated and are going to look like a leak, but that's
  // needed because we can't really get rid of jmethodIDs because we don't
  // know when native code is going to stop using them. The spec says that
  // they're "invalid" but existing programs likely rely on their being
  // NULL after class unloading.
  if (_jmethod_ids != NULL) {
    Method::clear_jmethod_ids(this);
  }
  // Delete lock
  delete _metaspace_lock;

  // Delete free list
  if (_deallocate_list != NULL) {
    delete _deallocate_list;
  }
}

// Returns true if this class loader data is for the system class loader.
bool ClassLoaderData::is_system_class_loader_data() const {
  return SystemDictionary::is_system_class_loader(class_loader());
}

// Returns true if this class loader data is for the platform class loader.
bool ClassLoaderData::is_platform_class_loader_data() const {
  return SystemDictionary::is_platform_class_loader(class_loader());
}

// Returns true if this class loader data is one of the 3 builtin
// (boot, application/system or platform) class loaders. Note, the
// builtin loaders are not freed by a GC.
bool ClassLoaderData::is_builtin_class_loader_data() const {
  return (is_the_null_class_loader_data() ||
          SystemDictionary::is_system_class_loader(class_loader()) ||
          SystemDictionary::is_platform_class_loader(class_loader()));
}

// Lazily allocate this CLD's metaspace, choosing a type sized for the kind of
// loader (boot / anonymous / reflection-delegating / standard). Uses
// double-checked locking: lock-free acquire read, re-check under
// _metaspace_lock, release publication.
Metaspace* ClassLoaderData::metaspace_non_null() {
  assert(!DumpSharedSpaces, "wrong metaspace!");
  // If the metaspace has not been allocated, create a new one.  Might want
  // to create smaller arena for Reflection class loaders also.
  // The reason for the delayed allocation is because some class loaders are
  // simply for delegating with no metadata of their own.
  // Lock-free access requires load_ptr_acquire.
  Metaspace* metaspace = load_ptr_acquire(&_metaspace);
  if (metaspace == NULL) {
    MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
    // Check if _metaspace got allocated while we were waiting for this lock.
    if ((metaspace = _metaspace) == NULL) {
      if (this == the_null_class_loader_data()) {
        assert (class_loader() == NULL, "Must be");
        metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
      } else if (is_anonymous()) {
        if (class_loader() != NULL) {
          log_trace(class, loader, data)("is_anonymous: %s", class_loader()->klass()->internal_name());
        }
        metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
      } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
        if (class_loader() != NULL) {
          log_trace(class, loader, data)("is_reflection: %s", class_loader()->klass()->internal_name());
        }
        metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
      } else {
        metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
      }
      // Ensure _metaspace is stable, since it is examined without a lock
      OrderAccess::release_store_ptr(&_metaspace, metaspace);
    }
  }
  return metaspace;
}

// Create a handle for h in this CLD's chunked handle list, keeping the oop
// alive as long as the CLD is. Returned as a jobject for convenience.
jobject ClassLoaderData::add_handle(Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  return (jobject) _handles.add(h());
}

// Clear a handle slot in place. "Unsafe" because the slot is not reclaimed;
// it is simply NULLed and skipped by oops_do.
void ClassLoaderData::remove_handle_unsafe(jobject h) {
  assert(_handles.contains((oop*) h), "Got unexpected handle " PTR_FORMAT, p2i((oop*) h));
  *((oop*) h) = NULL;
}

// Add this metadata pointer to be freed when it's safe.  This is only during
// class unloading because Handles might point to this metadata field.
void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
  // Metadata in shared region isn't deleted.
  if (!m->is_shared()) {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    if (_deallocate_list == NULL) {
      _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
    }
    _deallocate_list->append_if_missing(m);
  }
}

// Deallocate free metadata on the free list.  How useful the PermGen was!
void ClassLoaderData::free_deallocate_list() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        ShouldNotReachHere();
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
    }
  }
}

// These anonymous class loaders are to contain classes used for JSR292
ClassLoaderData* ClassLoaderData::anonymous_class_loader_data(oop loader, TRAPS) {
  // Add a new class loader data to the graph.
  Handle lh(THREAD, loader);
  return ClassLoaderDataGraph::add(lh, true, THREAD);
}

const char* ClassLoaderData::loader_name() {
  // Handles null class loader
  return SystemDictionary::loader_name(class_loader());
}

#ifndef PRODUCT
// Define to dump klasses
#undef CLD_DUMP_KLASSES

// Debug-only: print this CLD's state (loader, claim/unloading flags,
// metaspace, optionally klasses and jmethodIDs).
void ClassLoaderData::dump(outputStream * const out) {
  out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: " PTR_FORMAT " %s {",
      p2i(this), p2i((void *)class_loader()),
      p2i(class_loader() != NULL ? class_loader()->klass() : NULL), loader_name());
  if (claimed()) out->print(" claimed ");
  if (is_unloading()) out->print(" unloading ");
  out->cr();
  if (metaspace_or_null() != NULL) {
    out->print_cr("metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));
    metaspace_or_null()->dump(out);
  } else {
    out->print_cr("metaspace: NULL");
  }

#ifdef CLD_DUMP_KLASSES
  if (Verbose) {
    Klass* k = _klasses;
    while (k != NULL) {
      out->print_cr("klass " PTR_FORMAT ", %s, CT: %d, MUT: %d", k, k->name()->as_C_string(),
          k->has_modified_oops(), k->has_accumulated_modified_oops());
      assert(k != k->next_link(), "no loops!");
      k = k->next_link();
    }
  }
#endif  // CLD_DUMP_KLASSES
#undef CLD_DUMP_KLASSES
  if (_jmethod_ids != NULL) {
    Method::print_jmethod_ids(this, out);
  }
  out->print_cr("}");
}
#endif // PRODUCT

// Check internal invariants: back-pointer consistency, metaspace integrity,
// and acyclic klass list.
void ClassLoaderData::verify() {
  assert_locked_or_safepoint(_metaspace_lock);
  oop cl = class_loader();

  guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
  guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");

  // Verify the integrity of the allocated space.
  if (metaspace_or_null() != NULL) {
    metaspace_or_null()->verify();
  }

  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    guarantee(k->class_loader_data() == this, "Must be the same");
    k->verify();
    assert(k != k->next_link(), "no loops!");
  }
}

bool ClassLoaderData::contains_klass(Klass* klass) {
  // Lock-free access requires load_ptr_acquire
  for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k == klass) return true;
  }
  return false;
}


// GC root of class loader data created.
// Head of the global CLD list; CLDs are prepended lock-free in add().
ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
// List of CLDs found dead by do_unloading(), awaiting purge().
ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
// Snapshot of _unloading taken at the start of do_unloading(); see comments there.
ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
// Snapshot of _head used by new_clds() to find CLDs added since the snapshot.
ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;

bool ClassLoaderDataGraph::_should_purge = false;
bool ClassLoaderDataGraph::_metaspace_oom = false;

// Add a new class loader data node to the list.  Assign the newly created
// ClassLoaderData into the java/lang/ClassLoader object as a hidden field.
// May race with other threads creating a CLD for the same loader; the loser
// of the race deletes its CLD and returns the winner's.
ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous, TRAPS) {
  // We need to allocate all the oops for the ClassLoaderData before allocating the
  // actual ClassLoaderData object.
  ClassLoaderData::Dependencies dependencies(CHECK_NULL);

  NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
                                     // ClassLoaderData in the graph since the CLD
                                     // contains unhandled oops

  ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous, dependencies);


  if (!is_anonymous) {
    // Anonymous classes never publish their CLD in the loader oop; each call
    // gets a fresh CLD.  For regular loaders, install into the hidden field.
    ClassLoaderData** cld_addr = java_lang_ClassLoader::loader_data_addr(loader());
    // First, atomically set it.
    ClassLoaderData* old = (ClassLoaderData*) Atomic::cmpxchg_ptr(cld, cld_addr, NULL);
    if (old != NULL) {
      // Lost the race: another thread already installed a CLD for this loader.
      delete cld;
      // Returns the data.
      return old;
    }
  }

  // We won the race, and therefore the task of adding the data to the list of
  // class loader data.  Prepend to _head with a CAS retry loop.
  ClassLoaderData** list_head = &_head;
  ClassLoaderData* next = _head;

  do {
    cld->set_next(next);
    ClassLoaderData* exchanged = (ClassLoaderData*)Atomic::cmpxchg_ptr(cld, list_head, next);
    if (exchanged == next) {
      if (log_is_enabled(Debug, class, loader, data)) {
        PauseNoSafepointVerifier pnsv(&no_safepoints); // Need safe points for JavaCalls::call_virtual
        log_creation(loader, cld, CHECK_NULL);
      }
      return cld;
    }
    // CAS failed: someone else prepended; retry with the new head.
    next = exchanged;
  } while (true);
}

// Log creation of a CLD at Debug level, including loader.toString() output.
// Calls back into Java, so callers must allow safepoints (see add()).
void ClassLoaderDataGraph::log_creation(Handle loader, ClassLoaderData* cld, TRAPS) {
  Handle string;
  if (loader.not_null()) {
    // Include the result of loader.toString() in the output. This allows
    // the user of the log to identify the class loader instance.
    JavaValue result(T_OBJECT);
    Klass* spec_klass = SystemDictionary::ClassLoader_klass();
    JavaCalls::call_virtual(&result,
                            loader,
                            spec_klass,
                            vmSymbols::toString_name(),
                            vmSymbols::void_string_signature(),
                            CHECK);
    assert(result.get_type() == T_OBJECT, "just checking");
    string = Handle(THREAD, (oop)result.get_jobject());
  }

  ResourceMark rm;
  outputStream* log = Log(class, loader, data)::debug_stream();
  log->print("create class loader data " INTPTR_FORMAT, p2i(cld));
  log->print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)cld->class_loader()),
             cld->loader_name());

  if (string.not_null()) {
    log->print(": ");
    java_lang_String::print(string(), log);
  }
  log->cr();
}


// Apply the closures to the oops of every CLD in the graph.
void ClassLoaderDataGraph::oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->oops_do(f, klass_closure, must_claim);
  }
}

// As oops_do, but only for CLDs that must be kept alive.
void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    if (cld->keep_alive()) {
      cld->oops_do(f, klass_closure, must_claim);
    }
  }
}

// Strong-root walk: if class unloading is enabled only keep-alive CLDs are
// strong; otherwise every CLD is treated as strong.
void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
  if (ClassUnloading) {
    keep_alive_oops_do(f, klass_closure, must_claim);
  } else {
    oops_do(f, klass_closure, must_claim);
  }
}

// Apply cl to every CLD in the graph (no-op for a NULL closure).
void ClassLoaderDataGraph::cld_do(CLDClosure* cl) {
  for (ClassLoaderData* cld = _head; cl != NULL && cld != NULL; cld = cld->next()) {
    cl->do_cld(cld);
  }
}

// Apply cl to CLDs placed on the unloading list since the last snapshot.
void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk from the head up to (excluding) any CLDs not yet purged from a
  // prior unloading pass (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cl->do_cld(cld);
  }
}

// Walk all CLDs, dispatching keep-alive CLDs to the strong closure and the
// rest to the weak closure; either closure may be NULL to skip that class.
void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->_next) {
    CLDClosure* closure = cld->keep_alive() ? strong : weak;
    if (closure != NULL) {
      closure->do_cld(cld);
    }
  }
}

// Apply cl only to keep-alive CLDs.
void ClassLoaderDataGraph::keep_alive_cld_do(CLDClosure* cl) {
  roots_cld_do(cl, NULL);
}

// CLD counterpart of always_strong_oops_do (see above).
void ClassLoaderDataGraph::always_strong_cld_do(CLDClosure* cl) {
  if (ClassUnloading) {
    keep_alive_cld_do(cl);
  } else {
    cld_do(cl);
  }
}

// Apply the klass closure to the classes of every CLD.
void ClassLoaderDataGraph::classes_do(KlassClosure* klass_closure) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->classes_do(klass_closure);
  }
}

// Function-pointer variant of classes_do.
void ClassLoaderDataGraph::classes_do(void f(Klass* const)) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->classes_do(f);
  }
}

// Apply f to every method of every CLD.
void ClassLoaderDataGraph::methods_do(void f(Method*)) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->methods_do(f);
  }
}

// Apply f to every module entry; requires the Module_lock or a safepoint.
void ClassLoaderDataGraph::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->modules_do(f);
  }
}

// Apply f to module entries of CLDs on the current unloading segment.
void ClassLoaderDataGraph::modules_unloading_do(void f(ModuleEntry*)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk from the head up to (excluding) any CLDs not yet purged from a
  // prior unloading pass (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->modules_do(f);
  }
}

// Apply f to every package entry; requires the Module_lock or a safepoint.
void ClassLoaderDataGraph::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->packages_do(f);
  }
}

// Apply f to package entries of CLDs on the current unloading segment.
void ClassLoaderDataGraph::packages_unloading_do(void f(PackageEntry*)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk from the head up to (excluding) any CLDs not yet purged from a
  // prior unloading pass (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->packages_do(f);
  }
}

// Apply the klass closure to the loaded classes of every CLD.
void ClassLoaderDataGraph::loaded_classes_do(KlassClosure* klass_closure) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->loaded_classes_do(klass_closure);
  }
}

// Apply f to classes of CLDs on the current unloading segment.
void ClassLoaderDataGraph::classes_unloading_do(void f(Klass* const)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk from the head up to (excluding) any CLDs not yet purged from a
  // prior unloading pass (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->classes_do(f);
  }
}

// Iterate over CLDs that have a dictionary (binds X to each such CLD).
#define FOR_ALL_DICTIONARY(X) for (ClassLoaderData* X = _head; X != NULL; X = X->next()) \
                                if (X->dictionary_or_null() != NULL)

// Walk classes in the loaded class dictionaries in various forms.
// Apply f to every InstanceKlass in every loader's dictionary.
void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*)) {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->classes_do(f);
  }
}

// TRAPS variant: f may throw; propagation aborts the walk via CHECK.
void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*, TRAPS), TRAPS) {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->classes_do(f, CHECK);
  }
}

// Apply f to every dictionary entry, passing the owning CLD as well.
void ClassLoaderDataGraph::dictionary_all_entries_do(void f(InstanceKlass*, ClassLoaderData*)) {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->all_entries_do(f);
  }
}

// Verify every loader's dictionary.
void ClassLoaderDataGraph::verify_dictionary() {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->verify();
  }
}

// Print every loader's dictionary to tty.
void ClassLoaderDataGraph::print_dictionary(bool details) {
  FOR_ALL_DICTIONARY(cld) {
    tty->print("Dictionary for class loader ");
    cld->print_value();
    tty->cr();
    cld->dictionary()->print(details);
  }
}

// Returns the unclaimed CLDs added to the graph since the last
// remember_new_clds(true) snapshot (i.e. those between _head and _saved_head).
GrowableArray<ClassLoaderData*>* ClassLoaderDataGraph::new_clds() {
  assert(_head == NULL || _saved_head != NULL, "remember_new_clds(true) not called?");

  GrowableArray<ClassLoaderData*>* array = new GrowableArray<ClassLoaderData*>();

  // The CLDs in [_head, _saved_head) were all added during the last call to
  // remember_new_clds(true); add() prepends, so new CLDs precede the snapshot.
  ClassLoaderData* curr = _head;
  while (curr != _saved_head) {
    if (!curr->claimed()) {
      array->push(curr);

      if (log_is_enabled(Debug, class, loader, data)) {
        outputStream* log = Log(class, loader, data)::debug_stream();
        log->print("found new CLD: ");
        curr->print_value_on(log);
        log->cr();
      }
    }

    curr = curr->_next;
  }

  return array;
}

// Returns true if x lies in the metaspace of any CLD on the unloading list.
bool ClassLoaderDataGraph::unload_list_contains(const void* x) {
  assert(SafepointSynchronize::is_at_safepoint(), "only safe to call at safepoint");
  for (ClassLoaderData* cld = _unloading; cld != NULL; cld = cld->next()) {
    if (cld->metaspace_or_null() != NULL && cld->metaspace_or_null()->contains(x)) {
      return true;
    }
  }
  return false;
}

#ifndef PRODUCT
// Debug check: is loader_data currently on the live graph?
bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    if (loader_data == data) {
      return true;
    }
  }

  return false;
}
#endif // PRODUCT


// Move class loader data from main list to the unloaded list for unloading
// and deallocation later.  Returns true if any dead loader was found.
bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure,
                                        bool clean_previous_versions) {

  ClassLoaderData* data = _head;
  ClassLoaderData* prev = NULL;
  bool seen_dead_loader = false;

  // Mark metadata seen on the stack only so we can delete unneeded entries.
  // Only walk all metadata, including the expensive code cache walk, for Full GC
  // and only if class redefinition and if there's previous versions of
  // Klasses to delete.
  bool walk_all_metadata = clean_previous_versions &&
                           JvmtiExport::has_redefined_a_class() &&
                           InstanceKlass::has_previous_versions_and_reset();
  MetadataOnStackMark md_on_stack(walk_all_metadata);

  // Save previous _unloading pointer for CMS which may add to unloading list before
  // purging and we don't want to rewalk the previously unloaded class loader data.
  _saved_unloading = _unloading;

  data = _head;
  while (data != NULL) {
    if (data->is_alive(is_alive_closure)) {
      // Live loader: clean metaspace and move on.
      if (walk_all_metadata) {
        data->classes_do(InstanceKlass::purge_previous_versions);
      }
      data->free_deallocate_list();
      prev = data;
      data = data->next();
      continue;
    }
    // Dead loader: unlink from the live graph and prepend to _unloading.
    seen_dead_loader = true;
    ClassLoaderData* dead = data;
    dead->unload();
    data = data->next();
    // Remove from loader list.
    // This class loader data will no longer be found
    // in the ClassLoaderDataGraph.
    if (prev != NULL) {
      prev->set_next(data);
    } else {
      assert(dead == _head, "sanity check");
      _head = data;
    }
    dead->set_next(_unloading);
    _unloading = dead;
  }

  if (seen_dead_loader) {
    // Walk a Dictionary, ModuleEntry's reads, and a PackageEntry's exports
    // lists to determine if there are modules on those lists that are now
    // dead and should be removed.  A module's life cycle is equivalent
    // to its defining class loader's life cycle.  Since a module is
    // considered dead if its class loader is dead, these walks must
    // occur after each class loader's aliveness is determined.
    data = _head;
    while (data != NULL) {
      if (data->dictionary_or_null() != NULL) {
        data->dictionary()->do_unloading();
      }
      if (data->packages() != NULL) {
        data->packages()->purge_all_package_exports();
      }
      if (data->modules_defined()) {
        data->modules()->purge_all_module_reads();
      }
      data = data->next();
    }

    post_class_unload_events();
  }

  return seen_dead_loader;
}

// Delete every CLD on the unloading list and release their metaspace.
// Must run at a safepoint.
void ClassLoaderDataGraph::purge() {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  ClassLoaderData* list = _unloading;
  _unloading = NULL;
  ClassLoaderData* next = list;
  bool classes_unloaded = false;
  while (next != NULL) {
    ClassLoaderData* purge_me = next;
    next = purge_me->next();
    delete purge_me;
    classes_unloaded = true;
  }
  if (classes_unloaded) {
    Metaspace::purge();
    // Metaspace was reclaimed, so clear the OOM flag.
    set_metaspace_oom(false);
  }
}

// Post trace events for each class being unloaded (no-op without INCLUDE_TRACE).
void ClassLoaderDataGraph::post_class_unload_events() {
#if INCLUDE_TRACE
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  if (Tracing::enabled()) {
    if (Tracing::is_event_enabled(TraceClassUnloadEvent)) {
      assert(_unloading != NULL, "need class loader data unload list!");
      _class_unload_time = Ticks::now();
      classes_unloading_do(&class_unload_event);
    }
    Tracing::on_unloading_classes();
  }
#endif
}

// CDS support

// Global metaspaces for writing information to the shared archive.  When
// application CDS is supported, we may need one per metaspace, so this
// sort of looks like it.
Metaspace* ClassLoaderData::_ro_metaspace = NULL;
Metaspace* ClassLoaderData::_rw_metaspace = NULL;
static bool _shared_metaspaces_initialized = false;

// Initialize shared metaspaces (change to call from somewhere not lazily).
// Only valid during -Xshare:dump and only on the null (boot) loader data.
void ClassLoaderData::initialize_shared_metaspaces() {
  assert(DumpSharedSpaces, "only use this for dumping shared spaces");
  assert(this == ClassLoaderData::the_null_class_loader_data(),
         "only supported for null loader data for now");
  assert (!_shared_metaspaces_initialized, "only initialize once");
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  _ro_metaspace = new Metaspace(_metaspace_lock, Metaspace::ROMetaspaceType);
  _rw_metaspace = new Metaspace(_metaspace_lock, Metaspace::ReadWriteMetaspaceType);
  _shared_metaspaces_initialized = true;
}

// Read-only shared metaspace; initialize_shared_metaspaces() must run first.
Metaspace* ClassLoaderData::ro_metaspace() {
  assert(_ro_metaspace != NULL, "should already be initialized");
  return _ro_metaspace;
}

// Read-write shared metaspace; initialize_shared_metaspaces() must run first.
Metaspace* ClassLoaderData::rw_metaspace() {
  assert(_rw_metaspace != NULL, "should already be initialized");
  return _rw_metaspace;
}

// Position the iterator at the first klass of the first CLD that has one.
ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic()
    : _next_klass(NULL) {
  ClassLoaderData* cld = ClassLoaderDataGraph::_head;
  Klass* klass = NULL;

  // Find the first klass in the CLDG.
  while (cld != NULL) {
    assert_locked_or_safepoint(cld->metaspace_lock());
    klass = cld->_klasses;
    if (klass != NULL) {
      _next_klass = klass;
      return;
    }
    cld = cld->next();
  }
}

// Successor of klass in the whole-graph ordering: next on its CLD's list,
// else the first klass of a subsequent CLD; NULL when exhausted.
Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) {
  Klass* next = klass->next_link();
  if (next != NULL) {
    return next;
  }

  // No more klasses in the current CLD. Time to find a new CLD.
  ClassLoaderData* cld = klass->class_loader_data();
  assert_locked_or_safepoint(cld->metaspace_lock());
  while (next == NULL) {
    cld = cld->next();
    if (cld == NULL) {
      break;
    }
    next = cld->_klasses;
  }

  return next;
}

// Atomically claim and return the next klass; multiple threads may call this
// concurrently, each receiving a distinct klass. Returns NULL when exhausted.
Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
  Klass* head = _next_klass;

  while (head != NULL) {
    Klass* next = next_klass_in_cldg(head);

    Klass* old_head = (Klass*)Atomic::cmpxchg_ptr(next, &_next_klass, head);

    if (old_head == head) {
      return head; // Won the CAS.
    }

    // Lost the CAS to another thread; retry from the observed head.
    head = old_head;
  }

  // Nothing more for the iterator to hand out.
  // NOTE(review): the assert message says "expected not null" although the
  // condition expects head == NULL — the message text looks inverted.
  assert(head == NULL, "head is " PTR_FORMAT ", expected not null:", p2i(head));
  return NULL;
}

// Start the metaspace iterator at the head of the CLD graph.
ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
  _data = ClassLoaderDataGraph::_head;
}

ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}

#ifndef PRODUCT
// callable from debugger
extern "C" int print_loader_data_graph() {
  ClassLoaderDataGraph::dump_on(tty);
  return 0;
}

// Verify every CLD in the graph (non-PRODUCT only).
void ClassLoaderDataGraph::verify() {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    data->verify();
  }
}

// Dump every CLD plus aggregate metaspace statistics (non-PRODUCT only).
void ClassLoaderDataGraph::dump_on(outputStream * const out) {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    data->dump(out);
  }
  MetaspaceAux::dump(out);
}
#endif // PRODUCT

// Short one-line description of this CLD.
void ClassLoaderData::print_value_on(outputStream* out) const {
  if (class_loader() == NULL) {
    out->print("NULL class_loader");
  } else {
    out->print("class loader " INTPTR_FORMAT " ", p2i(this));
    class_loader()->print_value_on(out);
  }
}

// Full description of this CLD (delegates to the loader oop's print_on).
void ClassLoaderData::print_on(outputStream* out) const {
  if (class_loader() == NULL) {
    out->print("NULL class_loader");
  } else {
    out->print("class loader " INTPTR_FORMAT " ", p2i(this));
    class_loader()->print_on(out);
  }
}

#if INCLUDE_TRACE

// Timestamp shared by all ClassUnload events posted in one unloading pass.
Ticks ClassLoaderDataGraph::_class_unload_time;

// Post a ClassUnload trace event for k; used as the closure in
// post_class_unload_events() above.
void ClassLoaderDataGraph::class_unload_event(Klass* const k) {
  assert(k != NULL, "invariant");

  // post class unload event
  EventClassUnload event(UNTIMED);
  event.set_endtime(_class_unload_time);
  event.set_unloadedClass(k);
  event.set_definingClassLoader(k->class_loader_data());
  event.commit();
}

#endif // INCLUDE_TRACE