/*
 * Copyright (c) 2012, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// A ClassLoaderData identifies the full set of class types that a class
// loader's name resolution strategy produces for a given configuration of the
// class loader.
// Class types in the ClassLoaderData may be defined by class file binaries
// provided by the class loader, or by other class loaders it interacts with
// according to its name resolution strategy.
//
// Class loaders that implement a deterministic name resolution strategy
// (including with respect to their delegation behavior), such as the boot, the
// platform, and the system loaders of the JDK's built-in class loader
// hierarchy, always produce the same linkset for a given configuration.
//
// ClassLoaderData carries information related to a linkset (e.g.,
// metaspace holding its klass definitions).
// The System Dictionary and related data structures (e.g., placeholder table,
// loader constraints table) as well as the runtime representation of classes
// only reference ClassLoaderData.
//
// Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData that
// represents the loader's "linking domain" in the JVM.
//
// The bootstrap loader (represented by NULL) also has a ClassLoaderData,
// the singleton class the_null_class_loader_data().

#include "precompiled.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/dictionary.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/metadataOnStackMark.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/packageEntry.hpp"
#include "classfile/systemDictionary.hpp"
#include "code/codeCache.hpp"
#include "gc/shared/gcLocker.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "oops/access.inline.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/mutex.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/safepoint.hpp"
#include "runtime/synchronizer.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
#include "utilities/ostream.hpp"
#if INCLUDE_TRACE
#include "trace/tracing.hpp"
#endif

// Graph-wide class counters; incremented/decremented under a CLD's metaspace
// lock (or at a safepoint) and read lock-free by iterators.
volatile size_t ClassLoaderDataGraph::_num_array_classes = 0;
volatile size_t ClassLoaderDataGraph::_num_instance_classes = 0;

// The singleton ClassLoaderData for the bootstrap (NULL) class loader.
ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;

ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous,
                                 Dependencies dependencies) :
  _class_loader(h_class_loader()),
  _is_anonymous(is_anonymous),
  // An anonymous class loader data doesn't have anything to keep
  // it from being unloaded during parsing of the anonymous class.
  // The null-class-loader should always be kept alive.
  _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
  _metaspace(NULL), _unloading(false), _klasses(NULL),
  _modules(NULL), _packages(NULL),
  _claimed(0), _modified_oops(true), _accumulated_modified_oops(false),
  _jmethod_ids(NULL), _handles(), _deallocate_list(NULL),
  _next(NULL), _dependencies(dependencies),
  _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
                            Monitor::_safepoint_check_never)) {

  // A ClassLoaderData created solely for an anonymous class should never have a
  // ModuleEntryTable or PackageEntryTable created for it. The defining package
  // and module for an anonymous class will be found in its host class.
  if (!is_anonymous) {
    _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
    if (h_class_loader.is_null()) {
      // Create unnamed module for boot loader
      _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
    } else {
      // Create unnamed module for all other loaders
      _unnamed_module = ModuleEntry::create_unnamed_module(this);
    }
  } else {
    _unnamed_module = NULL;
  }

  // Anonymous CLDs never define classes by name, so they need no dictionary.
  if (!is_anonymous) {
    _dictionary = create_dictionary();
  } else {
    _dictionary = NULL;
  }
  TRACE_INIT_ID(this);
}

// One-time setup of the boot loader's dependency list. Runs during VM
// initialization only, since it allocates a Java object array.
void ClassLoaderData::init_dependencies(TRAPS) {
  assert(!Universe::is_fully_initialized(), "should only be called when initializing");
  assert(is_the_null_class_loader_data(), "should only call this for the null class loader");
  _dependencies.init(CHECK);
}

void ClassLoaderData::Dependencies::init(TRAPS) {
  // Create empty dependencies array to add to. CMS requires this to be
  // an oop so that it can track additions via card marks. We think.
  _list_head = oopFactory::new_objectArray(2, CHECK);
}

// Free all chunks. Only called from the CLD destructor, so no concurrent
// readers can still be walking the list.
ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
  Chunk* c = _head;
  while (c != NULL) {
    Chunk* next = c->_next;
    delete c;
    c = next;
  }
}

// Append a new handle slot. Readers walk the list lock-free, so the new
// chunk and the incremented size are published with release_store; callers
// serialize writers externally (metaspace lock).
oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
  if (_head == NULL || _head->_size == Chunk::CAPACITY) {
    Chunk* next = new Chunk(_head);
    OrderAccess::release_store(&_head, next);
  }
  oop* handle = &_head->_data[_head->_size];
  *handle = o;
  OrderAccess::release_store(&_head->_size, _head->_size + 1);
  return handle;
}

// Apply f to the first 'size' slots of chunk c, skipping cleared entries.
inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
  for (juint i = 0; i < size; i++) {
    if (c->_data[i] != NULL) {
      f->do_oop(&c->_data[i]);
    }
  }
}

void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
  Chunk* head = OrderAccess::load_acquire(&_head);
  if (head != NULL) {
    // Must be careful when reading size of head: only the head chunk can be
    // concurrently appended to, so only its size needs an acquiring load.
    oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
    for (Chunk* c = head->_next; c != NULL; c = c->_next) {
      oops_do_chunk(f, c, c->_size);
    }
  }
}

#ifdef ASSERT
// Debug-only closure used by contains() to search the handle list for a
// specific slot address.
class VerifyContainsOopClosure : public OopClosure {
  oop* _target;
  bool _found;

 public:
  VerifyContainsOopClosure(oop* target) : _target(target), _found(false) {}

  void do_oop(oop* p) {
    if (p == _target) {
      _found = true;
    }
  }

  void do_oop(narrowOop* p) {
    // The ChunkedHandleList should not contain any narrowOop
    ShouldNotReachHere();
  }

  bool found() const {
    return _found;
  }
};

// Debug-only: true if p is a slot inside this handle list.
bool ClassLoaderData::ChunkedHandleList::contains(oop* p) {
  VerifyContainsOopClosure cl(p);
  oops_do(&cl);
  return cl.found();
}
#endif // ASSERT

bool
ClassLoaderData::claim() {
  // Returns true exactly once per marking cycle (until clear_claimed_marks),
  // so concurrent GC threads divide up CLDs without duplicating work.
  if (_claimed == 1) {
    return false;
  }

  return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0;
}

// Anonymous classes have their own ClassLoaderData that is marked to keep alive
// while the class is being parsed, and if the class appears on the module fixup list.
// Due to the uniqueness that no other class shares the anonymous class' name or
// ClassLoaderData, no other non-GC thread has knowledge of the anonymous class while
// it is being defined, therefore _keep_alive is not volatile or atomic.
void ClassLoaderData::inc_keep_alive() {
  if (is_anonymous()) {
    assert(_keep_alive >= 0, "Invalid keep alive increment count");
    _keep_alive++;
  }
}

void ClassLoaderData::dec_keep_alive() {
  if (is_anonymous()) {
    assert(_keep_alive > 0, "Invalid keep alive decrement count");
    _keep_alive--;
  }
}

// Apply f to the class loader oop, the dependency list head, and all handles.
// With must_claim set, only the thread that wins claim() does the work.
void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) {
  if (must_claim && !claim()) {
    return;
  }

  // Only clear modified_oops after the ClassLoaderData is claimed.
  if (clear_mod_oops) {
    clear_modified_oops();
  }

  f->do_oop(&_class_loader);
  _dependencies.oops_do(f);
  _handles.oops_do(f);
}

void ClassLoaderData::Dependencies::oops_do(OopClosure* f) {
  f->do_oop((oop*)&_list_head);
}

void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    klass_closure->do_klass(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::classes_do(void f(Klass * const)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    f(k);
    assert(k != k->next_link(), "no loops!");
  }
}

// Apply f to every method of every loaded InstanceKlass in this CLD.
void ClassLoaderData::methods_do(void f(Method*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
      InstanceKlass::cast(k)->methods_do(f);
    }
  }
}

void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    // Do not filter ArrayKlass oops here...
    if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
      klass_closure->do_klass(k);
    }
  }
}

void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass()) {
      f(InstanceKlass::cast(k));
    }
    assert(k != k->next_link(), "no loops!");
  }
}

// Apply f to the unnamed module and every named module defined to this CLD.
void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_unnamed_module != NULL) {
    f(_unnamed_module);
  }
  if (_modules != NULL) {
    for (int i = 0; i < _modules->table_size(); i++) {
      for (ModuleEntry* entry = _modules->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

// Apply f to every package defined to this CLD.
void ClassLoaderData::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_packages != NULL) {
    for (int i = 0; i < _packages->table_size(); i++) {
      for (PackageEntry* entry = _packages->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

// Record that this CLD references klass k, so that k's CLD (and metadata)
// cannot be unloaded while this CLD is alive.
void ClassLoaderData::record_dependency(const Klass* k, TRAPS) {
  assert(k != NULL, "invariant");

  ClassLoaderData * const from_cld = this;
  ClassLoaderData * const to_cld = k->class_loader_data();

  // Do not need to record dependency if the dependency is to a class whose
  // class loader data is never freed. (i.e. the dependency's class loader
  // is one of the three builtin class loaders and the dependency is not
  // anonymous.)
  if (to_cld->is_permanent_class_loader_data()) {
    return;
  }

  oop to;
  if (to_cld->is_anonymous()) {
    // Just return if an anonymous class is attempting to record a dependency
    // to itself. (Note that every anonymous class has its own unique class
    // loader data.)
    if (to_cld == from_cld) {
      return;
    }
    // Anonymous class dependencies are through the mirror.
    to = k->java_mirror();
  } else {
    to = to_cld->class_loader();
    oop from = from_cld->class_loader();

    // Just return if this dependency is to a class with the same or a parent
    // class_loader.
    if (from == to || java_lang_ClassLoader::isAncestor(from, to)) {
      return; // this class loader is in the parent list, no need to add it.
    }
  }

  // It's a dependency we won't find through GC, add it. This is relatively rare.
  // Must handle over GC point.
  Handle dependency(THREAD, to);
  from_cld->_dependencies.add(dependency, CHECK);

  // Added a potentially young gen oop to the ClassLoaderData
  record_modified_oops();
}


void ClassLoaderData::Dependencies::add(Handle dependency, TRAPS) {
  // Check first if this dependency is already in the list.
  // Save a pointer to the last to add to under the lock.
  objArrayOop ok = _list_head;
  objArrayOop last = NULL;
  while (ok != NULL) {
    last = ok;
    if (ok->obj_at(0) == dependency()) {
      // Don't need to add it
      return;
    }
    ok = (objArrayOop)ok->obj_at(1);
  }

  // Must handle over GC points
  assert (last != NULL, "dependencies should be initialized");
  objArrayHandle last_handle(THREAD, last);

  // Create a new dependency node with fields for (class_loader or mirror, next)
  objArrayOop deps = oopFactory::new_objectArray(2, CHECK);
  deps->obj_at_put(0, dependency());

  // Must handle over GC points
  objArrayHandle new_dependency(THREAD, deps);

  // Add the dependency under lock
  locked_add(last_handle, new_dependency, THREAD);
}

void ClassLoaderData::Dependencies::locked_add(objArrayHandle last_handle,
                                               objArrayHandle new_dependency,
                                               Thread* THREAD) {

  // Have to lock and put the new dependency on the end of the dependency
  // array so the card mark for CMS sees that this dependency is new.
  // Can probably do this lock free with some effort.
  ObjectLocker ol(Handle(THREAD, _list_head), THREAD);

  oop loader_or_mirror = new_dependency->obj_at(0);

  // Since the dependencies are only added, add to the end.
  objArrayOop end = last_handle();
  objArrayOop last = NULL;
  while (end != NULL) {
    last = end;
    // check again if another thread added it to the end.
    if (end->obj_at(0) == loader_or_mirror) {
      // Don't need to add it
      return;
    }
    end = (objArrayOop)end->obj_at(1);
  }
  assert (last != NULL, "dependencies should be initialized");
  // fill in the first element with the oop in new_dependency.
  if (last->obj_at(0) == NULL) {
    last->obj_at_put(0, new_dependency->obj_at(0));
  } else {
    last->obj_at_put(1, new_dependency());
  }
}

// Reset every CLD's claimed flag so the next marking cycle can claim anew.
void ClassLoaderDataGraph::clear_claimed_marks() {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->clear_claimed();
  }
}

// Link a newly created klass into this CLD's _klasses list and bump the
// graph-wide class counters.
void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
  {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    Klass* old_value = _klasses;
    k->set_next_link(old_value);
    // Link the new item into the list, making sure the linked class is stable
    // since the list can be walked without a lock
    OrderAccess::release_store(&_klasses, k);
    if (k->is_array_klass()) {
      ClassLoaderDataGraph::inc_array_classes(1);
    } else {
      ClassLoaderDataGraph::inc_instance_classes(1);
    }
  }

  if (publicize && k->class_loader_data() != NULL) {
    ResourceMark rm;
    log_trace(class, loader, data)("Adding k: " PTR_FORMAT " %s to CLD: "
                  PTR_FORMAT " loader: " PTR_FORMAT " %s",
                  p2i(k),
                  k->external_name(),
                  p2i(k->class_loader_data()),
                  p2i((void *)k->class_loader()),
                  loader_name());
  }
}

// Class iterator used by the compiler.
// It gets some number of classes at
// a safepoint to decay invocation counters on the methods.
class ClassLoaderDataGraphKlassIteratorStatic {
  ClassLoaderData* _current_loader_data;   // CLD the cursor is currently in
  Klass*           _current_class_entry;   // next klass to visit within it
 public:

  ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {}

  // Advance the cursor and return the next loaded InstanceKlass, wrapping
  // from CLD to CLD; returns NULL if nothing loadable was found.
  InstanceKlass* try_get_next_class() {
    assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
    size_t max_classes = ClassLoaderDataGraph::num_instance_classes();
    assert(max_classes > 0, "should not be called with no instance classes");
    for (size_t i = 0; i < max_classes; ) {

      if (_current_class_entry != NULL) {
        Klass* k = _current_class_entry;
        _current_class_entry = _current_class_entry->next_link();

        if (k->is_instance_klass()) {
          InstanceKlass* ik = InstanceKlass::cast(k);
          i++;  // count all instance classes found
          // Not yet loaded classes are counted in max_classes
          // but only return loaded classes.
          if (ik->is_loaded()) {
            return ik;
          }
        }
      } else {
        // Go to next CLD
        if (_current_loader_data != NULL) {
          _current_loader_data = _current_loader_data->next();
        }
        // Start at the beginning
        if (_current_loader_data == NULL) {
          _current_loader_data = ClassLoaderDataGraph::_head;
        }

        _current_class_entry = _current_loader_data->klasses();
      }
    }
    // Should never be reached unless all instance classes have failed or are not fully loaded.
    // Caller handles NULL.
    return NULL;
  }

  // If the current class for the static iterator is a class being unloaded or
  // deallocated, adjust the current class.
  void adjust_saved_class(ClassLoaderData* cld) {
    if (_current_loader_data == cld) {
      _current_loader_data = cld->next();
      if (_current_loader_data != NULL) {
        _current_class_entry = _current_loader_data->klasses();
      }  // else try_get_next_class will start at the head
    }
  }

  void adjust_saved_class(Klass* klass) {
    if (_current_class_entry == klass) {
      _current_class_entry = klass->next_link();
    }
  }
};

static ClassLoaderDataGraphKlassIteratorStatic static_klass_iterator;

InstanceKlass* ClassLoaderDataGraph::try_get_next_class() {
  return static_klass_iterator.try_get_next_class();
}


// Remove a klass from the _klasses list for scratch_class during redefinition
// or parsed class in the case of an error.
void ClassLoaderData::remove_class(Klass* scratch_class) {
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");

  // Adjust global class iterator.
  static_klass_iterator.adjust_saved_class(scratch_class);

  Klass* prev = NULL;
  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    if (k == scratch_class) {
      if (prev == NULL) {
        _klasses = k->next_link();
      } else {
        Klass* next = k->next_link();
        prev->set_next_link(next);
      }

      if (k->is_array_klass()) {
        ClassLoaderDataGraph::dec_array_classes(1);
      } else {
        ClassLoaderDataGraph::dec_instance_classes(1);
      }

      return;
    }
    prev = k;
    assert(k != k->next_link(), "no loops!");
  }
  ShouldNotReachHere();   // should have found this class!!
}

// Mark this CLD as unloading and notify interested parties; the actual
// memory is released later by the destructor.
void ClassLoaderData::unload() {
  _unloading = true;

  // Tell serviceability tools these classes are unloading
  classes_do(InstanceKlass::notify_unload_class);

  LogTarget(Debug, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print(": unload loader data " INTPTR_FORMAT, p2i(this));
    ls.print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)class_loader()),
               loader_name());
    if (is_anonymous()) {
      ls.print(" for anonymous class " INTPTR_FORMAT " ", p2i(_klasses));
    }
    ls.cr();
  }

  // Some items on the _deallocate_list need to free their C heap structures
  // if they are not already on the _klasses list.
  unload_deallocate_list();

  // Clean up global class iterator for compiler
  static_klass_iterator.adjust_saved_class(this);
}

ModuleEntryTable* ClassLoaderData::modules() {
  // Lazily create the module entry table at first request.
  // Lock-free access requires load_acquire.
  ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
  if (modules == NULL) {
    MutexLocker m1(Module_lock);
    // Check if _modules got allocated while we were waiting for this lock.
    if ((modules = _modules) == NULL) {
      modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);

      {
        MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
        // Ensure _modules is stable, since it is examined without a lock
        OrderAccess::release_store(&_modules, modules);
      }
    }
  }
  return modules;
}

// Initial dictionary sizes: large for loaders expected to define many
// classes (boot/system), small otherwise.
const int _boot_loader_dictionary_size = 1009;
const int _default_loader_dictionary_size = 107;

Dictionary* ClassLoaderData::create_dictionary() {
  assert(!is_anonymous(), "anonymous class loader data do not have a dictionary");
  int size;
  bool resizable = false;
  if (_the_null_class_loader_data == NULL) {
    size = _boot_loader_dictionary_size;
    resizable = true;
  } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
    size = 1;  // there's only one class in reflection class loader and no initiated classes
  } else if (is_system_class_loader_data()) {
    size = _boot_loader_dictionary_size;
    resizable = true;
  } else {
    size = _default_loader_dictionary_size;
    resizable = true;
  }
  // Dictionary resizing is disabled during CDS dump/use and can be turned off globally.
  if (!DynamicallyResizeSystemDictionaries || DumpSharedSpaces || UseSharedSpaces) {
    resizable = false;
  }
  return new Dictionary(this, size, resizable);
}

// Unloading support
// The oop whose reachability decides whether this CLD stays alive: the
// mirror for anonymous classes, the loader oop otherwise.
oop ClassLoaderData::keep_alive_object() const {
  assert_locked_or_safepoint(_metaspace_lock);
  assert(!keep_alive(), "Don't use with CLDs that are artificially kept alive");
  return is_anonymous() ? _klasses->java_mirror() : class_loader();
}

bool ClassLoaderData::is_alive(BoolObjectClosure* is_alive_closure) const {
  bool alive = keep_alive() // null class loader and incomplete anonymous klasses.
               || is_alive_closure->do_object_b(keep_alive_object());

  return alive;
}

// Closure used by the destructor: releases each klass's C heap side
// structures and tallies how many instance/array classes were removed.
class ReleaseKlassClosure: public KlassClosure {
 private:
  size_t _instance_class_released;
  size_t _array_class_released;
 public:
  ReleaseKlassClosure() : _instance_class_released(0), _array_class_released(0) { }

  size_t instance_class_released() const { return _instance_class_released; }
  size_t array_class_released() const { return _array_class_released; }

  void do_klass(Klass* k) {
    if (k->is_array_klass()) {
      _array_class_released ++;
    } else {
      assert(k->is_instance_klass(), "Must be");
      _instance_class_released ++;
      InstanceKlass::release_C_heap_structures(InstanceKlass::cast(k));
    }
  }
};

ClassLoaderData::~ClassLoaderData() {
  // Release C heap structures for all the classes.
  ReleaseKlassClosure cl;
  classes_do(&cl);

  ClassLoaderDataGraph::dec_array_classes(cl.array_class_released());
  ClassLoaderDataGraph::dec_instance_classes(cl.instance_class_released());

  // Release C heap allocated hashtable for all the packages.
  if (_packages != NULL) {
    // Destroy the table itself
    delete _packages;
    _packages = NULL;
  }

  // Release C heap allocated hashtable for all the modules.
  if (_modules != NULL) {
    // Destroy the table itself
    delete _modules;
    _modules = NULL;
  }

  // Release C heap allocated hashtable for the dictionary
  if (_dictionary != NULL) {
    // Destroy the table itself
    delete _dictionary;
    _dictionary = NULL;
  }

  if (_unnamed_module != NULL) {
    _unnamed_module->delete_unnamed_module();
    _unnamed_module = NULL;
  }

  // release the metaspace
  Metaspace *m = _metaspace;
  if (m != NULL) {
    _metaspace = NULL;
    delete m;
  }
  // Clear all the JNI handles for methods
  // These aren't deallocated and are going to look like a leak, but that's
  // needed because we can't really get rid of jmethodIDs because we don't
  // know when native code is going to stop using them.  The spec says that
  // they're "invalid" but existing programs likely rely on their being
  // NULL after class unloading.
  if (_jmethod_ids != NULL) {
    Method::clear_jmethod_ids(this);
  }
  // Delete lock
  delete _metaspace_lock;

  // Delete free list
  if (_deallocate_list != NULL) {
    delete _deallocate_list;
  }
}

// Returns true if this class loader data is for the app class loader
// or a user defined system class loader.  (Note that the class loader
// data may be anonymous.)
bool ClassLoaderData::is_system_class_loader_data() const {
  return SystemDictionary::is_system_class_loader(class_loader());
}

// Returns true if this class loader data is for the platform class loader.
// (Note that the class loader data may be anonymous.)
bool ClassLoaderData::is_platform_class_loader_data() const {
  return SystemDictionary::is_platform_class_loader(class_loader());
}

// Returns true for the 3 builtin loaders (boot, system/app, platform),
// including a user-defined system class loader. Note that if the class
// loader data is for an anonymous class then it may get freed by a GC
// even if its class loader is one of these loaders.
bool ClassLoaderData::is_builtin_class_loader_data() const {
  return (is_boot_class_loader_data() ||
          SystemDictionary::is_system_class_loader(class_loader()) ||
          SystemDictionary::is_platform_class_loader(class_loader()));
}

// Returns true if this class loader data is a class loader data
// that is not ever freed by a GC.  It must be one of the builtin
// class loaders and not anonymous.
bool ClassLoaderData::is_permanent_class_loader_data() const {
  return is_builtin_class_loader_data() && !is_anonymous();
}

Metaspace* ClassLoaderData::metaspace_non_null() {
  // If the metaspace has not been allocated, create a new one.  Might want
  // to create smaller arena for Reflection class loaders also.
  // The reason for the delayed allocation is because some class loaders are
  // simply for delegating with no metadata of their own.
  // Lock-free access requires load_acquire.
  Metaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
  if (metaspace == NULL) {
    MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
    // Check if _metaspace got allocated while we were waiting for this lock.
    if ((metaspace = _metaspace) == NULL) {
      // Pick a metaspace type matching the loader's expected footprint.
      if (this == the_null_class_loader_data()) {
        assert (class_loader() == NULL, "Must be");
        metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
      } else if (is_anonymous()) {
        if (class_loader() != NULL) {
          log_trace(class, loader, data)("is_anonymous: %s", class_loader()->klass()->internal_name());
        }
        metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
      } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
        if (class_loader() != NULL) {
          log_trace(class, loader, data)("is_reflection: %s", class_loader()->klass()->internal_name());
        }
        metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
      } else {
        metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
      }
      // Ensure _metaspace is stable, since it is examined without a lock
      OrderAccess::release_store(&_metaspace, metaspace);
    }
  }
  return metaspace;
}

// Add an oop handle rooted in this CLD; the new oop may be young, so the
// CLD is flagged as modified for generational GCs.
OopHandle ClassLoaderData::add_handle(Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  record_modified_oops();
  return OopHandle(_handles.add(h()));
}

void ClassLoaderData::remove_handle(OopHandle h) {
  assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
  oop* ptr = h.ptr_raw();
  if (ptr != NULL) {
    assert(_handles.contains(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
    // This root is not walked in safepoints, and hence requires an appropriate
    // decorator that e.g. maintains the SATB invariant in SATB collectors.
    RootAccess<IN_CONCURRENT_ROOT>::oop_store(ptr, oop(NULL));
  }
}

// Install h into dest exactly once; later callers find dest already set
// and return without overwriting it.
void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  if (dest.resolve() != NULL) {
    return;
  } else {
    dest = _handles.add(h());
  }
}

// Add this metadata pointer to be freed when it's safe.  This is only during
// class unloading because Handles might point to this metadata field.
void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
  // Metadata in shared region isn't deleted.
  if (!m->is_shared()) {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    if (_deallocate_list == NULL) {
      _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
    }
    _deallocate_list->append_if_missing(m);
  }
}

// Deallocate free metadata on the free list.  How useful the PermGen was!
void ClassLoaderData::free_deallocate_list() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    // Only free metadata no frame on any stack still references.
    if (!m->on_stack()) {
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        ShouldNotReachHere();
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
    }
  }
}

// This is distinct from free_deallocate_list.  For class loader data that are
// unloading, this frees the C heap memory for items on the list, and unlinks
// scratch or error classes so that unloading events aren't triggered for these
// classes. The metadata is removed with the unloading metaspace.
// There isn't C heap memory allocated for methods, so nothing is done for them.
void ClassLoaderData::unload_deallocate_list() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(is_unloading(), "only called for ClassLoaderData that are unloading");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    assert (!m->on_stack(), "wouldn't be unloading if this were so");
    _deallocate_list->remove_at(i);
    if (m->is_constantPool()) {
      ((ConstantPool*)m)->release_C_heap_structures();
    } else if (m->is_klass()) {
      InstanceKlass* ik = (InstanceKlass*)m;
      // also releases ik->constants() C heap memory
      InstanceKlass::release_C_heap_structures(ik);
      // Remove the class so unloading events aren't triggered for
      // this class (scratch or error class) in do_unloading().
      remove_class(ik);
    }
  }
}

// These anonymous class loaders are to contain classes used for JSR292
ClassLoaderData* ClassLoaderData::anonymous_class_loader_data(oop loader, TRAPS) {
  // Add a new class loader data to the graph.
  Handle lh(THREAD, loader);
  return ClassLoaderDataGraph::add(lh, true, THREAD);
}

const char* ClassLoaderData::loader_name() {
  // Handles null class loader
  return SystemDictionary::loader_name(class_loader());
}

#ifndef PRODUCT
// Define to dump klasses
#undef CLD_DUMP_KLASSES

// Debug printing of this CLD's identity, metaspace and (optionally) klasses.
void ClassLoaderData::dump(outputStream * const out) {
  out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: " PTR_FORMAT " %s {",
      p2i(this), p2i((void *)class_loader()),
      p2i(class_loader() != NULL ? class_loader()->klass() : NULL), loader_name());
  if (claimed()) out->print(" claimed ");
  if (is_unloading()) out->print(" unloading ");
  out->cr();
  if (metaspace_or_null() != NULL) {
    out->print_cr("metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));
    metaspace_or_null()->dump(out);
  } else {
    out->print_cr("metaspace: NULL");
  }

#ifdef CLD_DUMP_KLASSES
  if (Verbose) {
    Klass* k = _klasses;
    while (k != NULL) {
      out->print_cr("klass " PTR_FORMAT ", %s", p2i(k), k->name()->as_C_string());
      assert(k != k->next_link(), "no loops!");
      k = k->next_link();
    }
  }
#endif  // CLD_DUMP_KLASSES
#undef CLD_DUMP_KLASSES
  if (_jmethod_ids != NULL) {
    Method::print_jmethod_ids(this, out);
  }
  out->print_cr("}");
}
#endif // PRODUCT

// Sanity-check the CLD invariants: loader <-> CLD back pointers, metaspace
// integrity, and that every klass on the list belongs to this CLD.
void ClassLoaderData::verify() {
  assert_locked_or_safepoint(_metaspace_lock);
  oop cl = class_loader();

  guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
  guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");

  // Verify the integrity of the allocated space.
  if (metaspace_or_null() != NULL) {
    metaspace_or_null()->verify();
  }

  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    guarantee(k->class_loader_data() == this, "Must be the same");
    k->verify();
    assert(k != k->next_link(), "no loops!");
  }
}

bool ClassLoaderData::contains_klass(Klass* klass) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k == klass) return true;
  }
  return false;
}


// GC root of class loader data created.
// Singly-linked list of all live CLDs (_head) and of CLDs found dead at the
// last unloading pass (_unloading); _saved_* support CMS-style deferred purge
// and new_clds() snapshots.
ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;

bool ClassLoaderDataGraph::_should_purge = false;
bool ClassLoaderDataGraph::_metaspace_oom = false;

// Add a new class loader data node to the list.  Assign the newly created
// ClassLoaderData into the java/lang/ClassLoader object as a hidden field
ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous, TRAPS) {
  // We need to allocate all the oops for the ClassLoaderData before allocating the
  // actual ClassLoaderData object.
  ClassLoaderData::Dependencies dependencies(CHECK_NULL);

  NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
                                     // ClassLoaderData in the graph since the CLD
                                     // contains unhandled oops

  ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous, dependencies);


  if (!is_anonymous) {
    // First, Atomically set it
    ClassLoaderData* old = java_lang_ClassLoader::cmpxchg_loader_data(cld, loader(), NULL);
    if (old != NULL) {
      // Another thread beat us to installing a CLD for this loader;
      // discard ours and use theirs.
      delete cld;
      // Returns the data.
      return old;
    }
  }

  // We won the race, and therefore the task of adding the data to the list of
  // class loader data
  ClassLoaderData** list_head = &_head;
  ClassLoaderData* next = _head;

  // Lock-free push onto _head via CAS; retry with the observed head on failure.
  do {
    cld->set_next(next);
    ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
    if (exchanged == next) {
      LogTarget(Debug, class, loader, data) lt;
      if (lt.is_enabled()) {
        PauseNoSafepointVerifier pnsv(&no_safepoints); // Need safe points for JavaCalls::call_virtual
        LogStream ls(lt);
        print_creation(&ls, loader, cld, CHECK_NULL);
      }
      return cld;
    }
    next = exchanged;
  } while (true);
}

// Log the creation of 'cld', including the result of loader.toString() when a
// loader instance is available (may call Java, hence TRAPS).
void ClassLoaderDataGraph::print_creation(outputStream* out, Handle loader, ClassLoaderData* cld, TRAPS) {
  Handle string;
  if (loader.not_null()) {
    // Include the result of loader.toString() in the output. This allows
    // the user of the log to identify the class loader instance.
    JavaValue result(T_OBJECT);
    Klass* spec_klass = SystemDictionary::ClassLoader_klass();
    JavaCalls::call_virtual(&result,
                            loader,
                            spec_klass,
                            vmSymbols::toString_name(),
                            vmSymbols::void_string_signature(),
                            CHECK);
    assert(result.get_type() == T_OBJECT, "just checking");
    string = Handle(THREAD, (oop)result.get_jobject());
  }

  ResourceMark rm;
  out->print("create class loader data " INTPTR_FORMAT, p2i(cld));
  out->print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)cld->class_loader()),
             cld->loader_name());

  if (string.not_null()) {
    out->print(": ");
    java_lang_String::print(string(), out);
  }
  out->cr();
}


// Apply 'f' to the oops of every CLD in the graph.
void ClassLoaderDataGraph::oops_do(OopClosure* f, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->oops_do(f, must_claim);
  }
}

// As oops_do, but only for CLDs that must be kept alive.
void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    if (cld->keep_alive()) {
      cld->oops_do(f, must_claim);
    }
  }
}

// Strong-root oop walk: only keep-alive CLDs when class unloading is enabled,
// otherwise every CLD is strong.
void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, bool must_claim) {
  if (ClassUnloading) {
    keep_alive_oops_do(f, must_claim);
  } else {
    oops_do(f, must_claim);
  }
}

// Apply 'cl' to every CLD in the graph (no-op for a NULL closure).
void ClassLoaderDataGraph::cld_do(CLDClosure* cl) {
  for (ClassLoaderData* cld = _head; cl != NULL && cld != NULL; cld = cld->next()) {
    cl->do_cld(cld);
  }
}

// Apply 'cl' to the CLDs added to the unloading list by the current pass.
void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cl->do_cld(cld);
  }
}

// Walk all CLDs, routing each to the strong or weak closure depending on
// whether it must be kept alive; NULL closures are skipped.
void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->_next) {
    CLDClosure* closure = cld->keep_alive() ? strong : weak;
    if (closure != NULL) {
      closure->do_cld(cld);
    }
  }
}

// Apply 'cl' only to keep-alive CLDs.
void ClassLoaderDataGraph::keep_alive_cld_do(CLDClosure* cl) {
  roots_cld_do(cl, NULL);
}

// Strong-root CLD walk; mirrors always_strong_oops_do.
void ClassLoaderDataGraph::always_strong_cld_do(CLDClosure* cl) {
  if (ClassUnloading) {
    keep_alive_cld_do(cl);
  } else {
    cld_do(cl);
  }
}

// Apply the klass closure to every klass of every CLD.
void ClassLoaderDataGraph::classes_do(KlassClosure* klass_closure) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->classes_do(klass_closure);
  }
}

// Function-pointer variant of the klass walk.
void ClassLoaderDataGraph::classes_do(void f(Klass* const)) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->classes_do(f);
  }
}

// Apply 'f' to every method of every CLD.
void ClassLoaderDataGraph::methods_do(void f(Method*)) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->methods_do(f);
  }
}

// Apply 'f' to every module entry; requires Module_lock or a safepoint.
void ClassLoaderDataGraph::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->modules_do(f);
  }
}

// Module walk over CLDs newly placed on the unloading list.
void ClassLoaderDataGraph::modules_unloading_do(void f(ModuleEntry*)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->modules_do(f);
  }
}

// Apply 'f' to every package entry; requires Module_lock or a safepoint.
void ClassLoaderDataGraph::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->packages_do(f);
  }
}

// Package walk over CLDs newly placed on the unloading list.
void ClassLoaderDataGraph::packages_unloading_do(void f(PackageEntry*)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->packages_do(f);
  }
}

// Apply the closure to the loaded classes of every CLD.
void ClassLoaderDataGraph::loaded_classes_do(KlassClosure* klass_closure) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->loaded_classes_do(klass_closure);
  }
}

// Klass walk over CLDs newly placed on the unloading list.
void ClassLoaderDataGraph::classes_unloading_do(void f(Klass* const)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->classes_do(f);
  }
}

// Iterate over every CLD that has a dictionary, binding it to X.
#define FOR_ALL_DICTIONARY(X) for (ClassLoaderData* X = _head; X != NULL; X = X->next()) \
                                if (X->dictionary() != NULL)

// Walk classes in the loaded class dictionaries in various forms.
// Only walks the classes defined in this class loader.
// Apply 'f' to every class defined by each loader (not initiated loads).
void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*)) {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->classes_do(f);
  }
}

// Only walks the classes defined in this class loader.
void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*, TRAPS), TRAPS) {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->classes_do(f, CHECK);
  }
}

// Walks all entries in the dictionary including entries initiated by this class loader.
void ClassLoaderDataGraph::dictionary_all_entries_do(void f(InstanceKlass*, ClassLoaderData*)) {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->all_entries_do(f);
  }
}

// Verify every per-loader dictionary.
void ClassLoaderDataGraph::verify_dictionary() {
  FOR_ALL_DICTIONARY(cld) {
    cld->dictionary()->verify();
  }
}

// Print each per-loader dictionary, prefixed by the owning CLD.
void ClassLoaderDataGraph::print_dictionary(outputStream* st) {
  FOR_ALL_DICTIONARY(cld) {
    st->print("Dictionary for ");
    cld->print_value_on(st);
    st->cr();
    cld->dictionary()->print_on(st);
    st->cr();
  }
}

// Print hash-table statistics for each per-loader dictionary.
void ClassLoaderDataGraph::print_dictionary_statistics(outputStream* st) {
  FOR_ALL_DICTIONARY(cld) {
    ResourceMark rm;
    stringStream tempst;
    tempst.print("System Dictionary for %s", cld->loader_name());
    cld->dictionary()->print_table_statistics(st, tempst.as_string());
  }
}

// Return the unclaimed CLDs added since remember_new_clds(true), i.e. the
// prefix of the list in [_head, _saved_head).
GrowableArray<ClassLoaderData*>* ClassLoaderDataGraph::new_clds() {
  assert(_head == NULL || _saved_head != NULL, "remember_new_clds(true) not called?");

  GrowableArray<ClassLoaderData*>* array = new GrowableArray<ClassLoaderData*>();

  // The CLDs in [_head, _saved_head] were all added during last call to remember_new_clds(true);
  ClassLoaderData* curr = _head;
  while (curr != _saved_head) {
    if (!curr->claimed()) {
      array->push(curr);
      LogTarget(Debug, class, loader, data) lt;
      if (lt.is_enabled()) {
        LogStream ls(lt);
        ls.print("found new CLD: ");
        curr->print_value_on(&ls);
        ls.cr();
      }
    }

    curr = curr->_next;
  }

  return array;
}

// True if 'x' lies in the metaspace of any CLD on the unloading list.
bool ClassLoaderDataGraph::unload_list_contains(const void* x) {
  assert(SafepointSynchronize::is_at_safepoint(), "only safe to call at safepoint");
  for (ClassLoaderData* cld = _unloading; cld != NULL; cld = cld->next()) {
    if (cld->metaspace_or_null() != NULL && cld->metaspace_or_null()->contains(x)) {
      return true;
    }
  }
  return false;
}

#ifndef PRODUCT
// Debug-only: is 'loader_data' currently on the live list?
bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    if (loader_data == data) {
      return true;
    }
  }

  return false;
}
#endif // PRODUCT


// Move class loader data from main list to the unloaded list for unloading
// and deallocation later.
bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure,
                                        bool clean_previous_versions) {

  ClassLoaderData* data = _head;
  ClassLoaderData* prev = NULL;
  bool seen_dead_loader = false;

  // Mark metadata seen on the stack only so we can delete unneeded entries.
  // Only walk all metadata, including the expensive code cache walk, for Full GC
  // and only if class redefinition and if there's previous versions of
  // Klasses to delete.
  bool walk_all_metadata = clean_previous_versions &&
                           JvmtiExport::has_redefined_a_class() &&
                           InstanceKlass::has_previous_versions_and_reset();
  MetadataOnStackMark md_on_stack(walk_all_metadata);

  // Save previous _unloading pointer for CMS which may add to unloading list before
  // purging and we don't want to rewalk the previously unloaded class loader data.
  _saved_unloading = _unloading;

  data = _head;
  while (data != NULL) {
    if (data->is_alive(is_alive_closure)) {
      // clean metaspace
      if (walk_all_metadata) {
        data->classes_do(InstanceKlass::purge_previous_versions);
      }
      data->free_deallocate_list();
      prev = data;
      data = data->next();
      continue;
    }
    // Dead loader: unlink it from the live list and push onto _unloading.
    seen_dead_loader = true;
    ClassLoaderData* dead = data;
    dead->unload();
    data = data->next();
    // Remove from loader list.
    // This class loader data will no longer be found
    // in the ClassLoaderDataGraph.
    if (prev != NULL) {
      prev->set_next(data);
    } else {
      assert(dead == _head, "sanity check");
      _head = data;
    }
    dead->set_next(_unloading);
    _unloading = dead;
  }

  if (seen_dead_loader) {
    data = _head;
    while (data != NULL) {
      // Remove entries in the dictionary of live class loader that have
      // initiated loading classes in a dead class loader.
      if (data->dictionary() != NULL) {
        data->dictionary()->do_unloading(is_alive_closure);
      }
      // Walk a ModuleEntry's reads, and a PackageEntry's exports
      // lists to determine if there are modules on those lists that are now
      // dead and should be removed.  A module's life cycle is equivalent
      // to its defining class loader's life cycle.  Since a module is
      // considered dead if its class loader is dead, these walks must
      // occur after each class loader's aliveness is determined.
      if (data->packages() != NULL) {
        data->packages()->purge_all_package_exports();
      }
      if (data->modules_defined()) {
        data->modules()->purge_all_module_reads();
      }
      data = data->next();
    }

    post_class_unload_events();
  }

  return seen_dead_loader;
}

// Delete everything on the unloading list and, if anything was deleted,
// purge the metaspace and clear the metaspace-OOM flag.
void ClassLoaderDataGraph::purge() {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  ClassLoaderData* list = _unloading;
  _unloading = NULL;
  ClassLoaderData* next = list;
  bool classes_unloaded = false;
  while (next != NULL) {
    ClassLoaderData* purge_me = next;
    next = purge_me->next();
    delete purge_me;
    classes_unloaded = true;
  }
  if (classes_unloaded) {
    Metaspace::purge();
    set_metaspace_oom(false);
  }
}

// Resize any dictionary that requested it; returns the number resized.
int ClassLoaderDataGraph::resize_if_needed() {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  int resized = 0;
  if (Dictionary::does_any_dictionary_needs_resizing()) {
    FOR_ALL_DICTIONARY(cld) {
      if (cld->dictionary()->resize_if_needed()) {
        resized++;
      }
    }
  }
  return resized;
}

// Emit tracing events for the classes just moved to the unloading list.
void ClassLoaderDataGraph::post_class_unload_events() {
#if INCLUDE_TRACE
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  if (Tracing::enabled()) {
    if (Tracing::is_event_enabled(TraceClassUnloadEvent)) {
      assert(_unloading != NULL, "need class loader data unload list!");
      _class_unload_time = Ticks::now();
      classes_unloading_do(&class_unload_event);
    }
    Tracing::on_unloading_classes();
  }
#endif
}

// Position the atomic iterator at the first klass of the first CLD that
// has one; _next_klass stays NULL if the graph holds no klasses.
ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic()
    : _next_klass(NULL) {
  ClassLoaderData* cld = ClassLoaderDataGraph::_head;
  Klass* klass = NULL;

  // Find the first klass in the CLDG.
  while (cld != NULL) {
    assert_locked_or_safepoint(cld->metaspace_lock());
    klass = cld->_klasses;
    if (klass != NULL) {
      _next_klass = klass;
      return;
    }
    cld = cld->next();
  }
}

// Successor of 'klass' in CLDG order: next in its CLD's list, else the first
// klass of a following CLD; NULL when exhausted.
Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) {
  Klass* next = klass->next_link();
  if (next != NULL) {
    return next;
  }

  // No more klasses in the current CLD. Time to find a new CLD.
  ClassLoaderData* cld = klass->class_loader_data();
  assert_locked_or_safepoint(cld->metaspace_lock());
  while (next == NULL) {
    cld = cld->next();
    if (cld == NULL) {
      break;
    }
    next = cld->_klasses;
  }

  return next;
}

// Claim the current klass via CAS so concurrent callers each receive a
// distinct klass; returns NULL once the iteration is exhausted.
Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
  Klass* head = _next_klass;

  while (head != NULL) {
    Klass* next = next_klass_in_cldg(head);

    Klass* old_head = Atomic::cmpxchg(next, &_next_klass, head);

    if (old_head == head) {
      return head; // Won the CAS.
    }

    head = old_head;
  }

  // Nothing more for the iterator to hand out.
  assert(head == NULL, "head is " PTR_FORMAT ", expected not null:", p2i(head));
  return NULL;
}

// Metaspace iterator starts at the head of the CLD graph.
ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
  _data = ClassLoaderDataGraph::_head;
}

ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}

#ifndef PRODUCT
// callable from debugger
extern "C" int print_loader_data_graph() {
  ClassLoaderDataGraph::dump_on(tty);
  return 0;
}

// Verify every CLD on the live list.
void ClassLoaderDataGraph::verify() {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    data->verify();
  }
}

// Dump every CLD followed by metaspace summary statistics.
void ClassLoaderDataGraph::dump_on(outputStream * const out) {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    data->dump(out);
  }
  MetaspaceAux::dump(out);
}
#endif // PRODUCT

// Short-form print: loader address and value, or "NULL class loader".
void ClassLoaderData::print_value_on(outputStream* out) const {
  if (class_loader() == NULL) {
    out->print("NULL class loader");
  } else {
    out->print("class loader " INTPTR_FORMAT " ", p2i(this));
    class_loader()->print_value_on(out);
  }
}

// Long-form print of the loader oop.
void ClassLoaderData::print_on(outputStream* out) const {
  if (class_loader() == NULL) {
    out->print("NULL class loader");
  } else {
    out->print("class loader " INTPTR_FORMAT " ", p2i(this));
    class_loader()->print_on(out);
  }
}

#if INCLUDE_TRACE

Ticks ClassLoaderDataGraph::_class_unload_time;

// Closure body for classes_unloading_do: post one unload event per klass,
// timestamped with the shared _class_unload_time.
void ClassLoaderDataGraph::class_unload_event(Klass* const k) {
  assert(k != NULL, "invariant");

  // post class unload event
  EventClassUnload event(UNTIMED);
  event.set_endtime(_class_unload_time);
  event.set_unloadedClass(k);
  event.set_definingClassLoader(k->class_loader_data());
  event.commit();
}

#endif // INCLUDE_TRACE