/*
 * Copyright (c) 2012, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// A ClassLoaderData identifies the full set of class types that a class
// loader's name resolution strategy produces for a given configuration of the
// class loader.
// Class types in the ClassLoaderData may be defined from class file binaries
// provided by the class loader, or by other class loaders it interacts with
// according to its name resolution strategy.
//
// Class loaders that implement a deterministic name resolution strategy
// (including with respect to their delegation behavior), such as the boot, the
// platform, and the system loaders of the JDK's built-in class loader
// hierarchy, always produce the same linkset for a given configuration.
//
// ClassLoaderData carries information related to a linkset (e.g.,
// metaspace holding its klass definitions).
// The System Dictionary and related data structures (e.g., placeholder table,
// loader constraints table) as well as the runtime representation of classes
// only reference ClassLoaderData.
//
// Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData that
// represents the loader's "linking domain" in the JVM.
//
// The bootstrap loader (represented by NULL) also has a ClassLoaderData,
// the singleton class the_null_class_loader_data().

#include "precompiled.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/metadataOnStackMark.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/packageEntry.hpp"
#include "classfile/systemDictionary.hpp"
#include "code/codeCache.hpp"
#include "gc/shared/gcLocker.hpp"
#include "logging/log.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/mutex.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/safepoint.hpp"
#include "runtime/synchronizer.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
#include "utilities/ostream.hpp"
#if INCLUDE_TRACE
#include "trace/tracing.hpp"
#endif

// helper function to avoid in-line casts
template <typename T> static T* load_ptr_acquire(T* volatile *p) {
  return static_cast<T*>(OrderAccess::load_ptr_acquire(p));
}

ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;

ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous, Dependencies dependencies) :
  _class_loader(h_class_loader()),
  _is_anonymous(is_anonymous),
  // An anonymous class loader data doesn't have anything to keep
  // it from being unloaded during parsing of the anonymous class.
  // The null-class-loader should always be kept alive.
  _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
  _metaspace(NULL), _unloading(false), _klasses(NULL),
  _modules(NULL), _packages(NULL),
  _claimed(0), _jmethod_ids(NULL), _handles(NULL), _deallocate_list(NULL),
  _next(NULL), _dependencies(dependencies), _shared_class_loader_id(-1),
  _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
                            Monitor::_safepoint_check_never)) {
  TRACE_INIT_ID(this);
}

void ClassLoaderData::init_dependencies(TRAPS) {
  assert(!Universe::is_fully_initialized(), "should only be called when initializing");
  assert(is_the_null_class_loader_data(), "should only call this for the null class loader");
  _dependencies.init(CHECK);
}

void ClassLoaderData::Dependencies::init(TRAPS) {
  // Create empty dependencies array to add to. CMS requires this to be
  // an oop so that it can track additions via card marks.  We think.
  _list_head = oopFactory::new_objectArray(2, CHECK);
}

bool ClassLoaderData::claim() {
  if (_claimed == 1) {
    return false;
  }

  return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0;
}

// Anonymous classes have their own ClassLoaderData that is marked to keep alive
// while the class is being parsed, and if the class appears on the module fixup list.
// Due to the uniqueness that no other class shares the anonymous class' name or
// ClassLoaderData, no other non-GC thread has knowledge of the anonymous class while
// it is being defined, therefore _keep_alive is not volatile or atomic.
void ClassLoaderData::inc_keep_alive() {
  if (is_anonymous()) {
    assert(_keep_alive >= 0, "Invalid keep alive increment count");
    _keep_alive++;
  }
}

void ClassLoaderData::dec_keep_alive() {
  if (is_anonymous()) {
    assert(_keep_alive > 0, "Invalid keep alive decrement count");
    _keep_alive--;
  }
}

void ClassLoaderData::oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
  if (must_claim && !claim()) {
    return;
  }

  f->do_oop(&_class_loader);
  _dependencies.oops_do(f);
  if (_handles != NULL) {
    _handles->oops_do(f);
  }
  if (klass_closure != NULL) {
    classes_do(klass_closure);
  }
}

void ClassLoaderData::Dependencies::oops_do(OopClosure* f) {
  f->do_oop((oop*)&_list_head);
}

void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_ptr_acquire
  for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
    klass_closure->do_klass(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::classes_do(void f(Klass * const)) {
  // Lock-free access requires load_ptr_acquire
  for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
    f(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::methods_do(void f(Method*)) {
  // Lock-free access requires load_ptr_acquire
  for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass()) {
      InstanceKlass::cast(k)->methods_do(f);
    }
  }
}

void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
  // Lock to avoid classes being modified/added/removed during iteration
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    // Do not filter ArrayKlass oops here...
    if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
      klass_closure->do_klass(k);
    }
  }
}

void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
  // Lock-free access requires load_ptr_acquire
  for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass()) {
      f(InstanceKlass::cast(k));
    }
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
  // Lock-free access requires load_ptr_acquire
  ModuleEntryTable* modules = load_ptr_acquire(&_modules);
  if (modules != NULL) {
    for (int i = 0; i < modules->table_size(); i++) {
      for (ModuleEntry* entry = modules->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

void ClassLoaderData::packages_do(void f(PackageEntry*)) {
  // Lock-free access requires load_ptr_acquire
  PackageEntryTable* packages = load_ptr_acquire(&_packages);
  if (packages != NULL) {
    for (int i = 0; i < packages->table_size(); i++) {
      for (PackageEntry* entry = packages->bucket(i);
           entry != NULL;
           entry = entry->next()) {
        f(entry);
      }
    }
  }
}

void ClassLoaderData::record_dependency(const Klass* k, TRAPS) {
  assert(k != NULL, "invariant");

  ClassLoaderData * const from_cld = this;
  ClassLoaderData * const to_cld = k->class_loader_data();

  // Dependency to the null class loader data doesn't need to be recorded
  // because the null class loader data never goes away.
  if (to_cld->is_the_null_class_loader_data()) {
    return;
  }

  oop to;
  if (to_cld->is_anonymous()) {
    // Anonymous class dependencies are through the mirror.
    to = k->java_mirror();
  } else {
    to = to_cld->class_loader();

    // If from_cld is anonymous, even if its class_loader is a parent of 'to'
    // we still have to add it. The class_loader won't keep from_cld alive.
    if (!from_cld->is_anonymous()) {
      // Check that this dependency isn't from the same or parent class_loader
      oop from = from_cld->class_loader();

      oop curr = from;
      while (curr != NULL) {
        if (curr == to) {
          return; // this class loader is in the parent list, no need to add it.
        }
        curr = java_lang_ClassLoader::parent(curr);
      }
    }
  }

  // It's a dependency we won't find through GC, so add it. This is relatively rare.
  // Must handle over GC point.
  Handle dependency(THREAD, to);
  from_cld->_dependencies.add(dependency, CHECK);
}


void ClassLoaderData::Dependencies::add(Handle dependency, TRAPS) {
  // Check first if this dependency is already in the list.
  // Save a pointer to the last to add to under the lock.
  objArrayOop ok = _list_head;
  objArrayOop last = NULL;
  while (ok != NULL) {
    last = ok;
    if (ok->obj_at(0) == dependency()) {
      // Don't need to add it
      return;
    }
    ok = (objArrayOop)ok->obj_at(1);
  }

  // Must handle over GC points
  assert (last != NULL, "dependencies should be initialized");
  objArrayHandle last_handle(THREAD, last);

  // Create a new dependency node with fields for (class_loader or mirror, next)
  objArrayOop deps = oopFactory::new_objectArray(2, CHECK);
  deps->obj_at_put(0, dependency());

  // Must handle over GC points
  objArrayHandle new_dependency(THREAD, deps);

  // Add the dependency under lock
  locked_add(last_handle, new_dependency, THREAD);
}

void ClassLoaderData::Dependencies::locked_add(objArrayHandle last_handle,
                                               objArrayHandle new_dependency,
                                               Thread* THREAD) {

  // Have to lock and put the new dependency on the end of the dependency
  // array so the card mark for CMS sees that this dependency is new.
  // Can probably do this lock free with some effort.
  ObjectLocker ol(Handle(THREAD, _list_head), THREAD);

  oop loader_or_mirror = new_dependency->obj_at(0);

  // Since the dependencies are only added, add to the end.
  objArrayOop end = last_handle();
  objArrayOop last = NULL;
  while (end != NULL) {
    last = end;
    // check again if another thread added it to the end.
    if (end->obj_at(0) == loader_or_mirror) {
      // Don't need to add it
      return;
    }
    end = (objArrayOop)end->obj_at(1);
  }
  assert (last != NULL, "dependencies should be initialized");
  // fill in the first element with the oop in new_dependency.
  if (last->obj_at(0) == NULL) {
    last->obj_at_put(0, new_dependency->obj_at(0));
  } else {
    last->obj_at_put(1, new_dependency());
  }
}

void ClassLoaderDataGraph::clear_claimed_marks() {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->clear_claimed();
  }
}

void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
  {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    Klass* old_value = _klasses;
    k->set_next_link(old_value);
    // Link the new item into the list, making sure the linked class is stable
    // since the list can be walked without a lock
    OrderAccess::release_store_ptr(&_klasses, k);
  }

  if (publicize && k->class_loader_data() != NULL) {
    ResourceMark rm;
    log_trace(class, loader, data)("Adding k: " PTR_FORMAT " %s to CLD: "
                                   PTR_FORMAT " loader: " PTR_FORMAT " %s",
                                   p2i(k),
                                   k->external_name(),
                                   p2i(k->class_loader_data()),
                                   p2i((void *)k->class_loader()),
                                   loader_name());
  }
}

// Remove a klass from the _klasses list for scratch_class during redefinition
// or parsed class in the case of an error.
void ClassLoaderData::remove_class(Klass* scratch_class) {
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  Klass* prev = NULL;
  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    if (k == scratch_class) {
      if (prev == NULL) {
        _klasses = k->next_link();
      } else {
        Klass* next = k->next_link();
        prev->set_next_link(next);
      }
      return;
    }
    prev = k;
    assert(k != k->next_link(), "no loops!");
  }
  ShouldNotReachHere();   // should have found this class!!
}

void ClassLoaderData::unload() {
  _unloading = true;

  // Tell serviceability tools these classes are unloading
  classes_do(InstanceKlass::notify_unload_class);

  if (log_is_enabled(Debug, class, loader, data)) {
    ResourceMark rm;
    outputStream* log = Log(class, loader, data)::debug_stream();
    log->print(": unload loader data " INTPTR_FORMAT, p2i(this));
    log->print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)class_loader()),
               loader_name());
    if (is_anonymous()) {
      log->print(" for anonymous class " INTPTR_FORMAT " ", p2i(_klasses));
    }
    log->cr();
  }

  // In some rare cases items added to this list will not be freed elsewhere.
  // To keep it simple, just free everything in it here.
  free_deallocate_list();
}

PackageEntryTable* ClassLoaderData::packages() {
  // Lazily create the package entry table at first request.
  // Lock-free access requires load_ptr_acquire.
  PackageEntryTable* packages = load_ptr_acquire(&_packages);
  if (packages == NULL) {
    MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    // Check if _packages got allocated while we were waiting for this lock.
    if ((packages = _packages) == NULL) {
      packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
      // Ensure _packages is stable, since it is examined without a lock
      OrderAccess::release_store_ptr(&_packages, packages);
    }
  }
  return packages;
}

ModuleEntryTable* ClassLoaderData::modules() {
  // Lazily create the module entry table at first request.
  // Lock-free access requires load_ptr_acquire.
  ModuleEntryTable* modules = load_ptr_acquire(&_modules);
  if (modules == NULL) {
    MutexLocker m1(Module_lock);
    // Check if _modules got allocated while we were waiting for this lock.
    if ((modules = _modules) == NULL) {
      modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
      // Each loader has one unnamed module entry. Create it before
      // any classes, loaded by this loader, are defined in case
      // they end up being defined in loader's unnamed module.
      modules->create_unnamed_module(this);

      {
        MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
        // Ensure _modules is stable, since it is examined without a lock
        OrderAccess::release_store_ptr(&_modules, modules);
      }
    }
  }
  return modules;
}

oop ClassLoaderData::keep_alive_object() const {
  assert_locked_or_safepoint(_metaspace_lock);
  assert(!keep_alive(), "Don't use with CLDs that are artificially kept alive");
  return is_anonymous() ? _klasses->java_mirror() : class_loader();
}

bool ClassLoaderData::is_alive(BoolObjectClosure* is_alive_closure) const {
  bool alive = keep_alive() // null class loader and incomplete anonymous klasses.
      || is_alive_closure->do_object_b(keep_alive_object());

  return alive;
}


ClassLoaderData::~ClassLoaderData() {
  // Release C heap structures for all the classes.
  classes_do(InstanceKlass::release_C_heap_structures);

  // Release C heap allocated hashtable for all the packages.
  if (_packages != NULL) {
    // Destroy the table itself
    delete _packages;
    _packages = NULL;
  }

  // Release C heap allocated hashtable for all the modules.
  if (_modules != NULL) {
    // Destroy the table itself
    delete _modules;
    _modules = NULL;
  }

  // release the metaspace
  Metaspace *m = _metaspace;
  if (m != NULL) {
    _metaspace = NULL;
    delete m;
  }
  // release the handles
  if (_handles != NULL) {
    JNIHandleBlock::release_block(_handles);
    _handles = NULL;
  }

  // Clear all the JNI handles for methods
  // These aren't deallocated and are going to look like a leak, but that's
  // needed because we can't really get rid of jmethodIDs because we don't
  // know when native code is going to stop using them. The spec says that
  // they're "invalid" but existing programs likely rely on their being
  // NULL after class unloading.
  if (_jmethod_ids != NULL) {
    Method::clear_jmethod_ids(this);
  }
  // Delete lock
  delete _metaspace_lock;

  // Delete free list
  if (_deallocate_list != NULL) {
    delete _deallocate_list;
  }
}

// Returns true if this class loader data is for the system class loader.
bool ClassLoaderData::is_system_class_loader_data() const {
  return SystemDictionary::is_system_class_loader(class_loader());
}

// Returns true if this class loader data is for the platform class loader.
bool ClassLoaderData::is_platform_class_loader_data() const {
  return SystemDictionary::is_platform_class_loader(class_loader());
}

// Returns true if this class loader data is one of the 3 builtin
// (boot, application/system or platform) class loaders. Note, the
// builtin loaders are not freed by a GC.
bool ClassLoaderData::is_builtin_class_loader_data() const {
  Handle classLoaderHandle = class_loader();
  return (is_the_null_class_loader_data() ||
          SystemDictionary::is_system_class_loader(classLoaderHandle) ||
          SystemDictionary::is_platform_class_loader(classLoaderHandle));
}

Metaspace* ClassLoaderData::metaspace_non_null() {
  assert(!DumpSharedSpaces, "wrong metaspace!");
  // If the metaspace has not been allocated, create a new one.  Might want
  // to create smaller arena for Reflection class loaders also.
  // The reason for the delayed allocation is because some class loaders are
  // simply for delegating with no metadata of their own.
  // Lock-free access requires load_ptr_acquire.
  Metaspace* metaspace = load_ptr_acquire(&_metaspace);
  if (metaspace == NULL) {
    MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
    // Check if _metaspace got allocated while we were waiting for this lock.
    if ((metaspace = _metaspace) == NULL) {
      if (this == the_null_class_loader_data()) {
        assert (class_loader() == NULL, "Must be");
        metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
      } else if (is_anonymous()) {
        if (class_loader() != NULL) {
          log_trace(class, loader, data)("is_anonymous: %s", class_loader()->klass()->internal_name());
        }
        metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
      } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
        if (class_loader() != NULL) {
          log_trace(class, loader, data)("is_reflection: %s", class_loader()->klass()->internal_name());
        }
        metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
      } else {
        metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
      }
      // Ensure _metaspace is stable, since it is examined without a lock
      OrderAccess::release_store_ptr(&_metaspace, metaspace);
    }
  }
  return metaspace;
}

JNIHandleBlock* ClassLoaderData::handles() const           { return _handles; }
void ClassLoaderData::set_handles(JNIHandleBlock* handles) { _handles = handles; }

jobject ClassLoaderData::add_handle(Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  if (handles() == NULL) {
    set_handles(JNIHandleBlock::allocate_block());
  }
  return handles()->allocate_handle(h());
}

void ClassLoaderData::remove_handle(jobject h) {
  _handles->release_handle(h);
}

// Add this metadata pointer to be freed when it's safe.  This is only during
// class unloading because Handles might point to this metadata field.
void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
  // Metadata in shared region isn't deleted.
  if (!m->is_shared()) {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    if (_deallocate_list == NULL) {
      _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
    }
    _deallocate_list->append_if_missing(m);
  }
}

// Deallocate free metadata on the free list.  How useful the PermGen was!
void ClassLoaderData::free_deallocate_list() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  if (_deallocate_list == NULL) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        ShouldNotReachHere();
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
    }
  }
}

// These anonymous class loaders are to contain classes used for JSR292
ClassLoaderData* ClassLoaderData::anonymous_class_loader_data(oop loader, TRAPS) {
  // Add a new class loader data to the graph.
  return ClassLoaderDataGraph::add(loader, true, THREAD);
}

const char* ClassLoaderData::loader_name() {
  // Handles null class loader
  return SystemDictionary::loader_name(class_loader());
}

#ifndef PRODUCT
// Define to dump klasses
#undef CLD_DUMP_KLASSES

void ClassLoaderData::dump(outputStream * const out) {
  out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: " PTR_FORMAT " %s {",
      p2i(this), p2i((void *)class_loader()),
      p2i(class_loader() != NULL ? class_loader()->klass() : NULL), loader_name());
  if (claimed()) out->print(" claimed ");
  if (is_unloading()) out->print(" unloading ");
  out->print(" handles " INTPTR_FORMAT, p2i(handles()));
  out->cr();
  if (metaspace_or_null() != NULL) {
    out->print_cr("metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));
    metaspace_or_null()->dump(out);
  } else {
    out->print_cr("metaspace: NULL");
  }

#ifdef CLD_DUMP_KLASSES
  if (Verbose) {
    Klass* k = _klasses;
    while (k != NULL) {
      out->print_cr("klass " PTR_FORMAT ", %s, CT: %d, MUT: %d", k, k->name()->as_C_string(),
          k->has_modified_oops(), k->has_accumulated_modified_oops());
      assert(k != k->next_link(), "no loops!");
      k = k->next_link();
    }
  }
#endif  // CLD_DUMP_KLASSES
#undef CLD_DUMP_KLASSES
  if (_jmethod_ids != NULL) {
    Method::print_jmethod_ids(this, out);
  }
  out->print_cr("}");
}
#endif // PRODUCT

void ClassLoaderData::verify() {
  assert_locked_or_safepoint(_metaspace_lock);
  oop cl = class_loader();

  guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
  guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");

  // Verify the integrity of the allocated space.
  if (metaspace_or_null() != NULL) {
    metaspace_or_null()->verify();
  }

  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    guarantee(k->class_loader_data() == this, "Must be the same");
    k->verify();
    assert(k != k->next_link(), "no loops!");
  }
}

bool ClassLoaderData::contains_klass(Klass* klass) {
  // Lock-free access requires load_ptr_acquire
  for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k == klass) return true;
  }
  return false;
}


// GC root of class loader data created.
ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;

bool ClassLoaderDataGraph::_should_purge = false;
bool ClassLoaderDataGraph::_metaspace_oom = false;

// Add a new class loader data node to the list.  Assign the newly created
// ClassLoaderData into the java/lang/ClassLoader object as a hidden field
ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous, TRAPS) {
  // We need to allocate all the oops for the ClassLoaderData before allocating the
  // actual ClassLoaderData object.
  ClassLoaderData::Dependencies dependencies(CHECK_NULL);

  NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
                                     // ClassLoaderData in the graph since the CLD
                                     // contains unhandled oops

  ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous, dependencies);


  if (!is_anonymous) {
    ClassLoaderData** cld_addr = java_lang_ClassLoader::loader_data_addr(loader());
    // First, Atomically set it
    ClassLoaderData* old = (ClassLoaderData*) Atomic::cmpxchg_ptr(cld, cld_addr, NULL);
    if (old != NULL) {
      delete cld;
      // Returns the data.
      return old;
    }
  }

  // We won the race, and therefore the task of adding the data to the list of
  // class loader data
  ClassLoaderData** list_head = &_head;
  ClassLoaderData* next = _head;

  do {
    cld->set_next(next);
    ClassLoaderData* exchanged = (ClassLoaderData*)Atomic::cmpxchg_ptr(cld, list_head, next);
    if (exchanged == next) {
      if (log_is_enabled(Debug, class, loader, data)) {
        PauseNoSafepointVerifier pnsv(&no_safepoints); // Need safe points for JavaCalls::call_virtual
        log_creation(loader, cld, CHECK_NULL);
      }
      return cld;
    }
    next = exchanged;
  } while (true);
}

void ClassLoaderDataGraph::log_creation(Handle loader, ClassLoaderData* cld, TRAPS) {
  Handle string;
  if (loader.not_null()) {
    // Include the result of loader.toString() in the output. This allows
    // the user of the log to identify the class loader instance.
    JavaValue result(T_OBJECT);
    KlassHandle spec_klass(THREAD, SystemDictionary::ClassLoader_klass());
    JavaCalls::call_virtual(&result,
                            loader,
                            spec_klass,
                            vmSymbols::toString_name(),
                            vmSymbols::void_string_signature(),
                            CHECK);
    assert(result.get_type() == T_OBJECT, "just checking");
    string = (oop)result.get_jobject();
  }

  ResourceMark rm;
  outputStream* log = Log(class, loader, data)::debug_stream();
  log->print("create class loader data " INTPTR_FORMAT, p2i(cld));
  log->print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)cld->class_loader()),
             cld->loader_name());

  if (string.not_null()) {
    log->print(": ");
    java_lang_String::print(string(), log);
  }
  log->cr();
}


void ClassLoaderDataGraph::oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->oops_do(f, klass_closure, must_claim);
  }
}

void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    if (cld->keep_alive()) {
      cld->oops_do(f, klass_closure, must_claim);
    }
  }
}

void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, KlassClosure* klass_closure, bool must_claim) {
  if (ClassUnloading) {
    keep_alive_oops_do(f, klass_closure, must_claim);
  } else {
    oops_do(f, klass_closure, must_claim);
  }
}

void ClassLoaderDataGraph::cld_do(CLDClosure* cl) {
  for (ClassLoaderData* cld = _head; cl != NULL && cld != NULL; cld = cld->next()) {
    cl->do_cld(cld);
  }
}

void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cl->do_cld(cld);
  }
}

void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->_next) {
    CLDClosure* closure = cld->keep_alive() ? strong : weak;
    if (closure != NULL) {
      closure->do_cld(cld);
    }
  }
}

void ClassLoaderDataGraph::keep_alive_cld_do(CLDClosure* cl) {
  roots_cld_do(cl, NULL);
}

void ClassLoaderDataGraph::always_strong_cld_do(CLDClosure* cl) {
  if (ClassUnloading) {
    keep_alive_cld_do(cl);
  } else {
    cld_do(cl);
  }
}

void ClassLoaderDataGraph::classes_do(KlassClosure* klass_closure) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->classes_do(klass_closure);
  }
}

void ClassLoaderDataGraph::classes_do(void f(Klass* const)) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->classes_do(f);
  }
}

void ClassLoaderDataGraph::methods_do(void f(Method*)) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->methods_do(f);
  }
}

void ClassLoaderDataGraph::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->modules_do(f);
  }
}

void ClassLoaderDataGraph::modules_unloading_do(void f(ModuleEntry*)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->modules_do(f);
  }
}

void ClassLoaderDataGraph::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->packages_do(f);
  }
}

void ClassLoaderDataGraph::packages_unloading_do(void f(PackageEntry*)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->packages_do(f);
  }
}

void ClassLoaderDataGraph::loaded_classes_do(KlassClosure* klass_closure) {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->loaded_classes_do(klass_closure);
  }
}

void ClassLoaderDataGraph::classes_unloading_do(void f(Klass* const)) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  // Only walk the head until any clds not purged from prior unloading
  // (CMS doesn't purge right away).
  for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
    assert(cld->is_unloading(), "invariant");
    cld->classes_do(f);
  }
}

GrowableArray<ClassLoaderData*>* ClassLoaderDataGraph::new_clds() {
  assert(_head == NULL || _saved_head != NULL, "remember_new_clds(true) not called?");

  GrowableArray<ClassLoaderData*>* array = new GrowableArray<ClassLoaderData*>();

  // The CLDs in [_head, _saved_head] were all added during last call to remember_new_clds(true);
  ClassLoaderData* curr = _head;
  while (curr != _saved_head) {
    if (!curr->claimed()) {
      array->push(curr);

      if (log_is_enabled(Debug, class, loader, data)) {
        outputStream* log = Log(class, loader, data)::debug_stream();
        log->print("found new CLD: ");
        curr->print_value_on(log);
        log->cr();
      }
    }

    curr = curr->_next;
  }

  return array;
}

bool ClassLoaderDataGraph::unload_list_contains(const void* x) {
  assert(SafepointSynchronize::is_at_safepoint(), "only safe to call at safepoint");
  for (ClassLoaderData* cld = _unloading; cld != NULL; cld = cld->next()) {
    if (cld->metaspace_or_null() != NULL && cld->metaspace_or_null()->contains(x)) {
      return true;
    }
  }
  return false;
}

#ifndef PRODUCT
bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    if (loader_data == data) {
      return true;
    }
  }

  return false;
}
#endif // PRODUCT


// Move class loader data from main list to the unloaded list for unloading
// and deallocation later.
bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure,
                                        bool clean_previous_versions) {

  ClassLoaderData* data = _head;
  ClassLoaderData* prev = NULL;
  bool seen_dead_loader = false;

  // Mark metadata seen on the stack only so we can delete unneeded entries.
  // Only walk all metadata, including the expensive code cache walk, for Full GC
  // and only if class redefinition has occurred and there are previous versions
  // of Klasses to delete.
  bool walk_all_metadata = clean_previous_versions &&
                           JvmtiExport::has_redefined_a_class() &&
                           InstanceKlass::has_previous_versions_and_reset();
  MetadataOnStackMark md_on_stack(walk_all_metadata);

  // Save previous _unloading pointer for CMS which may add to unloading list before
  // purging and we don't want to rewalk the previously unloaded class loader data.
  _saved_unloading = _unloading;

  data = _head;
  while (data != NULL) {
    if (data->is_alive(is_alive_closure)) {
      // clean metaspace
      if (walk_all_metadata) {
        data->classes_do(InstanceKlass::purge_previous_versions);
      }
      data->free_deallocate_list();
      prev = data;
      data = data->next();
      continue;
    }
    seen_dead_loader = true;
    ClassLoaderData* dead = data;
    dead->unload();
    data = data->next();
    // Remove from loader list.
    // This class loader data will no longer be found
    // in the ClassLoaderDataGraph.
    if (prev != NULL) {
      prev->set_next(data);
    } else {
      assert(dead == _head, "sanity check");
      _head = data;
    }
    dead->set_next(_unloading);
    _unloading = dead;
  }

  if (seen_dead_loader) {
    // Walk a ModuleEntry's reads and a PackageEntry's exports lists
    // to determine if there are modules on those lists that are now
    // dead and should be removed.  A module's life cycle is equivalent
    // to its defining class loader's life cycle.  Since a module is
    // considered dead if its class loader is dead, these walks must
    // occur after each class loader's aliveness is determined.
    data = _head;
    while (data != NULL) {
      if (data->packages_defined()) {
        data->packages()->purge_all_package_exports();
      }
      if (data->modules_defined()) {
        data->modules()->purge_all_module_reads();
      }
      data = data->next();
    }

    post_class_unload_events();
  }

  return seen_dead_loader;
}

void ClassLoaderDataGraph::purge() {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  ClassLoaderData* list = _unloading;
  _unloading = NULL;
  ClassLoaderData* next = list;
  bool classes_unloaded = false;
  while (next != NULL) {
    ClassLoaderData* purge_me = next;
    next = purge_me->next();
    delete purge_me;
    classes_unloaded = true;
  }
  if (classes_unloaded) {
    Metaspace::purge();
    set_metaspace_oom(false);
  }
}

void ClassLoaderDataGraph::post_class_unload_events() {
#if INCLUDE_TRACE
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
  if (Tracing::enabled()) {
    if (Tracing::is_event_enabled(TraceClassUnloadEvent)) {
      assert(_unloading != NULL, "need class loader data unload list!");
      _class_unload_time = Ticks::now();
      classes_unloading_do(&class_unload_event);
    }
    Tracing::on_unloading_classes();
  }
#endif
}

// CDS support

// Global metaspaces for writing information to the shared archive.  When
// application CDS is supported, we may need one per metaspace, so this
// sort of looks like it.
Metaspace* ClassLoaderData::_ro_metaspace = NULL;
Metaspace* ClassLoaderData::_rw_metaspace = NULL;
static bool _shared_metaspaces_initialized = false;

// Initialize shared metaspaces (change to call from somewhere not lazily)
void ClassLoaderData::initialize_shared_metaspaces() {
  assert(DumpSharedSpaces, "only use this for dumping shared spaces");
  assert(this == ClassLoaderData::the_null_class_loader_data(),
         "only supported for null loader data for now");
  assert (!_shared_metaspaces_initialized, "only initialize once");
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  _ro_metaspace = new Metaspace(_metaspace_lock, Metaspace::ROMetaspaceType);
  _rw_metaspace = new Metaspace(_metaspace_lock, Metaspace::ReadWriteMetaspaceType);
  _shared_metaspaces_initialized = true;
}

Metaspace* ClassLoaderData::ro_metaspace() {
  assert(_ro_metaspace != NULL, "should already be initialized");
  return _ro_metaspace;
}

Metaspace* ClassLoaderData::rw_metaspace() {
  assert(_rw_metaspace != NULL, "should already be initialized");
  return _rw_metaspace;
}

ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic()
    : _next_klass(NULL) {
  ClassLoaderData* cld = ClassLoaderDataGraph::_head;
  Klass* klass = NULL;

  // Find the first klass in the CLDG.
  while (cld != NULL) {
    assert_locked_or_safepoint(cld->metaspace_lock());
    klass = cld->_klasses;
    if (klass != NULL) {
      _next_klass = klass;
      return;
    }
    cld = cld->next();
  }
}

Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) {
  Klass* next = klass->next_link();
  if (next != NULL) {
    return next;
  }

  // No more klasses in the current CLD. Time to find a new CLD.
  ClassLoaderData* cld = klass->class_loader_data();
  assert_locked_or_safepoint(cld->metaspace_lock());
  while (next == NULL) {
    cld = cld->next();
    if (cld == NULL) {
      break;
    }
    next = cld->_klasses;
  }

  return next;
}

Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
  Klass* head = _next_klass;

  while (head != NULL) {
    Klass* next = next_klass_in_cldg(head);

    Klass* old_head = (Klass*)Atomic::cmpxchg_ptr(next, &_next_klass, head);

    if (old_head == head) {
      return head; // Won the CAS.
    }

    head = old_head;
  }

  // Nothing more for the iterator to hand out.
  assert(head == NULL, "head is " PTR_FORMAT ", expected to be null", p2i(head));
  return NULL;
}

ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
  _data = ClassLoaderDataGraph::_head;
}

ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}

#ifndef PRODUCT
// callable from debugger
extern "C" int print_loader_data_graph() {
  ClassLoaderDataGraph::dump_on(tty);
  return 0;
}

void ClassLoaderDataGraph::verify() {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    data->verify();
  }
}

void ClassLoaderDataGraph::dump_on(outputStream * const out) {
  for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
    data->dump(out);
  }
  MetaspaceAux::dump(out);
}
#endif // PRODUCT

void ClassLoaderData::print_value_on(outputStream* out) const {
  if (class_loader() == NULL) {
    out->print("NULL class_loader");
  } else {
    out->print("class loader " INTPTR_FORMAT " ", p2i(this));
    class_loader()->print_value_on(out);
  }
}

#if INCLUDE_TRACE

Ticks ClassLoaderDataGraph::_class_unload_time;

void ClassLoaderDataGraph::class_unload_event(Klass* const k) {
  assert(k != NULL, "invariant");

  // post class unload event
  EventClassUnload event(UNTIMED);
  event.set_endtime(_class_unload_time);
  event.set_unloadedClass(k);
  event.set_definingClassLoader(k->class_loader_data());
  event.commit();
}

#endif // INCLUDE_TRACE