src/hotspot/share/classfile/classLoaderData.cpp

  30 // according to its name resolution strategy.
  31 //
  32 // Class loaders that implement a deterministic name resolution strategy
  33 // (including with respect to their delegation behavior), such as the boot, the
  34 // platform, and the system loaders of the JDK's built-in class loader
  35 // hierarchy, always produce the same linkset for a given configuration.
  36 //
  37 // ClassLoaderData carries information related to a linkset (e.g.,
  38 // metaspace holding its klass definitions).
  39 // The System Dictionary and related data structures (e.g., placeholder table,
  40 // loader constraints table) as well as the runtime representation of classes
  41 // only reference ClassLoaderData.
  42 //
  43 // Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData
  44 // that represents the loader's "linking domain" in the JVM.
  45 //
  46 // The bootstrap loader (represented by NULL) also has a ClassLoaderData,
  47 // the singleton class the_null_class_loader_data().
  48 
  49 #include "precompiled.hpp"
  50 #include "classfile/classLoaderData.hpp"
  51 #include "classfile/classLoaderData.inline.hpp"

  52 #include "classfile/dictionary.hpp"
  53 #include "classfile/javaClasses.hpp"
  54 #include "classfile/metadataOnStackMark.hpp"
  55 #include "classfile/moduleEntry.hpp"
  56 #include "classfile/packageEntry.hpp"
  57 #include "classfile/symbolTable.hpp"
  58 #include "classfile/systemDictionary.hpp"
  59 #include "logging/log.hpp"
  60 #include "logging/logStream.hpp"
  61 #include "memory/allocation.inline.hpp"
  62 #include "memory/metadataFactory.hpp"
  63 #include "memory/metaspaceShared.hpp"
  64 #include "memory/resourceArea.hpp"
  65 #include "memory/universe.hpp"
  66 #include "oops/access.inline.hpp"
  67 #include "oops/oop.inline.hpp"
  68 #include "oops/oopHandle.inline.hpp"
  69 #include "oops/weakHandle.inline.hpp"
  70 #include "runtime/atomic.hpp"
  71 #include "runtime/handles.inline.hpp"
  72 #include "runtime/mutex.hpp"
  73 #include "runtime/orderAccess.hpp"
  74 #include "runtime/safepoint.hpp"
  75 #include "runtime/safepointVerifiers.hpp"
  76 #include "utilities/growableArray.hpp"
  77 #include "utilities/macros.hpp"
  78 #include "utilities/ostream.hpp"
  79 
  80 volatile size_t ClassLoaderDataGraph::_num_array_classes = 0;
  81 volatile size_t ClassLoaderDataGraph::_num_instance_classes = 0;
  82 
  83 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
  84 
  85 void ClassLoaderData::init_null_class_loader_data() {
  86   assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  87   assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");
  88 
  89   _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  90   ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  91   assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
  92 
  93   LogTarget(Trace, class, loader, data) lt;
  94   if (lt.is_enabled()) {
  95     ResourceMark rm;
  96     LogStream ls(lt);
  97     ls.print("create ");
  98     _the_null_class_loader_data->print_value_on(&ls);
  99     ls.cr();
 100   }
 101 }
 102 
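
As a hedged illustration of the header comment above (not part of this file or patch): a hypothetical helper that maps a loader oop to its linking domain. Only the accessors it calls, the_null_class_loader_data() and java_lang_ClassLoader::loader_data_acquire(), are real APIs used elsewhere in this file.

// Hypothetical helper, for illustration only.  A NULL loader denotes the
// bootstrap loader, whose ClassLoaderData is the singleton initialized above;
// any other loader carries a hidden pointer to its ClassLoaderData, which may
// still be NULL if the loader has not yet defined any class.
static ClassLoaderData* linking_domain_for(oop loader) {
  if (loader == NULL) {
    return ClassLoaderData::the_null_class_loader_data();
  }
  return java_lang_ClassLoader::loader_data_acquire(loader);
}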


 427   // It's a dependency we won't find through GC, add it.
 428   if (!_handles.contains(to)) {
 429     NOT_PRODUCT(Atomic::inc(&_dependency_count));
 430     LogTarget(Trace, class, loader, data) lt;
 431     if (lt.is_enabled()) {
 432       ResourceMark rm;
 433       LogStream ls(lt);
 434       ls.print("adding dependency from ");
 435       print_value_on(&ls);
 436       ls.print(" to ");
 437       to_cld->print_value_on(&ls);
 438       ls.cr();
 439     }
 440     Handle dependency(Thread::current(), to);
 441     add_handle(dependency);
 442     // Added a potentially young gen oop to the ClassLoaderData
 443     record_modified_oops();
 444   }
 445 }
 446 
 447 
 448 void ClassLoaderDataGraph::clear_claimed_marks() {
 449   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
 450     cld->clear_claimed();
 451   }
 452 }
 453 
 454 void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
 455   {
 456     MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 457     Klass* old_value = _klasses;
 458     k->set_next_link(old_value);
 459     // Link the new item into the list, making sure the linked class is stable
 460     // since the list can be walked without a lock
 461     OrderAccess::release_store(&_klasses, k);
 462     if (k->is_array_klass()) {
 463       ClassLoaderDataGraph::inc_array_classes(1);
 464     } else {
 465       ClassLoaderDataGraph::inc_instance_classes(1);
 466     }
 467   }
 468 
 469   if (publicize) {
 470     LogTarget(Trace, class, loader, data) lt;
 471     if (lt.is_enabled()) {
 472       ResourceMark rm;
 473       LogStream ls(lt);
 474       ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), k->external_name());
 475       print_value_on(&ls);
 476       ls.cr();
 477     }
 478   }
 479 }
 480 
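
The release_store() above pairs with the load_acquire() used by lock-free readers such as contains_klass() later in this file. Below is a minimal stand-alone sketch of that publish/read pattern, written with std::atomic purely for illustration; none of the names in it are HotSpot APIs.

#include <atomic>

struct Node {
  int   payload;
  Node* next;
};

static std::atomic<Node*> list_head{nullptr};

// Writer (runs under a lock, like add_class): fully initialize the node, then
// publish it with release semantics so readers never see a half-built node.
void publish(Node* n) {
  n->next = list_head.load(std::memory_order_relaxed);
  list_head.store(n, std::memory_order_release);
}

// Reader (lock-free, like contains_klass): the acquire load pairs with the
// release store above, so the node's fields are visible once the pointer is.
bool contains(int value) {
  for (Node* n = list_head.load(std::memory_order_acquire); n != nullptr; n = n->next) {
    if (n->payload == value) return true;
  }
  return false;
}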
 481 // Class iterator used by the compiler.  It gets some number of classes at
 482 // a safepoint to decay invocation counters on the methods.
 483 class ClassLoaderDataGraphKlassIteratorStatic {
 484   ClassLoaderData* _current_loader_data;
 485   Klass*           _current_class_entry;
 486  public:
 487 
 488   ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {}
 489 
 490   InstanceKlass* try_get_next_class() {
 491     assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 492     size_t max_classes = ClassLoaderDataGraph::num_instance_classes();
 493     assert(max_classes > 0, "should not be called with no instance classes");
 494     for (size_t i = 0; i < max_classes; ) {
 495 
 496       if (_current_class_entry != NULL) {
 497         Klass* k = _current_class_entry;
 498         _current_class_entry = _current_class_entry->next_link();
 499 
 500         if (k->is_instance_klass()) {
 501           InstanceKlass* ik = InstanceKlass::cast(k);
 502           i++;  // count all instance classes found
  503           // Classes that are not yet loaded are counted in max_classes,
  504           // but only loaded classes are returned.
 505           if (ik->is_loaded()) {
 506             return ik;
 507           }
 508         }
 509       } else {
 510         // Go to next CLD
 511         if (_current_loader_data != NULL) {
 512           _current_loader_data = _current_loader_data->next();
 513         }
 514         // Start at the beginning
 515         if (_current_loader_data == NULL) {
 516           _current_loader_data = ClassLoaderDataGraph::_head;
 517         }
 518 
 519         _current_class_entry = _current_loader_data->klasses();
 520       }
 521     }
 522     // Should never be reached unless all instance classes have failed or are not fully loaded.
 523     // Caller handles NULL.
 524     return NULL;
 525   }
 526 
 527   // If the current class for the static iterator is a class being unloaded or
 528   // deallocated, adjust the current class.
 529   void adjust_saved_class(ClassLoaderData* cld) {
 530     if (_current_loader_data == cld) {
 531       _current_loader_data = cld->next();
 532       if (_current_loader_data != NULL) {
 533         _current_class_entry = _current_loader_data->klasses();
 534       }  // else try_get_next_class will start at the head
 535     }
 536   }
 537 
 538   void adjust_saved_class(Klass* klass) {
 539     if (_current_class_entry == klass) {
 540       _current_class_entry = klass->next_link();
 541     }
 542   }
 543 };
 544 
 545 static ClassLoaderDataGraphKlassIteratorStatic static_klass_iterator;
 546 
 547 InstanceKlass* ClassLoaderDataGraph::try_get_next_class() {
 548   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 549   return static_klass_iterator.try_get_next_class();
 550 }
 551 
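
A hedged sketch of how a caller might drain this iterator during a safepoint; the wrapper function and its budget are hypothetical, and only try_get_next_class() is a real API.

// Illustrative only: visit at most 'budget' loaded instance classes per
// safepoint.  The static iterator remembers its position across calls.
static void visit_some_classes(size_t budget) {
  for (size_t n = 0; n < budget; n++) {
    InstanceKlass* ik = ClassLoaderDataGraph::try_get_next_class();
    if (ik == NULL) {
      break;  // every remaining class is not yet fully loaded
    }
    // ... e.g. decay invocation counters on ik's methods ...
  }
}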
 552 
 553 void ClassLoaderData::initialize_holder(Handle loader_or_mirror) {
 554   if (loader_or_mirror() != NULL) {
 555     assert(_holder.is_null(), "never replace holders");
 556     _holder = WeakHandle<vm_class_loader_data>::create(loader_or_mirror);
 557   }
 558 }
 559 
 560 // Remove a klass from the _klasses list for scratch_class during redefinition
 561 // or parsed class in the case of an error.
 562 void ClassLoaderData::remove_class(Klass* scratch_class) {
 563   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 564 
 565   // Adjust global class iterator.
 566   static_klass_iterator.adjust_saved_class(scratch_class);
 567 
 568   Klass* prev = NULL;
 569   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
 570     if (k == scratch_class) {
 571       if (prev == NULL) {
 572         _klasses = k->next_link();
 573       } else {
 574         Klass* next = k->next_link();
 575         prev->set_next_link(next);
 576       }
 577 
 578       if (k->is_array_klass()) {
 579         ClassLoaderDataGraph::dec_array_classes(1);
 580       } else {
 581         ClassLoaderDataGraph::dec_instance_classes(1);
 582       }
 583 
 584       return;
 585     }
 586     prev = k;


 594 
 595   LogTarget(Trace, class, loader, data) lt;
 596   if (lt.is_enabled()) {
 597     ResourceMark rm;
 598     LogStream ls(lt);
 599     ls.print("unload");
 600     print_value_on(&ls);
 601     ls.cr();
 602   }
 603 
 604   // Some items on the _deallocate_list need to free their C heap structures
 605   // if they are not already on the _klasses list.
 606   free_deallocate_list_C_heap_structures();
 607 
 608   // Clean up class dependencies and tell serviceability tools
 609   // these classes are unloading.  Must be called
 610   // after erroneous classes are released.
 611   classes_do(InstanceKlass::unload_class);
 612 
 613   // Clean up global class iterator for compiler
 614   static_klass_iterator.adjust_saved_class(this);
 615 }
 616 
 617 ModuleEntryTable* ClassLoaderData::modules() {
 618   // Lazily create the module entry table at first request.
 619   // Lock-free access requires load_acquire.
 620   ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
 621   if (modules == NULL) {
 622     MutexLocker m1(Module_lock);
 623     // Check if _modules got allocated while we were waiting for this lock.
 624     if ((modules = _modules) == NULL) {
 625       modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
 626 
 627       {
 628         MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 629         // Ensure _modules is stable, since it is examined without a lock
 630         OrderAccess::release_store(&_modules, modules);
 631       }
 632     }
 633   }
 634   return modules;


 897         MetadataFactory::free_metadata(this, (Method*)m);
 898       } else if (m->is_constantPool()) {
 899         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 900       } else if (m->is_klass()) {
 901         MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 902       } else {
 903         ShouldNotReachHere();
 904       }
 905     } else {
 906       // Metadata is alive.
 907       // If scratch_class is on stack then it shouldn't be on this list!
 908       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 909              "scratch classes on this list should be dead");
 910       // Also should assert that other metadata on the list was found in handles.
 911       // Some cleaning remains.
 912       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 913     }
 914   }
 915 }
 916 
 917 void ClassLoaderDataGraph::clean_deallocate_lists(bool walk_previous_versions) {
 918   assert(SafepointSynchronize::is_at_safepoint(), "must only be called at safepoint");
 919   uint loaders_processed = 0;
 920   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
 921     // is_alive check will be necessary for concurrent class unloading.
 922     if (cld->is_alive()) {
 923       // clean metaspace
 924       if (walk_previous_versions) {
 925         cld->classes_do(InstanceKlass::purge_previous_versions);
 926       }
 927       cld->free_deallocate_list();
 928       loaders_processed++;
 929     }
 930   }
 931   log_debug(class, loader, data)("clean_deallocate_lists: loaders processed %u %s",
 932                                  loaders_processed, walk_previous_versions ? "walk_previous_versions" : "");
 933 }
 934 
 935 void ClassLoaderDataGraph::walk_metadata_and_clean_metaspaces() {
 936   assert(SafepointSynchronize::is_at_safepoint(), "must only be called at safepoint");
 937 
 938   _should_clean_deallocate_lists = false; // assume everything gets cleaned
 939 
 940   // Mark metadata seen on the stack so we can delete unreferenced entries.
 941   // Walk all metadata, including the expensive code cache walk, only for class redefinition.
 942   // The MetadataOnStackMark walk during redefinition saves previous versions if it finds old methods
 943   // on the stack or in the code cache, so we only have to repeat the full walk if
 944   // they were found at that time.
 945   // TODO: have redefinition clean old methods out of the code cache.  They still exist in some places.
 946   bool walk_all_metadata = InstanceKlass::has_previous_versions_and_reset();
 947 
 948   MetadataOnStackMark md_on_stack(walk_all_metadata);
 949   clean_deallocate_lists(walk_all_metadata);
 950 }
 951 
 952 // This is distinct from free_deallocate_list.  For class loader data that are
 953 // unloading, this frees the C heap memory for items on the list, and unlinks
 954 // scratch or error classes so that unloading events aren't triggered for these
 955 // classes. The metadata is removed with the unloading metaspace.
 956 // There isn't C heap memory allocated for methods, so nothing is done for them.
 957 void ClassLoaderData::free_deallocate_list_C_heap_structures() {
 958   // Don't need lock, at safepoint
 959   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 960   assert(is_unloading(), "only called for ClassLoaderData that are unloading");
 961   if (_deallocate_list == NULL) {
 962     return;
 963   }
 964   // Go backwards because this removes entries that are freed.
 965   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 966     Metadata* m = _deallocate_list->at(i);
 967     _deallocate_list->remove_at(i);
 968     if (m->is_constantPool()) {
 969       ((ConstantPool*)m)->release_C_heap_structures();
 970     } else if (m->is_klass()) {
 971       InstanceKlass* ik = (InstanceKlass*)m;


1053 
1054   // Verify the integrity of the allocated space.
1055   if (metaspace_or_null() != NULL) {
1056     metaspace_or_null()->verify();
1057   }
1058 
1059   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
1060     guarantee(k->class_loader_data() == this, "Must be the same");
1061     k->verify();
1062     assert(k != k->next_link(), "no loops!");
1063   }
1064 }
1065 
1066 bool ClassLoaderData::contains_klass(Klass* klass) {
1067   // Lock-free access requires load_acquire
1068   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
1069     if (k == klass) return true;
1070   }
1071   return false;
1072 }
1073 
1074 
1075 // GC root of class loader data created.
1076 ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
1077 ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
1078 ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
1079 ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;
1080 
1081 bool ClassLoaderDataGraph::_should_purge = false;
1082 bool ClassLoaderDataGraph::_should_clean_deallocate_lists = false;
1083 bool ClassLoaderDataGraph::_safepoint_cleanup_needed = false;
1084 bool ClassLoaderDataGraph::_metaspace_oom = false;
1085 
1086 // Add a new class loader data node to the list.  Assign the newly created
1087 // ClassLoaderData into the java/lang/ClassLoader object as a hidden field
1088 ClassLoaderData* ClassLoaderDataGraph::add_to_graph(Handle loader, bool is_unsafe_anonymous) {
1089 
1090   assert_lock_strong(ClassLoaderDataGraph_lock);
1091 
1092   ClassLoaderData* cld;
1093 
1094   // First check if another thread beat us to creating the CLD and installing
1095   // it into the loader while we were waiting for the lock.
1096   if (!is_unsafe_anonymous && loader.not_null()) {
1097     cld = java_lang_ClassLoader::loader_data_acquire(loader());
1098     if (cld != NULL) {
1099       return cld;
1100     }
1101   }
1102 
1103   // We mustn't GC until we've installed the ClassLoaderData in the Graph since the CLD
1104   // contains oops in _handles that must be walked.  GC doesn't walk CLD from the
1105   // loader oop in all collections, particularly young collections.
1106   NoSafepointVerifier no_safepoints;
1107 
1108   cld = new ClassLoaderData(loader, is_unsafe_anonymous);
1109 
1110   // First install the new CLD to the Graph.
1111   cld->set_next(_head);
1112   _head = cld;
1113 
1114   // Next associate with the class_loader.
1115   if (!is_unsafe_anonymous) {
1116     // Use OrderAccess, since readers need to get the loader_data only after
1117     // it's added to the Graph
1118     java_lang_ClassLoader::release_set_loader_data(loader(), cld);
1119   }
1120 
1121   // Lastly log, if requested
1122   LogTarget(Trace, class, loader, data) lt;
1123   if (lt.is_enabled()) {
1124     ResourceMark rm;
1125     LogStream ls(lt);
1126     ls.print("create ");
1127     cld->print_value_on(&ls);
1128     ls.cr();
1129   }
1130   return cld;
1131 }
1132 
1133 ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_unsafe_anonymous) {
1134   MutexLocker ml(ClassLoaderDataGraph_lock);
1135   ClassLoaderData* loader_data = add_to_graph(loader, is_unsafe_anonymous);
1136   return loader_data;
1137 }
1138 
1139 void ClassLoaderDataGraph::cld_do(CLDClosure* cl) {
1140   assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1141   for (ClassLoaderData* cld = _head;  cld != NULL; cld = cld->_next) {
1142     cl->do_cld(cld);
1143   }
1144 }
1145 
1146 void ClassLoaderDataGraph::cld_unloading_do(CLDClosure* cl) {
1147   assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1148   // Only walk from the head up to any CLDs not yet purged after a prior
1149   // unloading pass (CMS doesn't purge right away).
1150   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1151     assert(cld->is_unloading(), "invariant");
1152     cl->do_cld(cld);
1153   }
1154 }
1155 
1156 void ClassLoaderDataGraph::roots_cld_do(CLDClosure* strong, CLDClosure* weak) {
1157   assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1158   for (ClassLoaderData* cld = _head;  cld != NULL; cld = cld->_next) {
1159     CLDClosure* closure = cld->keep_alive() ? strong : weak;
1160     if (closure != NULL) {
1161       closure->do_cld(cld);
1162     }
1163   }
1164 }
1165 
1166 void ClassLoaderDataGraph::always_strong_cld_do(CLDClosure* cl) {
1167   assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1168   if (ClassUnloading) {
1169     roots_cld_do(cl, NULL);
1170   } else {
1171     cld_do(cl);
1172   }
1173 }
1174 
1175 // Closure for locking and iterating through classes.
1176 LockedClassesDo::LockedClassesDo(classes_do_func_t f) : _function(f) {
1177   ClassLoaderDataGraph_lock->lock();
1178 }
1179 
1180 LockedClassesDo::LockedClassesDo() : _function(NULL) {
1181   // callers provide their own do_klass
1182   ClassLoaderDataGraph_lock->lock();
1183 }
1184 
1185 LockedClassesDo::~LockedClassesDo() { ClassLoaderDataGraph_lock->unlock(); }
1186 
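
A hedged usage sketch of the RAII closure above; the callback and wrapper below are hypothetical, and it assumes (as its use as a closure suggests) that LockedClassesDo is a KlassClosure whose do_klass() invokes the supplied function. The constructor takes ClassLoaderDataGraph_lock, so the iteration satisfies the lock assertions, and the destructor releases the lock when the closure goes out of scope.

// Illustrative only.
static void examine_klass(Klass* k) {
  // ... inspect k ...
}

static void example() {
  LockedClassesDo closure(examine_klass);      // locks ClassLoaderDataGraph_lock
  ClassLoaderDataGraph::classes_do(&closure);  // safe: lock is already held
}                                              // lock released here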
1187 
1188 // Iterating over the CLDG needs to be locked because unloading will soon be
1189 // able to remove entries concurrently.
1190 class ClassLoaderDataGraphIterator : public StackObj {
1191   ClassLoaderData* _next;
1192   HandleMark       _hm;  // clean up handles when this is done.
1193   Handle           _holder;
1194   Thread*          _thread;
1195 
1196   void hold_next() {
1197     if (_next != NULL) {
1198       _holder = Handle(_thread, _next->holder_phantom());
1199     }
1200   }
1201 public:
1202   ClassLoaderDataGraphIterator() : _next(ClassLoaderDataGraph::_head) {
1203     _thread = Thread::current();
1204     assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1205     hold_next();
1206   }
1207 
1208   bool repeat() const {
1209     return _next != NULL;
1210   }
1211 
1212   ClassLoaderData* get_next() {
1213     ClassLoaderData* next = _next;
1214     if (_next != NULL) {
1215       _next = _next->next();
1216       hold_next();
1217     }
1218     return next;
1219   }
1220 };
1221 
1222 // These functions assume that the caller has locked the ClassLoaderDataGraph_lock
1223 // if they are not calling the function from a safepoint.
1224 void ClassLoaderDataGraph::classes_do(KlassClosure* klass_closure) {
1225   ClassLoaderDataGraphIterator iter;
1226   while (iter.repeat()) {
1227     ClassLoaderData* cld = iter.get_next();
1228     cld->classes_do(klass_closure);
1229   }
1230 }
1231 
1232 void ClassLoaderDataGraph::classes_do(void f(Klass* const)) {
1233   ClassLoaderDataGraphIterator iter;
1234   while (iter.repeat()) {
1235     ClassLoaderData* cld = iter.get_next();
1236     cld->classes_do(f);
1237   }
1238 }
1239 
1240 void ClassLoaderDataGraph::methods_do(void f(Method*)) {
1241   ClassLoaderDataGraphIterator iter;
1242   while (iter.repeat()) {
1243     ClassLoaderData* cld = iter.get_next();
1244     cld->methods_do(f);
1245   }
1246 }
1247 
1248 void ClassLoaderDataGraph::modules_do(void f(ModuleEntry*)) {
1249   assert_locked_or_safepoint(Module_lock);
1250   ClassLoaderDataGraphIterator iter;
1251   while (iter.repeat()) {
1252     ClassLoaderData* cld = iter.get_next();
1253     cld->modules_do(f);
1254   }
1255 }
1256 
1257 void ClassLoaderDataGraph::modules_unloading_do(void f(ModuleEntry*)) {
1258   assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1259   // Only walk from the head up to any CLDs not yet purged after a prior
1260   // unloading pass (CMS doesn't purge right away).
1261   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1262     assert(cld->is_unloading(), "invariant");
1263     cld->modules_do(f);
1264   }
1265 }
1266 
1267 void ClassLoaderDataGraph::packages_do(void f(PackageEntry*)) {
1268   assert_locked_or_safepoint(Module_lock);
1269   ClassLoaderDataGraphIterator iter;
1270   while (iter.repeat()) {
1271     ClassLoaderData* cld = iter.get_next();
1272     cld->packages_do(f);
1273   }
1274 }
1275 
1276 void ClassLoaderDataGraph::packages_unloading_do(void f(PackageEntry*)) {
1277   assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1278   // Only walk from the head up to any CLDs not yet purged after a prior
1279   // unloading pass (CMS doesn't purge right away).
1280   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1281     assert(cld->is_unloading(), "invariant");
1282     cld->packages_do(f);
1283   }
1284 }
1285 
1286 void ClassLoaderDataGraph::loaded_classes_do(KlassClosure* klass_closure) {
1287   ClassLoaderDataGraphIterator iter;
1288   while (iter.repeat()) {
1289     ClassLoaderData* cld = iter.get_next();
1290     cld->loaded_classes_do(klass_closure);
1291   }
1292 }
1293 
1294 // This case can block but cannot do unloading (called from CDS)
1295 void ClassLoaderDataGraph::unlocked_loaded_classes_do(KlassClosure* klass_closure) {
1296   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1297     cld->loaded_classes_do(klass_closure);
1298   }
1299 }
1300 
1301 
1302 void ClassLoaderDataGraph::classes_unloading_do(void f(Klass* const)) {
1303   assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1304   // Only walk from the head up to any CLDs not yet purged after a prior
1305   // unloading pass (CMS doesn't purge right away).
1306   for (ClassLoaderData* cld = _unloading; cld != _saved_unloading; cld = cld->next()) {
1307     assert(cld->is_unloading(), "invariant");
1308     cld->classes_do(f);
1309   }
1310 }
1311 
1312 #define FOR_ALL_DICTIONARY(X)   ClassLoaderDataGraphIterator iter; \
1313                                 ClassLoaderData* X; \
1314                                 while ((X = iter.get_next()) != NULL) \
1315                                   if (X->dictionary() != NULL)
1316 
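For readability, the macro above expands roughly to the following (illustrative expansion, with X renamed to cld):

ClassLoaderDataGraphIterator iter;
ClassLoaderData* cld;
while ((cld = iter.get_next()) != NULL)
  if (cld->dictionary() != NULL) {
    // ... body supplied by the FOR_ALL_DICTIONARY user,
    // e.g. cld->dictionary()->classes_do(f); ...
  }
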
1317 // Walk classes in the loaded class dictionaries in various forms.
1318 // Only walks the classes defined in this class loader.
1319 void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*)) {
1320   FOR_ALL_DICTIONARY(cld) {
1321     cld->dictionary()->classes_do(f);
1322   }
1323 }
1324 
1325 // Only walks the classes defined in this class loader.
1326 void ClassLoaderDataGraph::dictionary_classes_do(void f(InstanceKlass*, TRAPS), TRAPS) {
1327   FOR_ALL_DICTIONARY(cld) {
1328     cld->dictionary()->classes_do(f, CHECK);
1329   }
1330 }
1331 
1332 void ClassLoaderDataGraph::verify_dictionary() {
1333   FOR_ALL_DICTIONARY(cld) {
1334     cld->dictionary()->verify();
1335   }
1336 }
1337 
1338 void ClassLoaderDataGraph::print_dictionary(outputStream* st) {
1339   FOR_ALL_DICTIONARY(cld) {
1340     st->print("Dictionary for ");
1341     cld->print_value_on(st);
1342     st->cr();
1343     cld->dictionary()->print_on(st);
1344     st->cr();
1345   }
1346 }
1347 
1348 void ClassLoaderDataGraph::print_dictionary_statistics(outputStream* st) {
1349   FOR_ALL_DICTIONARY(cld) {
1350     ResourceMark rm;
1351     stringStream tempst;
1352     tempst.print("System Dictionary for %s class loader", cld->loader_name_and_id());
1353     cld->dictionary()->print_table_statistics(st, tempst.as_string());
1354   }
1355 }
1356 
1357 GrowableArray<ClassLoaderData*>* ClassLoaderDataGraph::new_clds() {
1358   assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1359   assert(_head == NULL || _saved_head != NULL, "remember_new_clds(true) not called?");
1360 
1361   GrowableArray<ClassLoaderData*>* array = new GrowableArray<ClassLoaderData*>();
1362 
1363   // The CLDs in [_head, _saved_head] were all added during last call to remember_new_clds(true);
1364   ClassLoaderData* curr = _head;
1365   while (curr != _saved_head) {
1366     if (!curr->claimed()) {
1367       array->push(curr);
1368       LogTarget(Debug, class, loader, data) lt;
1369       if (lt.is_enabled()) {
1370         LogStream ls(lt);
1371         ls.print("found new CLD: ");
1372         curr->print_value_on(&ls);
1373         ls.cr();
1374       }
1375     }
1376 
1377     curr = curr->_next;
1378   }
1379 
1380   return array;
1381 }
1382 
1383 #ifndef PRODUCT
1384 bool ClassLoaderDataGraph::contains_loader_data(ClassLoaderData* loader_data) {
1385   assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1386   for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1387     if (loader_data == data) {
1388       return true;
1389     }
1390   }
1391 
1392   return false;
1393 }
1394 #endif // PRODUCT
1395 
1396 // Move class loader data from main list to the unloaded list for unloading
1397 // and deallocation later.
1398 bool ClassLoaderDataGraph::do_unloading(bool do_cleaning) {
1399   assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1400 
1401   // Indicate whether safepoint cleanup is needed.
1402   _safepoint_cleanup_needed |= do_cleaning;
1403 
1404   ClassLoaderData* data = _head;
1405   ClassLoaderData* prev = NULL;
1406   bool seen_dead_loader = false;
1407   uint loaders_processed = 0;
1408   uint loaders_removed = 0;
1409 
1410   // Save previous _unloading pointer for CMS which may add to unloading list before
1411   // purging and we don't want to rewalk the previously unloaded class loader data.
1412   _saved_unloading = _unloading;
1413 
1414   data = _head;
1415   while (data != NULL) {
1416     if (data->is_alive()) {
1417       prev = data;
1418       data = data->next();
1419       loaders_processed++;
1420       continue;
1421     }
1422     seen_dead_loader = true;
1423     loaders_removed++;
1424     ClassLoaderData* dead = data;
1425     dead->unload();
1426     data = data->next();
1427     // Remove from loader list.
1428     // This class loader data will no longer be found
1429     // in the ClassLoaderDataGraph.
1430     if (prev != NULL) {
1431       prev->set_next(data);
1432     } else {
1433       assert(dead == _head, "sanity check");
1434       _head = data;
1435     }
1436     dead->set_next(_unloading);
1437     _unloading = dead;
1438   }
1439 
1440   log_debug(class, loader, data)("do_unloading: loaders processed %u, loaders removed %u", loaders_processed, loaders_removed);
1441 
1442   return seen_dead_loader;
1443 }
1444 
1445 // There's at least one dead class loader.  Purge references from healthy module
1446 // reads lists and package export lists to modules belonging to dead loaders.
1447 void ClassLoaderDataGraph::clean_module_and_package_info() {
1448   assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
1449 
1450   ClassLoaderData* data = _head;
1451   while (data != NULL) {
1452     // Remove entries in the dictionary of live class loader that have
1453     // initiated loading classes in a dead class loader.
1454     if (data->dictionary() != NULL) {
1455       data->dictionary()->do_unloading();
1456     }
1457     // Walk a ModuleEntry's reads, and a PackageEntry's exports
1458     // lists to determine if there are modules on those lists that are now
1459     // dead and should be removed.  A module's life cycle is equivalent
1460     // to its defining class loader's life cycle.  Since a module is
1461     // considered dead if its class loader is dead, these walks must
1462     // occur after each class loader's aliveness is determined.
1463     if (data->packages() != NULL) {
1464       data->packages()->purge_all_package_exports();
1465     }
1466     if (data->modules_defined()) {
1467       data->modules()->purge_all_module_reads();
1468     }
1469     data = data->next();
1470   }
1471 }
1472 
1473 void ClassLoaderDataGraph::purge() {
1474   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1475   ClassLoaderData* list = _unloading;
1476   _unloading = NULL;
1477   ClassLoaderData* next = list;
1478   bool classes_unloaded = false;
1479   while (next != NULL) {
1480     ClassLoaderData* purge_me = next;
1481     next = purge_me->next();
1482     delete purge_me;
1483     classes_unloaded = true;
1484   }
1485   if (classes_unloaded) {
1486     Metaspace::purge();
1487     set_metaspace_oom(false);
1488   }
1489 }
1490 
1491 int ClassLoaderDataGraph::resize_if_needed() {
1492   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1493   int resized = 0;
1494   if (Dictionary::does_any_dictionary_needs_resizing()) {
1495     FOR_ALL_DICTIONARY(cld) {
1496       if (cld->dictionary()->resize_if_needed()) {
1497         resized++;
1498       }
1499     }
1500   }
1501   return resized;
1502 }
1503 
1504 ClassLoaderDataGraphKlassIteratorAtomic::ClassLoaderDataGraphKlassIteratorAtomic()
1505     : _next_klass(NULL) {
1506   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1507   ClassLoaderData* cld = ClassLoaderDataGraph::_head;
1508   Klass* klass = NULL;
1509 
1510   // Find the first klass in the CLDG.
1511   while (cld != NULL) {
1512     assert_locked_or_safepoint(cld->metaspace_lock());
1513     klass = cld->_klasses;
1514     if (klass != NULL) {
1515       _next_klass = klass;
1516       return;
1517     }
1518     cld = cld->next();
1519   }
1520 }
1521 
1522 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass_in_cldg(Klass* klass) {
1523   Klass* next = klass->next_link();
1524   if (next != NULL) {
1525     return next;
1526   }
1527 
1528   // No more klasses in the current CLD. Time to find a new CLD.
1529   ClassLoaderData* cld = klass->class_loader_data();
1530   assert_locked_or_safepoint(cld->metaspace_lock());
1531   while (next == NULL) {
1532     cld = cld->next();
1533     if (cld == NULL) {
1534       break;
1535     }
1536     next = cld->_klasses;
1537   }
1538 
1539   return next;
1540 }
1541 
1542 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
1543   Klass* head = _next_klass;
1544 
1545   while (head != NULL) {
1546     Klass* next = next_klass_in_cldg(head);
1547 
1548     Klass* old_head = Atomic::cmpxchg(next, &_next_klass, head);
1549 
1550     if (old_head == head) {
1551       return head; // Won the CAS.
1552     }
1553 
1554     head = old_head;
1555   }
1556 
1557   // Nothing more for the iterator to hand out.
1558   assert(head == NULL, "head is " PTR_FORMAT ", expected NULL", p2i(head));
1559   return NULL;
1560 }
1561 
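
The CAS loop above lets multiple GC worker threads pull klasses from the iterator without a lock: whoever wins the cmpxchg owns head, and the shared cursor moves on. Below is a minimal stand-alone sketch of the same claim-by-CAS idiom, using std::atomic purely for illustration; none of the names in it are HotSpot APIs.

#include <atomic>

struct Item { Item* next; };

static std::atomic<Item*> cursor{nullptr};   // shared cursor over an immutable list

// Each worker claims one item at a time; returns nullptr when exhausted.
Item* claim_next() {
  Item* head = cursor.load();
  while (head != nullptr) {
    Item* next = head->next;
    // Try to advance the cursor past 'head'.  On failure another worker got
    // there first and 'head' is refreshed with the value they installed.
    if (cursor.compare_exchange_weak(head, next)) {
      return head;   // we won the race and own 'head'
    }
  }
  return nullptr;
}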
1562 ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
1563   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
1564   _data = ClassLoaderDataGraph::_head;
1565 }
1566 
1567 ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}
1568 
1569 #ifndef PRODUCT
1570 // callable from debugger
1571 extern "C" int print_loader_data_graph() {
1572   ResourceMark rm;
1573   ClassLoaderDataGraph::print_on(tty);
1574   return 0;
1575 }
1576 
1577 void ClassLoaderDataGraph::verify() {
1578   ClassLoaderDataGraphIterator iter;
1579   while (iter.repeat()) {
1580     ClassLoaderData* cld = iter.get_next();
1581     cld->verify();
1582   }
1583 }
1584 
1585 void ClassLoaderDataGraph::print_on(outputStream * const out) {
1586   ClassLoaderDataGraphIterator iter;
1587   while (iter.repeat()) {
1588     ClassLoaderData* cld = iter.get_next();
1589     cld->print_on(out);
1590   }
1591 }
1592 #endif // PRODUCT


  30 // according to its name resolution strategy.
  31 //
  32 // Class loaders that implement a deterministic name resolution strategy
  33 // (including with respect to their delegation behavior), such as the boot, the
  34 // platform, and the system loaders of the JDK's built-in class loader
  35 // hierarchy, always produce the same linkset for a given configuration.
  36 //
  37 // ClassLoaderData carries information related to a linkset (e.g.,
  38 // metaspace holding its klass definitions).
  39 // The System Dictionary and related data structures (e.g., placeholder table,
  40 // loader constraints table) as well as the runtime representation of classes
  41 // only reference ClassLoaderData.
  42 //
  43 // Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData
  44 // that represents the loader's "linking domain" in the JVM.
  45 //
  46 // The bootstrap loader (represented by NULL) also has a ClassLoaderData,
  47 // the singleton class the_null_class_loader_data().
  48 
  49 #include "precompiled.hpp"

  50 #include "classfile/classLoaderData.inline.hpp"
  51 #include "classfile/classLoaderDataGraph.inline.hpp"
  52 #include "classfile/dictionary.hpp"
  53 #include "classfile/javaClasses.hpp"
  54 #include "classfile/metadataOnStackMark.hpp"
  55 #include "classfile/moduleEntry.hpp"
  56 #include "classfile/packageEntry.hpp"
  57 #include "classfile/symbolTable.hpp"
  58 #include "classfile/systemDictionary.hpp"
  59 #include "logging/log.hpp"
  60 #include "logging/logStream.hpp"
  61 #include "memory/allocation.inline.hpp"
  62 #include "memory/metadataFactory.hpp"
  63 #include "memory/metaspaceShared.hpp"
  64 #include "memory/resourceArea.hpp"

  65 #include "oops/access.inline.hpp"
  66 #include "oops/oop.inline.hpp"
  67 #include "oops/oopHandle.inline.hpp"
  68 #include "oops/weakHandle.inline.hpp"
  69 #include "runtime/atomic.hpp"
  70 #include "runtime/handles.inline.hpp"
  71 #include "runtime/mutex.hpp"
  72 #include "runtime/orderAccess.hpp"
  73 #include "runtime/safepoint.hpp"

  74 #include "utilities/growableArray.hpp"
  75 #include "utilities/macros.hpp"
  76 #include "utilities/ostream.hpp"
  77 



  78 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
  79 
  80 void ClassLoaderData::init_null_class_loader_data() {
  81   assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  82   assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");
  83 
  84   _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  85   ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  86   assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
  87 
  88   LogTarget(Trace, class, loader, data) lt;
  89   if (lt.is_enabled()) {
  90     ResourceMark rm;
  91     LogStream ls(lt);
  92     ls.print("create ");
  93     _the_null_class_loader_data->print_value_on(&ls);
  94     ls.cr();
  95   }
  96 }
  97 


 422   // It's a dependency we won't find through GC, add it.
 423   if (!_handles.contains(to)) {
 424     NOT_PRODUCT(Atomic::inc(&_dependency_count));
 425     LogTarget(Trace, class, loader, data) lt;
 426     if (lt.is_enabled()) {
 427       ResourceMark rm;
 428       LogStream ls(lt);
 429       ls.print("adding dependency from ");
 430       print_value_on(&ls);
 431       ls.print(" to ");
 432       to_cld->print_value_on(&ls);
 433       ls.cr();
 434     }
 435     Handle dependency(Thread::current(), to);
 436     add_handle(dependency);
 437     // Added a potentially young gen oop to the ClassLoaderData
 438     record_modified_oops();
 439   }
 440 }
 441 
 442 void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
 443   {
 444     MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 445     Klass* old_value = _klasses;
 446     k->set_next_link(old_value);
 447     // Link the new item into the list, making sure the linked class is stable
 448     // since the list can be walked without a lock
 449     OrderAccess::release_store(&_klasses, k);
 450     if (k->is_array_klass()) {
 451       ClassLoaderDataGraph::inc_array_classes(1);
 452     } else {
 453       ClassLoaderDataGraph::inc_instance_classes(1);
 454     }
 455   }
 456 
 457   if (publicize) {
 458     LogTarget(Trace, class, loader, data) lt;
 459     if (lt.is_enabled()) {
 460       ResourceMark rm;
 461       LogStream ls(lt);
 462       ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), k->external_name());
 463       print_value_on(&ls);
 464       ls.cr();
 465     }
 466   }
 467 }
 468 
 469 void ClassLoaderData::initialize_holder(Handle loader_or_mirror) {
 470   if (loader_or_mirror() != NULL) {
 471     assert(_holder.is_null(), "never replace holders");
 472     _holder = WeakHandle<vm_class_loader_data>::create(loader_or_mirror);
 473   }
 474 }
 475 
 476 // Remove a klass from the _klasses list for scratch_class during redefinition
 477 // or parsed class in the case of an error.
 478 void ClassLoaderData::remove_class(Klass* scratch_class) {
 479   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 480 
 481   // Adjust global class iterator.
 482   ClassLoaderDataGraph::adjust_saved_class(scratch_class);
 483 
 484   Klass* prev = NULL;
 485   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
 486     if (k == scratch_class) {
 487       if (prev == NULL) {
 488         _klasses = k->next_link();
 489       } else {
 490         Klass* next = k->next_link();
 491         prev->set_next_link(next);
 492       }
 493 
 494       if (k->is_array_klass()) {
 495         ClassLoaderDataGraph::dec_array_classes(1);
 496       } else {
 497         ClassLoaderDataGraph::dec_instance_classes(1);
 498       }
 499 
 500       return;
 501     }
 502     prev = k;


 510 
 511   LogTarget(Trace, class, loader, data) lt;
 512   if (lt.is_enabled()) {
 513     ResourceMark rm;
 514     LogStream ls(lt);
 515     ls.print("unload");
 516     print_value_on(&ls);
 517     ls.cr();
 518   }
 519 
 520   // Some items on the _deallocate_list need to free their C heap structures
 521   // if they are not already on the _klasses list.
 522   free_deallocate_list_C_heap_structures();
 523 
 524   // Clean up class dependencies and tell serviceability tools
 525   // these classes are unloading.  Must be called
 526   // after erroneous classes are released.
 527   classes_do(InstanceKlass::unload_class);
 528 
 529   // Clean up global class iterator for compiler
 530   ClassLoaderDataGraph::adjust_saved_class(this);
 531 }
 532 
 533 ModuleEntryTable* ClassLoaderData::modules() {
 534   // Lazily create the module entry table at first request.
 535   // Lock-free access requires load_acquire.
 536   ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
 537   if (modules == NULL) {
 538     MutexLocker m1(Module_lock);
 539     // Check if _modules got allocated while we were waiting for this lock.
 540     if ((modules = _modules) == NULL) {
 541       modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
 542 
 543       {
 544         MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
 545         // Ensure _modules is stable, since it is examined without a lock
 546         OrderAccess::release_store(&_modules, modules);
 547       }
 548     }
 549   }
 550   return modules;


 813         MetadataFactory::free_metadata(this, (Method*)m);
 814       } else if (m->is_constantPool()) {
 815         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 816       } else if (m->is_klass()) {
 817         MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 818       } else {
 819         ShouldNotReachHere();
 820       }
 821     } else {
 822       // Metadata is alive.
 823       // If scratch_class is on stack then it shouldn't be on this list!
 824       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 825              "scratch classes on this list should be dead");
 826       // Also should assert that other metadata on the list was found in handles.
 827       // Some cleaning remains.
 828       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 829     }
 830   }
 831 }
 832 
 833 // This is distinct from free_deallocate_list.  For class loader data that are
 834 // unloading, this frees the C heap memory for items on the list, and unlinks
 835 // scratch or error classes so that unloading events aren't triggered for these
 836 // classes. The metadata is removed with the unloading metaspace.
 837 // There isn't C heap memory allocated for methods, so nothing is done for them.
 838 void ClassLoaderData::free_deallocate_list_C_heap_structures() {
 839   // Don't need lock, at safepoint
 840   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 841   assert(is_unloading(), "only called for ClassLoaderData that are unloading");
 842   if (_deallocate_list == NULL) {
 843     return;
 844   }
 845   // Go backwards because this removes entries that are freed.
 846   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 847     Metadata* m = _deallocate_list->at(i);
 848     _deallocate_list->remove_at(i);
 849     if (m->is_constantPool()) {
 850       ((ConstantPool*)m)->release_C_heap_structures();
 851     } else if (m->is_klass()) {
 852       InstanceKlass* ik = (InstanceKlass*)m;


 934 
 935   // Verify the integrity of the allocated space.
 936   if (metaspace_or_null() != NULL) {
 937     metaspace_or_null()->verify();
 938   }
 939 
 940   for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
 941     guarantee(k->class_loader_data() == this, "Must be the same");
 942     k->verify();
 943     assert(k != k->next_link(), "no loops!");
 944   }
 945 }
 946 
 947 bool ClassLoaderData::contains_klass(Klass* klass) {
 948   // Lock-free access requires load_acquire
 949   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 950     if (k == klass) return true;
 951   }
 952   return false;
 953 }