65 #include "memory/resourceArea.hpp"
66 #include "oops/objArrayOop.inline.hpp"
67 #include "oops/oop.inline.hpp"
68 #include "runtime/atomic.hpp"
69 #include "runtime/javaCalls.hpp"
70 #include "runtime/jniHandles.hpp"
71 #include "runtime/mutex.hpp"
72 #include "runtime/orderAccess.hpp"
73 #include "runtime/safepoint.hpp"
74 #include "runtime/synchronizer.hpp"
75 #include "utilities/growableArray.hpp"
76 #include "utilities/macros.hpp"
77 #include "utilities/ostream.hpp"
78 #if INCLUDE_ALL_GCS
79 #include "gc/g1/g1SATBCardTableModRefBS.hpp"
80 #endif // INCLUDE_ALL_GCS
81 #if INCLUDE_TRACE
82 #include "trace/tracing.hpp"
83 #endif
84
// helper function to avoid in-line casts
// Wraps the untyped OrderAccess::load_ptr_acquire (which returns void*) so
// call sites get back a correctly typed T* without a cast at every use.
template <typename T> static T* load_ptr_acquire(T* volatile *p) {
  return static_cast<T*>(OrderAccess::load_ptr_acquire(p));
}
89
// Singleton ClassLoaderData used for the null (bootstrap) class loader.
ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
91
92 ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous, Dependencies dependencies) :
93 _class_loader(h_class_loader()),
94 _is_anonymous(is_anonymous),
95 // An anonymous class loader data doesn't have anything to keep
96 // it from being unloaded during parsing of the anonymous class.
97 // The null-class-loader should always be kept alive.
98 _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
99 _metaspace(NULL), _unloading(false), _klasses(NULL),
100 _modules(NULL), _packages(NULL),
101 _claimed(0), _modified_oops(true), _accumulated_modified_oops(false),
102 _jmethod_ids(NULL), _handles(), _deallocate_list(NULL),
103 _next(NULL), _dependencies(dependencies),
104 _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
105 Monitor::_safepoint_check_never)) {
106
107 // A ClassLoaderData created solely for an anonymous class should never have a
108 // ModuleEntryTable or PackageEntryTable created for it. The defining package
109 // and module for an anonymous class will be found in its host class.
135 }
136
void ClassLoaderData::Dependencies::init(TRAPS) {
  // Create empty dependencies array to add to. CMS requires this to be
  // an oop so that it can track additions via card marks. We think.
  // CHECK propagates a pending exception (e.g. allocation failure) back
  // to the caller.
  _list_head = oopFactory::new_objectArray(2, CHECK);
}

// Deletes every chunk in the list. Uses plain (unordered) loads, so it
// assumes no concurrent readers remain at destruction time.
ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
  Chunk* c = _head;
  while (c != NULL) {
    Chunk* next = c->_next;
    delete c;
    c = next;
  }
}
151
152 oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
153 if (_head == NULL || _head->_size == Chunk::CAPACITY) {
154 Chunk* next = new Chunk(_head);
155 OrderAccess::release_store_ptr(&_head, next);
156 }
157 oop* handle = &_head->_data[_head->_size];
158 *handle = o;
159 OrderAccess::release_store(&_head->_size, _head->_size + 1);
160 return handle;
161 }
162
163 inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
164 for (juint i = 0; i < size; i++) {
165 if (c->_data[i] != NULL) {
166 f->do_oop(&c->_data[i]);
167 }
168 }
169 }
170
171 void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
172 Chunk* head = (Chunk*) OrderAccess::load_ptr_acquire(&_head);
173 if (head != NULL) {
174 // Must be careful when reading size of head
175 oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
176 for (Chunk* c = head->_next; c != NULL; c = c->_next) {
177 oops_do_chunk(f, c, c->_size);
178 }
179 }
180 }
181
182 #ifdef ASSERT
183 class VerifyContainsOopClosure : public OopClosure {
184 oop* _target;
185 bool _found;
186
187 public:
188 VerifyContainsOopClosure(oop* target) : _target(target), _found(false) {}
189
190 void do_oop(oop* p) {
191 if (p == _target) {
192 _found = true;
// Applies f to every oop owned by this CLD: the class loader oop itself,
// the dependency list head, and all handles in _handles.
// If must_claim is set, the CLD is skipped unless claim() succeeds, so a
// CLD is not processed twice in the same traversal.
void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) {
  if (must_claim && !claim()) {
    return;
  }

  // Only clear modified_oops after the ClassLoaderData is claimed.
  if (clear_mod_oops) {
    clear_modified_oops();
  }

  f->do_oop(&_class_loader);
  _dependencies.oops_do(f);
  _handles.oops_do(f);
}

// The whole dependency structure hangs off a single objArray oop (see
// Dependencies::init()), so one root visit covers it.
void ClassLoaderData::Dependencies::oops_do(OopClosure* f) {
  f->do_oop((oop*)&_list_head);
}
258
259 void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
260 // Lock-free access requires load_ptr_acquire
261 for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
262 klass_closure->do_klass(k);
263 assert(k != k->next_link(), "no loops!");
264 }
265 }
266
267 void ClassLoaderData::classes_do(void f(Klass * const)) {
268 // Lock-free access requires load_ptr_acquire
269 for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
270 f(k);
271 assert(k != k->next_link(), "no loops!");
272 }
273 }
274
275 void ClassLoaderData::methods_do(void f(Method*)) {
276 // Lock-free access requires load_ptr_acquire
277 for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
278 if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
279 InstanceKlass::cast(k)->methods_do(f);
280 }
281 }
282 }
283
284 void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
285 // Lock-free access requires load_ptr_acquire
286 for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
287 // Do not filter ArrayKlass oops here...
288 if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
289 klass_closure->do_klass(k);
290 }
291 }
292 }
293
294 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
295 // Lock-free access requires load_ptr_acquire
296 for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
297 if (k->is_instance_klass()) {
298 f(InstanceKlass::cast(k));
299 }
300 assert(k != k->next_link(), "no loops!");
301 }
302 }
303
304 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
305 assert_locked_or_safepoint(Module_lock);
306 if (_unnamed_module != NULL) {
307 f(_unnamed_module);
308 }
309 if (_modules != NULL) {
310 for (int i = 0; i < _modules->table_size(); i++) {
311 for (ModuleEntry* entry = _modules->bucket(i);
312 entry != NULL;
313 entry = entry->next()) {
314 f(entry);
315 }
316 }
432 if (last->obj_at(0) == NULL) {
433 last->obj_at_put(0, new_dependency->obj_at(0));
434 } else {
435 last->obj_at_put(1, new_dependency());
436 }
437 }
438
439 void ClassLoaderDataGraph::clear_claimed_marks() {
440 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
441 cld->clear_claimed();
442 }
443 }
444
445 void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
446 {
447 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
448 Klass* old_value = _klasses;
449 k->set_next_link(old_value);
450 // Link the new item into the list, making sure the linked class is stable
451 // since the list can be walked without a lock
452 OrderAccess::release_store_ptr(&_klasses, k);
453 }
454
455 if (publicize && k->class_loader_data() != NULL) {
456 ResourceMark rm;
457 log_trace(class, loader, data)("Adding k: " PTR_FORMAT " %s to CLD: "
458 PTR_FORMAT " loader: " PTR_FORMAT " %s",
459 p2i(k),
460 k->external_name(),
461 p2i(k->class_loader_data()),
462 p2i((void *)k->class_loader()),
463 loader_name());
464 }
465 }
466
467 // Class iterator used by the compiler. It gets some number of classes at
468 // a safepoint to decay invocation counters on the methods.
469 class ClassLoaderDataGraphKlassIteratorStatic {
470 ClassLoaderData* _current_loader_data;
471 Klass* _current_class_entry;
472 public:
572 LogStream ls(lt);
573 ls.print(": unload loader data " INTPTR_FORMAT, p2i(this));
574 ls.print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)class_loader()),
575 loader_name());
576 if (is_anonymous()) {
577 ls.print(" for anonymous class " INTPTR_FORMAT " ", p2i(_klasses));
578 }
579 ls.cr();
580 }
581
582 // In some rare cases items added to this list will not be freed elsewhere.
583 // To keep it simple, just free everything in it here.
584 free_deallocate_list();
585
586 // Clean up global class iterator for compiler
587 static_klass_iterator.adjust_saved_class(this);
588 }
589
590 ModuleEntryTable* ClassLoaderData::modules() {
591 // Lazily create the module entry table at first request.
592 // Lock-free access requires load_ptr_acquire.
593 ModuleEntryTable* modules = load_ptr_acquire(&_modules);
594 if (modules == NULL) {
595 MutexLocker m1(Module_lock);
596 // Check if _modules got allocated while we were waiting for this lock.
597 if ((modules = _modules) == NULL) {
598 modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
599
600 {
601 MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
602 // Ensure _modules is stable, since it is examined without a lock
603 OrderAccess::release_store_ptr(&_modules, modules);
604 }
605 }
606 }
607 return modules;
608 }
609
// Sizing knobs for dictionary allocation; consumed by
// calculate_dictionary_size() below.
const int _boot_loader_dictionary_size = 1009;
const int _default_loader_dictionary_size = 107;
const int _prime_array_size = 8; // array of primes for system dictionary size
const int _average_depth_goal = 3; // goal for lookup length
const int _primelist[_prime_array_size] = {107, 1009, 2017, 4049, 5051, 10103, 20201, 40423};
615
616 // Calculate a "good" dictionary size based
617 // on predicted or current loaded classes count.
618 static int calculate_dictionary_size(int classcount) {
619 int newsize = _primelist[0];
620 if (classcount > 0 && !DumpSharedSpaces) {
621 int index = 0;
622 int desiredsize = classcount/_average_depth_goal;
623 for (newsize = _primelist[index]; index < _prime_array_size -1;
720
721 // Returns true if this class loader data is for the platform class loader.
722 bool ClassLoaderData::is_platform_class_loader_data() const {
723 return SystemDictionary::is_platform_class_loader(class_loader());
724 }
725
726 // Returns true if this class loader data is one of the 3 builtin
727 // (boot, application/system or platform) class loaders. Note, the
728 // builtin loaders are not freed by a GC.
729 bool ClassLoaderData::is_builtin_class_loader_data() const {
730 return (is_the_null_class_loader_data() ||
731 SystemDictionary::is_system_class_loader(class_loader()) ||
732 SystemDictionary::is_platform_class_loader(class_loader()));
733 }
734
735 Metaspace* ClassLoaderData::metaspace_non_null() {
736 // If the metaspace has not been allocated, create a new one. Might want
737 // to create smaller arena for Reflection class loaders also.
738 // The reason for the delayed allocation is because some class loaders are
739 // simply for delegating with no metadata of their own.
740 // Lock-free access requires load_ptr_acquire.
741 Metaspace* metaspace = load_ptr_acquire(&_metaspace);
742 if (metaspace == NULL) {
743 MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
744 // Check if _metaspace got allocated while we were waiting for this lock.
745 if ((metaspace = _metaspace) == NULL) {
746 if (this == the_null_class_loader_data()) {
747 assert (class_loader() == NULL, "Must be");
748 metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
749 } else if (is_anonymous()) {
750 if (class_loader() != NULL) {
751 log_trace(class, loader, data)("is_anonymous: %s", class_loader()->klass()->internal_name());
752 }
753 metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
754 } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
755 if (class_loader() != NULL) {
756 log_trace(class, loader, data)("is_reflection: %s", class_loader()->klass()->internal_name());
757 }
758 metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
759 } else {
760 metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
761 }
762 // Ensure _metaspace is stable, since it is examined without a lock
763 OrderAccess::release_store_ptr(&_metaspace, metaspace);
764 }
765 }
766 return metaspace;
767 }
768
// Copies the oop out of Handle h into this CLD's chunked handle list and
// returns an OopHandle for the new slot. The metaspace lock serializes
// writers; record_modified_oops() is called before the store — presumably
// so a concurrent collector sees the modified flag no later than the new
// oop (confirm against the GC code that reads _modified_oops).
OopHandle ClassLoaderData::add_handle(Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  record_modified_oops();
  return OopHandle(_handles.add(h()));
}
774
775 void ClassLoaderData::remove_handle(OopHandle h) {
776 oop* ptr = h.ptr_raw();
777 if (ptr != NULL) {
778 assert(_handles.contains(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
779 #if INCLUDE_ALL_GCS
780 // This barrier is used by G1 to remember the old oop values, so
781 // that we don't forget any objects that were live at the snapshot at
782 // the beginning.
783 if (UseG1GC) {
// Sanity checks for this CLD: the loader oop maps back to this CLD (or
// the CLD is anonymous), the metaspace is internally consistent, and
// every klass on the list belongs to this CLD.
void ClassLoaderData::verify() {
  assert_locked_or_safepoint(_metaspace_lock);
  oop cl = class_loader();

  guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
  guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");

  // Verify the integrity of the allocated space.
  if (metaspace_or_null() != NULL) {
    metaspace_or_null()->verify();
  }

  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    guarantee(k->class_loader_data() == this, "Must be the same");
    k->verify();
    assert(k != k->next_link(), "no loops!");
  }
}
915
916 bool ClassLoaderData::contains_klass(Klass* klass) {
917 // Lock-free access requires load_ptr_acquire
918 for (Klass* k = load_ptr_acquire(&_klasses); k != NULL; k = k->next_link()) {
919 if (k == klass) return true;
920 }
921 return false;
922 }
923
924
// GC root of class loader data created.
ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
// CLDs detached from the live list — presumably pending purge after class
// unloading; the consumers are elsewhere in this file.
ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
// Saved snapshots of the two lists above; set and read elsewhere.
ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;

bool ClassLoaderDataGraph::_should_purge = false;
// Records that a metaspace out-of-memory condition has occurred.
bool ClassLoaderDataGraph::_metaspace_oom = false;
933
934 // Add a new class loader data node to the list. Assign the newly created
935 // ClassLoaderData into the java/lang/ClassLoader object as a hidden field
936 ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous, TRAPS) {
937 // We need to allocate all the oops for the ClassLoaderData before allocating the
938 // actual ClassLoaderData object.
939 ClassLoaderData::Dependencies dependencies(CHECK_NULL);
940
941 NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
942 // ClassLoaderData in the graph since the CLD
943 // contains unhandled oops
944
945 ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous, dependencies);
946
947
948 if (!is_anonymous) {
949 ClassLoaderData** cld_addr = java_lang_ClassLoader::loader_data_addr(loader());
950 // First, Atomically set it
951 ClassLoaderData* old = (ClassLoaderData*) Atomic::cmpxchg_ptr(cld, cld_addr, NULL);
952 if (old != NULL) {
953 delete cld;
954 // Returns the data.
955 return old;
956 }
957 }
958
959 // We won the race, and therefore the task of adding the data to the list of
960 // class loader data
961 ClassLoaderData** list_head = &_head;
962 ClassLoaderData* next = _head;
963
964 do {
965 cld->set_next(next);
966 ClassLoaderData* exchanged = (ClassLoaderData*)Atomic::cmpxchg_ptr(cld, list_head, next);
967 if (exchanged == next) {
968 LogTarget(Debug, class, loader, data) lt;
969 if (lt.is_enabled()) {
970 PauseNoSafepointVerifier pnsv(&no_safepoints); // Need safe points for JavaCalls::call_virtual
971 LogStream ls(lt);
972 print_creation(&ls, loader, cld, CHECK_NULL);
973 }
974 return cld;
975 }
976 next = exchanged;
977 } while (true);
978 }
979
980 void ClassLoaderDataGraph::print_creation(outputStream* out, Handle loader, ClassLoaderData* cld, TRAPS) {
981 Handle string;
982 if (loader.not_null()) {
983 // Include the result of loader.toString() in the output. This allows
984 // the user of the log to identify the class loader instance.
985 JavaValue result(T_OBJECT);
986 Klass* spec_klass = SystemDictionary::ClassLoader_klass();
1370 // No more klasses in the current CLD. Time to find a new CLD.
1371 ClassLoaderData* cld = klass->class_loader_data();
1372 assert_locked_or_safepoint(cld->metaspace_lock());
1373 while (next == NULL) {
1374 cld = cld->next();
1375 if (cld == NULL) {
1376 break;
1377 }
1378 next = cld->_klasses;
1379 }
1380
1381 return next;
1382 }
1383
1384 Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
1385 Klass* head = _next_klass;
1386
1387 while (head != NULL) {
1388 Klass* next = next_klass_in_cldg(head);
1389
1390 Klass* old_head = (Klass*)Atomic::cmpxchg_ptr(next, &_next_klass, head);
1391
1392 if (old_head == head) {
1393 return head; // Won the CAS.
1394 }
1395
1396 head = old_head;
1397 }
1398
1399 // Nothing more for the iterator to hand out.
1400 assert(head == NULL, "head is " PTR_FORMAT ", expected not null:", p2i(head));
1401 return NULL;
1402 }
1403
// Starts the metaspace iterator at the head of the CLD graph.
ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
  _data = ClassLoaderDataGraph::_head;
}

ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}
1409
1410 #ifndef PRODUCT
|
65 #include "memory/resourceArea.hpp"
66 #include "oops/objArrayOop.inline.hpp"
67 #include "oops/oop.inline.hpp"
68 #include "runtime/atomic.hpp"
69 #include "runtime/javaCalls.hpp"
70 #include "runtime/jniHandles.hpp"
71 #include "runtime/mutex.hpp"
72 #include "runtime/orderAccess.hpp"
73 #include "runtime/safepoint.hpp"
74 #include "runtime/synchronizer.hpp"
75 #include "utilities/growableArray.hpp"
76 #include "utilities/macros.hpp"
77 #include "utilities/ostream.hpp"
78 #if INCLUDE_ALL_GCS
79 #include "gc/g1/g1SATBCardTableModRefBS.hpp"
80 #endif // INCLUDE_ALL_GCS
81 #if INCLUDE_TRACE
82 #include "trace/tracing.hpp"
83 #endif
84
// Singleton ClassLoaderData used for the null (bootstrap) class loader.
ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
86
87 ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous, Dependencies dependencies) :
88 _class_loader(h_class_loader()),
89 _is_anonymous(is_anonymous),
90 // An anonymous class loader data doesn't have anything to keep
91 // it from being unloaded during parsing of the anonymous class.
92 // The null-class-loader should always be kept alive.
93 _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
94 _metaspace(NULL), _unloading(false), _klasses(NULL),
95 _modules(NULL), _packages(NULL),
96 _claimed(0), _modified_oops(true), _accumulated_modified_oops(false),
97 _jmethod_ids(NULL), _handles(), _deallocate_list(NULL),
98 _next(NULL), _dependencies(dependencies),
99 _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
100 Monitor::_safepoint_check_never)) {
101
102 // A ClassLoaderData created solely for an anonymous class should never have a
103 // ModuleEntryTable or PackageEntryTable created for it. The defining package
104 // and module for an anonymous class will be found in its host class.
130 }
131
void ClassLoaderData::Dependencies::init(TRAPS) {
  // Create empty dependencies array to add to. CMS requires this to be
  // an oop so that it can track additions via card marks. We think.
  // CHECK propagates a pending exception (e.g. allocation failure) back
  // to the caller.
  _list_head = oopFactory::new_objectArray(2, CHECK);
}

// Deletes every chunk in the list. Uses plain (unordered) loads, so it
// assumes no concurrent readers remain at destruction time.
ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
  Chunk* c = _head;
  while (c != NULL) {
    Chunk* next = c->_next;
    delete c;
    c = next;
  }
}

// Stores oop o in the next free slot, growing the chunk list when the
// head chunk is full (or absent), and returns the slot's address.
// Writers are serialized by the caller (add_handle() holds the metaspace
// lock); readers walk the list lock-free, so both the new chunk pointer
// and the incremented size are published with release stores.
oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
  if (_head == NULL || _head->_size == Chunk::CAPACITY) {
    Chunk* next = new Chunk(_head);
    OrderAccess::release_store(&_head, next);
  }
  oop* handle = &_head->_data[_head->_size];
  *handle = o;
  OrderAccess::release_store(&_head->_size, _head->_size + 1);
  return handle;
}

// Applies closure f to each non-NULL oop slot in chunk c, visiting only
// the first 'size' slots (the caller decides how much of c is readable).
inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
  for (juint i = 0; i < size; i++) {
    if (c->_data[i] != NULL) {
      f->do_oop(&c->_data[i]);
    }
  }
}

// Lock-free iteration over all handles; pairs with the release stores in
// add(). Only the head chunk is still being written to, so only its size
// needs an acquire read; older chunks were filled before being replaced
// as head and are stable.
void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
  Chunk* head = OrderAccess::load_acquire(&_head);
  if (head != NULL) {
    // Must be careful when reading size of head
    oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
    for (Chunk* c = head->_next; c != NULL; c = c->_next) {
      oops_do_chunk(f, c, c->_size);
    }
  }
}
176
177 #ifdef ASSERT
178 class VerifyContainsOopClosure : public OopClosure {
179 oop* _target;
180 bool _found;
181
182 public:
183 VerifyContainsOopClosure(oop* target) : _target(target), _found(false) {}
184
185 void do_oop(oop* p) {
186 if (p == _target) {
187 _found = true;
// Applies f to every oop owned by this CLD: the class loader oop itself,
// the dependency list head, and all handles in _handles.
// If must_claim is set, the CLD is skipped unless claim() succeeds, so a
// CLD is not processed twice in the same traversal.
void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) {
  if (must_claim && !claim()) {
    return;
  }

  // Only clear modified_oops after the ClassLoaderData is claimed.
  if (clear_mod_oops) {
    clear_modified_oops();
  }

  f->do_oop(&_class_loader);
  _dependencies.oops_do(f);
  _handles.oops_do(f);
}

// The whole dependency structure hangs off a single objArray oop (see
// Dependencies::init()), so one root visit covers it.
void ClassLoaderData::Dependencies::oops_do(OopClosure* f) {
  f->do_oop((oop*)&_list_head);
}

// Iterates over every klass in this CLD (loaded or not) with a closure.
// All of these iterators pair their load_acquire of _klasses with the
// release_store in add_class(), so the list can be walked without a lock.
void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    klass_closure->do_klass(k);
    assert(k != k->next_link(), "no loops!");
  }
}

// Same iteration with a plain function pointer instead of a closure.
void ClassLoaderData::classes_do(void f(Klass * const)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    f(k);
    assert(k != k->next_link(), "no loops!");
  }
}

// Applies f to every method of every loaded InstanceKlass in this CLD.
void ClassLoaderData::methods_do(void f(Method*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
      InstanceKlass::cast(k)->methods_do(f);
    }
  }
}

// Visits array klasses unconditionally, but instance klasses only once
// they are loaded.
void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    // Do not filter ArrayKlass oops here...
    if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
      klass_closure->do_klass(k);
    }
  }
}

// Applies f to each InstanceKlass (loaded or not), skipping array klasses.
void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k->is_instance_klass()) {
      f(InstanceKlass::cast(k));
    }
    assert(k != k->next_link(), "no loops!");
  }
}
298
299 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
300 assert_locked_or_safepoint(Module_lock);
301 if (_unnamed_module != NULL) {
302 f(_unnamed_module);
303 }
304 if (_modules != NULL) {
305 for (int i = 0; i < _modules->table_size(); i++) {
306 for (ModuleEntry* entry = _modules->bucket(i);
307 entry != NULL;
308 entry = entry->next()) {
309 f(entry);
310 }
311 }
427 if (last->obj_at(0) == NULL) {
428 last->obj_at_put(0, new_dependency->obj_at(0));
429 } else {
430 last->obj_at_put(1, new_dependency());
431 }
432 }
433
// Resets the claimed flag on every CLD in the graph so a subsequent
// traversal can claim each one afresh.
void ClassLoaderDataGraph::clear_claimed_marks() {
  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
    cld->clear_claimed();
  }
}

// Prepends klass k to this CLD's lock-free klass list. When publicize is
// true (the default) and k already knows its CLD, the addition is traced.
void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
  {
    MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    Klass* old_value = _klasses;
    k->set_next_link(old_value);
    // Link the new item into the list, making sure the linked class is stable
    // since the list can be walked without a lock
    OrderAccess::release_store(&_klasses, k);
  }

  if (publicize && k->class_loader_data() != NULL) {
    ResourceMark rm;
    log_trace(class, loader, data)("Adding k: " PTR_FORMAT " %s to CLD: "
                  PTR_FORMAT " loader: " PTR_FORMAT " %s",
                  p2i(k),
                  k->external_name(),
                  p2i(k->class_loader_data()),
                  p2i((void *)k->class_loader()),
                  loader_name());
  }
}
461
462 // Class iterator used by the compiler. It gets some number of classes at
463 // a safepoint to decay invocation counters on the methods.
464 class ClassLoaderDataGraphKlassIteratorStatic {
465 ClassLoaderData* _current_loader_data;
466 Klass* _current_class_entry;
467 public:
567 LogStream ls(lt);
568 ls.print(": unload loader data " INTPTR_FORMAT, p2i(this));
569 ls.print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)class_loader()),
570 loader_name());
571 if (is_anonymous()) {
572 ls.print(" for anonymous class " INTPTR_FORMAT " ", p2i(_klasses));
573 }
574 ls.cr();
575 }
576
577 // In some rare cases items added to this list will not be freed elsewhere.
578 // To keep it simple, just free everything in it here.
579 free_deallocate_list();
580
581 // Clean up global class iterator for compiler
582 static_klass_iterator.adjust_saved_class(this);
583 }
584
// Returns this CLD's ModuleEntryTable, creating it on first request.
// Double-checked locking: lock-free acquire fast path, then a re-check
// under Module_lock before allocating.
ModuleEntryTable* ClassLoaderData::modules() {
  // Lazily create the module entry table at first request.
  // Lock-free access requires load_acquire.
  ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
  if (modules == NULL) {
    MutexLocker m1(Module_lock);
    // Check if _modules got allocated while we were waiting for this lock.
    if ((modules = _modules) == NULL) {
      modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);

      {
        MutexLockerEx m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
        // Ensure _modules is stable, since it is examined without a lock
        OrderAccess::release_store(&_modules, modules);
      }
    }
  }
  return modules;
}

// Sizing knobs for dictionary allocation; consumed by
// calculate_dictionary_size() below.
const int _boot_loader_dictionary_size = 1009;
const int _default_loader_dictionary_size = 107;
const int _prime_array_size = 8; // array of primes for system dictionary size
const int _average_depth_goal = 3; // goal for lookup length
const int _primelist[_prime_array_size] = {107, 1009, 2017, 4049, 5051, 10103, 20201, 40423};
610
611 // Calculate a "good" dictionary size based
612 // on predicted or current loaded classes count.
613 static int calculate_dictionary_size(int classcount) {
614 int newsize = _primelist[0];
615 if (classcount > 0 && !DumpSharedSpaces) {
616 int index = 0;
617 int desiredsize = classcount/_average_depth_goal;
618 for (newsize = _primelist[index]; index < _prime_array_size -1;
715
// Returns true if this class loader data is for the platform class loader.
bool ClassLoaderData::is_platform_class_loader_data() const {
  return SystemDictionary::is_platform_class_loader(class_loader());
}

// Returns true if this class loader data is one of the 3 builtin
// (boot, application/system or platform) class loaders. Note, the
// builtin loaders are not freed by a GC.
// The boot loader is recognized by identity (the null CLD); the other two
// are identified by asking the SystemDictionary about the loader oop.
bool ClassLoaderData::is_builtin_class_loader_data() const {
  return (is_the_null_class_loader_data() ||
          SystemDictionary::is_system_class_loader(class_loader()) ||
          SystemDictionary::is_platform_class_loader(class_loader()));
}
729
// Returns this CLD's Metaspace, allocating it lazily on first use.
// Double-checked locking: lock-free acquire fast path, then a re-check
// under the metaspace lock before allocating. The metaspace type chosen
// depends on what kind of loader this CLD represents.
Metaspace* ClassLoaderData::metaspace_non_null() {
  // If the metaspace has not been allocated, create a new one. Might want
  // to create smaller arena for Reflection class loaders also.
  // The reason for the delayed allocation is because some class loaders are
  // simply for delegating with no metadata of their own.
  // Lock-free access requires load_acquire.
  Metaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
  if (metaspace == NULL) {
    MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
    // Check if _metaspace got allocated while we were waiting for this lock.
    if ((metaspace = _metaspace) == NULL) {
      if (this == the_null_class_loader_data()) {
        assert (class_loader() == NULL, "Must be");
        metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
      } else if (is_anonymous()) {
        if (class_loader() != NULL) {
          log_trace(class, loader, data)("is_anonymous: %s", class_loader()->klass()->internal_name());
        }
        metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
      } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
        if (class_loader() != NULL) {
          log_trace(class, loader, data)("is_reflection: %s", class_loader()->klass()->internal_name());
        }
        metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
      } else {
        metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
      }
      // Ensure _metaspace is stable, since it is examined without a lock
      OrderAccess::release_store(&_metaspace, metaspace);
    }
  }
  return metaspace;
}

// Copies the oop out of Handle h into this CLD's chunked handle list and
// returns an OopHandle for the new slot. The metaspace lock serializes
// writers; record_modified_oops() is called before the store — presumably
// so a concurrent collector sees the modified flag no later than the new
// oop (confirm against the GC code that reads _modified_oops).
OopHandle ClassLoaderData::add_handle(Handle h) {
  MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  record_modified_oops();
  return OopHandle(_handles.add(h()));
}
769
770 void ClassLoaderData::remove_handle(OopHandle h) {
771 oop* ptr = h.ptr_raw();
772 if (ptr != NULL) {
773 assert(_handles.contains(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
774 #if INCLUDE_ALL_GCS
775 // This barrier is used by G1 to remember the old oop values, so
776 // that we don't forget any objects that were live at the snapshot at
777 // the beginning.
778 if (UseG1GC) {
// Sanity checks for this CLD: the loader oop maps back to this CLD (or
// the CLD is anonymous), the metaspace is internally consistent, and
// every klass on the list belongs to this CLD.
void ClassLoaderData::verify() {
  assert_locked_or_safepoint(_metaspace_lock);
  oop cl = class_loader();

  guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
  guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");

  // Verify the integrity of the allocated space.
  if (metaspace_or_null() != NULL) {
    metaspace_or_null()->verify();
  }

  for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
    guarantee(k->class_loader_data() == this, "Must be the same");
    k->verify();
    assert(k != k->next_link(), "no loops!");
  }
}

// Returns true if klass is on this CLD's klass list. Linear scan, safe
// without a lock thanks to the acquire/release publication of _klasses
// (see add_class()).
bool ClassLoaderData::contains_klass(Klass* klass) {
  // Lock-free access requires load_acquire
  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
    if (k == klass) return true;
  }
  return false;
}


// GC root of class loader data created.
ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
// CLDs detached from the live list — presumably pending purge after class
// unloading; the consumers are elsewhere in this file.
ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
// Saved snapshots of the two lists above; set and read elsewhere.
ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;

bool ClassLoaderDataGraph::_should_purge = false;
// Records that a metaspace out-of-memory condition has occurred.
bool ClassLoaderDataGraph::_metaspace_oom = false;
928
// Add a new class loader data node to the list. Assign the newly created
// ClassLoaderData into the java/lang/ClassLoader object as a hidden field.
// May be called by racing threads for the same loader; exactly one wins
// each CAS, and a loser deletes its speculative CLD and returns the winner's.
ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous, TRAPS) {
  // We need to allocate all the oops for the ClassLoaderData before allocating the
  // actual ClassLoaderData object.
  ClassLoaderData::Dependencies dependencies(CHECK_NULL);

  NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
                                     // ClassLoaderData in the graph since the CLD
                                     // contains unhandled oops

  ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous, dependencies);


  if (!is_anonymous) {
    ClassLoaderData** cld_addr = java_lang_ClassLoader::loader_data_addr(loader());
    // First, Atomically set it
    ClassLoaderData* old = Atomic::cmpxchg(cld, cld_addr, (ClassLoaderData*)NULL);
    if (old != NULL) {
      delete cld;
      // Returns the data.
      return old;
    }
  }

  // We won the race, and therefore the task of adding the data to the list of
  // class loader data
  ClassLoaderData** list_head = &_head;
  ClassLoaderData* next = _head;

  do {
    cld->set_next(next);
    // Push cld onto the head of the graph list; retry with the observed
    // head if another thread pushed first.
    ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
    if (exchanged == next) {
      LogTarget(Debug, class, loader, data) lt;
      if (lt.is_enabled()) {
        PauseNoSafepointVerifier pnsv(&no_safepoints); // Need safe points for JavaCalls::call_virtual
        LogStream ls(lt);
        print_creation(&ls, loader, cld, CHECK_NULL);
      }
      return cld;
    }
    next = exchanged;
  } while (true);
}
974
975 void ClassLoaderDataGraph::print_creation(outputStream* out, Handle loader, ClassLoaderData* cld, TRAPS) {
976 Handle string;
977 if (loader.not_null()) {
978 // Include the result of loader.toString() in the output. This allows
979 // the user of the log to identify the class loader instance.
980 JavaValue result(T_OBJECT);
981 Klass* spec_klass = SystemDictionary::ClassLoader_klass();
1365 // No more klasses in the current CLD. Time to find a new CLD.
1366 ClassLoaderData* cld = klass->class_loader_data();
1367 assert_locked_or_safepoint(cld->metaspace_lock());
1368 while (next == NULL) {
1369 cld = cld->next();
1370 if (cld == NULL) {
1371 break;
1372 }
1373 next = cld->_klasses;
1374 }
1375
1376 return next;
1377 }
1378
// Atomically claims and returns the next klass in the CLD graph, or NULL
// when the iterator is exhausted. Multiple threads may race on
// _next_klass; the CAS loop retries with the observed head on contention.
Klass* ClassLoaderDataGraphKlassIteratorAtomic::next_klass() {
  Klass* head = _next_klass;

  while (head != NULL) {
    Klass* next = next_klass_in_cldg(head);

    Klass* old_head = Atomic::cmpxchg(next, &_next_klass, head);

    if (old_head == head) {
      return head; // Won the CAS.
    }

    head = old_head;
  }

  // Nothing more for the iterator to hand out.
  assert(head == NULL, "head is " PTR_FORMAT ", expected not null:", p2i(head));
  return NULL;
}

// Starts the metaspace iterator at the head of the CLD graph.
ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
  _data = ClassLoaderDataGraph::_head;
}

ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}
1404
1405 #ifndef PRODUCT
|