25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/dictionary.hpp"
28 #include "classfile/systemDictionary.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "gc_implementation/shared/markSweep.inline.hpp"
31 #include "gc_interface/collectedHeap.inline.hpp"
32 #include "memory/heapInspection.hpp"
33 #include "memory/metadataFactory.hpp"
34 #include "memory/oopFactory.hpp"
35 #include "memory/resourceArea.hpp"
36 #include "oops/instanceKlass.hpp"
37 #include "oops/klass.inline.hpp"
38 #include "oops/oop.inline2.hpp"
39 #include "runtime/atomic.inline.hpp"
40 #include "runtime/orderAccess.inline.hpp"
41 #include "trace/traceMacros.hpp"
42 #include "utilities/stack.hpp"
43 #include "utilities/macros.hpp"
44 #if INCLUDE_ALL_GCS
45 #include "gc_implementation/parallelScavenge/psParallelCompact.hpp"
46 #include "gc_implementation/parallelScavenge/psPromotionManager.hpp"
47 #include "gc_implementation/parallelScavenge/psScavenge.hpp"
48 #endif // INCLUDE_ALL_GCS
49
50 void Klass::set_name(Symbol* n) {
51 _name = n;
52 if (_name != NULL) _name->increment_refcount();
53 }
54
55 bool Klass::is_subclass_of(const Klass* k) const {
56 // Run up the super chain and check
57 if (this == k) return true;
58
59 Klass* t = const_cast<Klass*>(this)->super();
60
61 while (t != NULL) {
62 if (t == k) return true;
63 t = t->super();
64 }
142
// Placement-style allocator: Klass instances live in the owning class
// loader's metaspace, not on the C heap.  |word_size| is the requested
// size in words; the C++ |size| (sizeof) argument is intentionally
// unused.  CHECK_NULL makes this return NULL to the caller if the
// allocation leaves a pending exception (TRAPS convention).
void* Klass::operator new(size_t size, ClassLoaderData* loader_data, size_t word_size, TRAPS) throw() {
  return Metaspace::allocate(loader_data, word_size, /*read_only*/false,
                             MetaspaceObj::ClassType, CHECK_NULL);
}
147
// Base constructor shared by all Klass variants.  Every field is put
// into a conservative "empty" state; initialize_supers() and the
// concrete subclass constructors fill in the real data later.
Klass::Klass() {
  Klass* k = this;

  // Preinitialize supertype information.
  // A later call to initialize_supers() may update these settings:
  set_super(NULL);
  for (juint i = 0; i < Klass::primary_super_limit(); i++) {
    _primary_supers[i] = NULL;
  }
  set_secondary_supers(NULL);
  set_secondary_super_cache(NULL);
  // Until the real super chain is installed, this klass is its own sole
  // primary super; fast subtype checks consult slot 0.
  _primary_supers[0] = k;
  set_super_check_offset(in_bytes(primary_supers_offset()));

  set_java_mirror(NULL);
  set_modifier_flags(0);
  set_layout_helper(Klass::_lh_neutral_value);  // neither instance nor array yet
  set_name(NULL);
  AccessFlags af;
  af.set_flags(0);
  set_access_flags(af);
  set_subklass(NULL);
  set_next_sibling(NULL);
  set_next_link(NULL);
  TRACE_INIT_ID(this);  // assign an id for event tracing

  // Biased locking: start from the default prototype header with no
  // revocation history recorded.
  set_prototype_header(markOopDesc::prototype());
  set_biased_lock_revocation_count(0);
  set_last_biased_lock_bulk_revocation_time(0);

  // The klass doesn't have any references at this point.
  clear_modified_oops();
  clear_accumulated_modified_oops();
}
182
366 // make ourselves the superklass' first subklass
367 super->set_subklass(this);
368 debug_only(verify();)
369 }
370
// Liveness check used during class unloading: this klass counts as
// alive iff its java mirror is still marked live by |is_alive|.  The
// debug build cross-checks that a live mirror implies a live class
// loader; the converse does not hold for anonymous classes.
bool Klass::is_loader_alive(BoolObjectClosure* is_alive) {
#ifdef ASSERT
  // The class is alive iff the class loader is alive.
  oop loader = class_loader();
  bool loader_alive = (loader == NULL) || is_alive->do_object_b(loader);
#endif // ASSERT

  // The class is alive if its mirror is alive (which should be marked if the
  // loader is alive) unless it's an anonymous class.
  bool mirror_alive = is_alive->do_object_b(java_mirror());
  assert(!mirror_alive || loader_alive, "loader must be alive if the mirror is"
         " but not the other way around with anonymous classes");
  return mirror_alive;
}
385
// Prune dead classes out of the subclass/sibling links of the class
// hierarchy, starting from java.lang.Object.  Classes whose loaders
// are no longer alive are unlinked; surviving InstanceKlasses also get
// their implementors list and method data cleaned.
// NOTE(review): a few interior lines of this function are not visible
// in this chunk (the subclass trace block appears truncated); the
// comments below describe only the code that is shown.
void Klass::clean_weak_klass_links(BoolObjectClosure* is_alive) {
  if (!ClassUnloading) {
    // Class unloading disabled: every class stays linked.
    return;
  }

  Klass* root = SystemDictionary::Object_klass();
  Stack<Klass*, mtGC> stack;

  // Iterative depth-first walk over the subclass/sibling tree.
  stack.push(root);
  while (!stack.is_empty()) {
    Klass* current = stack.pop();

    assert(current->is_loader_alive(is_alive), "just checking, this should be live");

    // Find and set the first alive subklass
    Klass* sub = current->subklass();
    while (sub != NULL && !sub->is_loader_alive(is_alive)) {
#ifndef PRODUCT
      if (TraceClassUnloading && WizardMode) {
        ResourceMark rm;
        tty->print_cr("[Unlinking class (subclass) %s]", sub->external_name());
    current->set_subklass(sub);
    if (sub != NULL) {
      stack.push(sub);
    }

    // Find and set the first alive sibling
    Klass* sibling = current->next_sibling();
    while (sibling != NULL && !sibling->is_loader_alive(is_alive)) {
      if (TraceClassUnloading && WizardMode) {
        ResourceMark rm;
        tty->print_cr("[Unlinking class (sibling) %s]", sibling->external_name());
      }
      sibling = sibling->next_sibling();
    }
    current->set_next_sibling(sibling);
    if (sibling != NULL) {
      stack.push(sibling);
    }

    // Clean the implementors list and method data.
    if (current->oop_is_instance()) {
      InstanceKlass* ik = InstanceKlass::cast(current);
      ik->clean_implementors_list(is_alive);
      ik->clean_method_data(is_alive);
    }
  }
}
438
// Post-write barrier for oop stores into this Klass: mark the klass as
// containing modified oops.  The stored value |v| itself is not needed.
void Klass::klass_update_barrier_set(oop v) {
  record_modified_oops();
}
442
// Pre-write barrier hook for oop stores into Klass metadata.
void Klass::klass_update_barrier_set_pre(void* p, oop v) {
  // This kind of barrier is used by G1 to remember the old oop values,
  // so that we don't forget any objects that were live at the snapshot
  // at the beginning of marking.  This function is only used when we
  // write oops into Klasses.  Since the Klasses are used as roots in
  // G1, we don't have to do anything here.
}
450
451 void Klass::klass_oop_store(oop* p, oop v) {
452 assert(!Universe::heap()->is_in_reserved((void*)p), "Should store pointer into metadata");
453 assert(v == NULL || Universe::heap()->is_in_reserved((void*)v), "Should store pointer to an object");
454
455 // do the store
456 if (always_do_update_barrier) {
457 klass_oop_store((volatile oop*)p, v);
458 } else {
459 klass_update_barrier_set_pre((void*)p, v);
460 *p = v;
461 klass_update_barrier_set(v);
462 }
463 }
464
// Volatile variant of klass_oop_store: same barrier protocol, but the
// store itself is a releasing store (release_store_ptr) so concurrent
// readers observe the barriers' ordering.
void Klass::klass_oop_store(volatile oop* p, oop v) {
  assert(!Universe::heap()->is_in_reserved((void*)p), "Should store pointer into metadata");
  assert(v == NULL || Universe::heap()->is_in_reserved((void*)v), "Should store pointer to an object");

  klass_update_barrier_set_pre((void*)p, v);
  OrderAccess::release_store_ptr(p, v);
  klass_update_barrier_set(v);
}
473
// Apply |cl| to the oop fields of this Klass.  The base Klass holds
// exactly one oop: the java.lang.Class mirror.
void Klass::oops_do(OopClosure* cl) {
  cl->do_oop(&_java_mirror);
}
477
478 void Klass::remove_unshareable_info() {
479 assert (DumpSharedSpaces, "only called for DumpSharedSpaces");
480
481 set_subklass(NULL);
482 set_next_sibling(NULL);
483 // Clear the java mirror
484 set_java_mirror(NULL);
485 set_next_link(NULL);
486
487 // Null out class_loader_data because we don't share that yet.
488 set_class_loader_data(NULL);
489 }
|
25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/dictionary.hpp"
28 #include "classfile/systemDictionary.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "gc_implementation/shared/markSweep.inline.hpp"
31 #include "gc_interface/collectedHeap.inline.hpp"
32 #include "memory/heapInspection.hpp"
33 #include "memory/metadataFactory.hpp"
34 #include "memory/oopFactory.hpp"
35 #include "memory/resourceArea.hpp"
36 #include "oops/instanceKlass.hpp"
37 #include "oops/klass.inline.hpp"
38 #include "oops/oop.inline2.hpp"
39 #include "runtime/atomic.inline.hpp"
40 #include "runtime/orderAccess.inline.hpp"
41 #include "trace/traceMacros.hpp"
42 #include "utilities/stack.hpp"
43 #include "utilities/macros.hpp"
44 #if INCLUDE_ALL_GCS
45 #include "gc_implementation/g1/g1SATBCardTableModRefBS.hpp"
46 #include "gc_implementation/parallelScavenge/psParallelCompact.hpp"
47 #include "gc_implementation/parallelScavenge/psPromotionManager.hpp"
48 #include "gc_implementation/parallelScavenge/psScavenge.hpp"
49 #endif // INCLUDE_ALL_GCS
50
51 void Klass::set_name(Symbol* n) {
52 _name = n;
53 if (_name != NULL) _name->increment_refcount();
54 }
55
56 bool Klass::is_subclass_of(const Klass* k) const {
57 // Run up the super chain and check
58 if (this == k) return true;
59
60 Klass* t = const_cast<Klass*>(this)->super();
61
62 while (t != NULL) {
63 if (t == k) return true;
64 t = t->super();
65 }
143
// Placement-style allocator: Klass instances live in the owning class
// loader's metaspace, not on the C heap.  |word_size| is the requested
// size in words; the C++ |size| (sizeof) argument is intentionally
// unused.  CHECK_NULL makes this return NULL to the caller if the
// allocation leaves a pending exception (TRAPS convention).
void* Klass::operator new(size_t size, ClassLoaderData* loader_data, size_t word_size, TRAPS) throw() {
  return Metaspace::allocate(loader_data, word_size, /*read_only*/false,
                             MetaspaceObj::ClassType, CHECK_NULL);
}
148
// Base constructor shared by all Klass variants.  Every field is put
// into a conservative "empty" state; initialize_supers() and the
// concrete subclass constructors fill in the real data later.
Klass::Klass() {
  Klass* k = this;

  // Preinitialize supertype information.
  // A later call to initialize_supers() may update these settings:
  set_super(NULL);
  for (juint i = 0; i < Klass::primary_super_limit(); i++) {
    _primary_supers[i] = NULL;
  }
  set_secondary_supers(NULL);
  set_secondary_super_cache(NULL);
  // Until the real super chain is installed, this klass is its own sole
  // primary super; fast subtype checks consult slot 0.
  _primary_supers[0] = k;
  set_super_check_offset(in_bytes(primary_supers_offset()));

  // The constructor is used from init_self_patching_vtbl_list,
  // which doesn't zero out the memory before calling the constructor.
  // Need to set the field explicitly to not hit an assert that the field
  // should be NULL before setting it.
  _java_mirror = NULL;

  set_modifier_flags(0);
  set_layout_helper(Klass::_lh_neutral_value);  // neither instance nor array yet
  set_name(NULL);
  AccessFlags af;
  af.set_flags(0);
  set_access_flags(af);
  set_subklass(NULL);
  set_next_sibling(NULL);
  set_next_link(NULL);
  TRACE_INIT_ID(this);  // assign an id for event tracing

  // Biased locking: start from the default prototype header with no
  // revocation history recorded.
  set_prototype_header(markOopDesc::prototype());
  set_biased_lock_revocation_count(0);
  set_last_biased_lock_bulk_revocation_time(0);

  // The klass doesn't have any references at this point.
  clear_modified_oops();
  clear_accumulated_modified_oops();
}
188
372 // make ourselves the superklass' first subklass
373 super->set_subklass(this);
374 debug_only(verify();)
375 }
376
// Liveness check used during class unloading: this klass counts as
// alive iff its java mirror is still marked live by |is_alive|.  The
// debug build cross-checks that a live mirror implies a live class
// loader; the converse does not hold for anonymous classes.
bool Klass::is_loader_alive(BoolObjectClosure* is_alive) {
#ifdef ASSERT
  // The class is alive iff the class loader is alive.
  oop loader = class_loader();
  bool loader_alive = (loader == NULL) || is_alive->do_object_b(loader);
#endif // ASSERT

  // The class is alive if its mirror is alive (which should be marked if the
  // loader is alive) unless it's an anonymous class.
  bool mirror_alive = is_alive->do_object_b(java_mirror());
  assert(!mirror_alive || loader_alive, "loader must be alive if the mirror is"
         " but not the other way around with anonymous classes");
  return mirror_alive;
}
391
// Prune dead classes out of the subclass/sibling links of the class
// hierarchy, starting from java.lang.Object.  Classes whose loaders
// are no longer alive are unlinked; when |clean_alive_klasses| is set,
// surviving InstanceKlasses also get their implementors list and
// method data cleaned.
// NOTE(review): a few interior lines of this function are not visible
// in this chunk (the subclass trace block appears truncated); the
// comments below describe only the code that is shown.
void Klass::clean_weak_klass_links(BoolObjectClosure* is_alive, bool clean_alive_klasses) {
  if (!ClassUnloading) {
    // Class unloading disabled: every class stays linked.
    return;
  }

  Klass* root = SystemDictionary::Object_klass();
  Stack<Klass*, mtGC> stack;

  // Iterative depth-first walk over the subclass/sibling tree.
  stack.push(root);
  while (!stack.is_empty()) {
    Klass* current = stack.pop();

    assert(current->is_loader_alive(is_alive), "just checking, this should be live");

    // Find and set the first alive subklass
    Klass* sub = current->subklass();
    while (sub != NULL && !sub->is_loader_alive(is_alive)) {
#ifndef PRODUCT
      if (TraceClassUnloading && WizardMode) {
        ResourceMark rm;
        tty->print_cr("[Unlinking class (subclass) %s]", sub->external_name());
    current->set_subklass(sub);
    if (sub != NULL) {
      stack.push(sub);
    }

    // Find and set the first alive sibling
    Klass* sibling = current->next_sibling();
    while (sibling != NULL && !sibling->is_loader_alive(is_alive)) {
      if (TraceClassUnloading && WizardMode) {
        ResourceMark rm;
        tty->print_cr("[Unlinking class (sibling) %s]", sibling->external_name());
      }
      sibling = sibling->next_sibling();
    }
    current->set_next_sibling(sibling);
    if (sibling != NULL) {
      stack.push(sibling);
    }

    // Clean the implementors list and method data.
    if (clean_alive_klasses && current->oop_is_instance()) {
      InstanceKlass* ik = InstanceKlass::cast(current);
      ik->clean_implementors_list(is_alive);
      ik->clean_method_data(is_alive);
    }
  }
}
444
// Post-write barrier for oop stores into this Klass: mark the klass as
// containing modified oops.  The stored value |v| itself is not needed.
void Klass::klass_update_barrier_set(oop v) {
  record_modified_oops();
}
448
449 // This barrier is used by G1 to remember the old oop values, so
450 // that we don't forget any objects that were live at the snapshot at
451 // the beginning. This function is only used when we write oops into Klasses.
452 void Klass::klass_update_barrier_set_pre(oop* p, oop v) {
453 #if INCLUDE_ALL_GCS
454 if (UseG1GC) {
455 oop obj = *p;
456 if (obj != NULL) {
457 G1SATBCardTableModRefBS::enqueue(obj);
458 }
459 }
460 #endif
461 }
462
463 void Klass::klass_oop_store(oop* p, oop v) {
464 assert(!Universe::heap()->is_in_reserved((void*)p), "Should store pointer into metadata");
465 assert(v == NULL || Universe::heap()->is_in_reserved((void*)v), "Should store pointer to an object");
466
467 // do the store
468 if (always_do_update_barrier) {
469 klass_oop_store((volatile oop*)p, v);
470 } else {
471 klass_update_barrier_set_pre(p, v);
472 *p = v;
473 klass_update_barrier_set(v);
474 }
475 }
476
// Volatile variant of klass_oop_store: same barrier protocol, but the
// store itself is a releasing store (release_store_ptr) so concurrent
// readers observe the barriers' ordering.
void Klass::klass_oop_store(volatile oop* p, oop v) {
  assert(!Universe::heap()->is_in_reserved((void*)p), "Should store pointer into metadata");
  assert(v == NULL || Universe::heap()->is_in_reserved((void*)v), "Should store pointer to an object");

  klass_update_barrier_set_pre((oop*)p, v); // Cast away volatile.
  OrderAccess::release_store_ptr(p, v);
  klass_update_barrier_set(v);
}
485
// Apply |cl| to the oop fields of this Klass.  The base Klass holds
// exactly one oop: the java.lang.Class mirror.
void Klass::oops_do(OopClosure* cl) {
  cl->do_oop(&_java_mirror);
}
489
490 void Klass::remove_unshareable_info() {
491 assert (DumpSharedSpaces, "only called for DumpSharedSpaces");
492
493 set_subklass(NULL);
494 set_next_sibling(NULL);
495 // Clear the java mirror
496 set_java_mirror(NULL);
497 set_next_link(NULL);
498
499 // Null out class_loader_data because we don't share that yet.
500 set_class_loader_data(NULL);
501 }
|