  // JVM-internal oops.
  inline void set_prototype_header(markOop header);
  static ByteSize prototype_header_offset() { return in_ByteSize(offset_of(Klass, _prototype_header)); }

  int biased_lock_revocation_count() const { return (int) _biased_lock_revocation_count; }
  // Atomically increments biased_lock_revocation_count and returns the updated value.
  int atomic_incr_biased_lock_revocation_count();
  void set_biased_lock_revocation_count(int val) { _biased_lock_revocation_count = (jint) val; }
  jlong last_biased_lock_bulk_revocation_time() { return _last_biased_lock_bulk_revocation_time; }
  void set_last_biased_lock_bulk_revocation_time(jlong cur_time) { _last_biased_lock_bulk_revocation_time = cur_time; }

  TRACE_DEFINE_TRACE_ID_METHODS;

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return ClassType; }

  // Iff the class loader (or mirror, for anonymous classes) is alive, the
  // Klass is considered alive; the loader has already been marked as
  // unloading by the time this is queried.
  bool is_loader_alive() const { return !class_loader_data()->is_unloading(); }

-  static void clean_weak_klass_links(bool clean_alive_klasses = true);
-  static void clean_subklass_tree() {
-    clean_weak_klass_links(false /* clean_alive_klasses */);
-  }
+  static void clean_weak_klass_links(bool unloading_occurred, bool clean_alive_klasses = true);
+  static void clean_subklass_tree() {
+    clean_weak_klass_links(/* unloading_occurred */ true, /* clean_alive_klasses */ false);
+  }
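  // Illustration (not part of klass.hpp): the diff above threads an explicit
  // unloading_occurred flag through the cleanup entry point. A minimal sketch,
  // assuming a hypothetical caller that learned from its marking phase whether
  // any class unloading actually happened:
  //
  //   bool unloading_occurred = ...;                      // result of marking (hypothetical)
  //   Klass::clean_weak_klass_links(unloading_occurred);  // clean_alive_klasses defaults to true
  //
  // clean_subklass_tree() can hard-code unloading_occurred = true and pass
  // clean_alive_klasses = false: it only needs dead entries pruned from the
  // subklass/sibling links, not a full cleanup of the weak links inside live
  // klasses.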

  // GC specific object visitors
  //
#if INCLUDE_ALL_GCS
  // Parallel Scavenge
  virtual void oop_ps_push_contents(  oop obj, PSPromotionManager* pm)   = 0;
  // Parallel Compact
  virtual void oop_pc_follow_contents(oop obj, ParCompactionManager* cm) = 0;
  virtual void oop_pc_update_pointers(oop obj, ParCompactionManager* cm) = 0;
#endif

  // Iterators specialized to particular subtypes
  // of ExtendedOopClosure, to avoid closure virtual calls.
#define Klass_OOP_OOP_ITERATE_DECL(OopClosureType, nv_suffix)                                          \
  virtual void oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) = 0;                       \
  /* Iterates "closure" over all the oops in "obj" (of type "this") within "mr". */                    \
  virtual void oop_oop_iterate_bounded##nv_suffix(oop obj, OopClosureType* closure, MemRegion mr) = 0;

  ALL_OOP_OOP_ITERATE_CLOSURES_1(Klass_OOP_OOP_ITERATE_DECL)
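Two of the declarations above are worth unpacking.

First, Klass_OOP_OOP_ITERATE_DECL is stamped out once per closure type by
ALL_OOP_OOP_ITERATE_CLOSURES_1, giving each (closure type, suffix) pairing its
own pure-virtual entry point so the closure's do_oop hooks can be invoked
without virtual dispatch. Assuming that macro supplies the generic
(ExtendedOopClosure, _v) pairing, as HotSpot's specialized_oop_closures.hpp
does in this era, one instantiation expands to:

  virtual void oop_oop_iterate_v(oop obj, ExtendedOopClosure* closure) = 0;
  /* Iterates "closure" over all the oops in "obj" (of type "this") within "mr". */
  virtual void oop_oop_iterate_bounded_v(oop obj, ExtendedOopClosure* closure, MemRegion mr) = 0;

Second, atomic_incr_biased_lock_revocation_count() is only declared here. A
minimal sketch of a matching definition in klass.cpp, assuming the
Atomic::add(add_value, dest) overload HotSpot shipped in this era:

  int Klass::atomic_incr_biased_lock_revocation_count() {
    // Bump the per-class revocation counter and return the new value; the
    // biased-locking heuristics compare it against the
    // BiasedLockingBulkRebiasThreshold / BiasedLockingBulkRevokeThreshold
    // flags when deciding between per-object and bulk revocation.
    return (int) Atomic::add(1, &_biased_lock_revocation_count);
  }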