// Biased-locking revocation bookkeeping: a per-klass revocation counter and
// the timestamp of the last bulk revocation.  The setter narrows its int
// argument to the jint field; the time is a raw jlong (clock/units are not
// visible in this chunk).
627 void set_biased_lock_revocation_count(int val) { _biased_lock_revocation_count = (jint) val; }
628 jlong last_biased_lock_bulk_revocation_time() { return _last_biased_lock_bulk_revocation_time; }
629 void set_last_biased_lock_bulk_revocation_time(jlong cur_time) { _last_biased_lock_bulk_revocation_time = cur_time; }
630
// Event-tracing id accessors generated by this TRACE macro (expansion not
// visible in this chunk).
631 TRACE_DEFINE_TRACE_ID_METHODS;
632
// Metaspace/CDS support: visit the metaspace pointers embedded in this
// object; a Klass reports itself as ClassType for MetaspaceObj bookkeeping.
633 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
634 virtual MetaspaceObj::Type type() const { return ClassType; }
635
636 // Iff the class loader (or mirror for anonymous classes) is alive the
637 // Klass is considered alive. Has already been marked as unloading.
// (This simply reads the ClassLoaderData's is_unloading flag, which the
// unloading marking pass has already computed.)
638 bool is_loader_alive() const { return !class_loader_data()->is_unloading(); }
639
// Drop weak references held from klasses whose loaders died; when
// clean_alive_klasses is true (the default) alive klasses are cleaned too.
640 static void clean_weak_klass_links(bool clean_alive_klasses = true);
// Prune only the subklass tree: delegates with clean_alive_klasses == false.
641 static void clean_subklass_tree() {
642 clean_weak_klass_links(false /* clean_alive_klasses */);
643 }
644
645 // GC specific object visitors
646 //
647 #if INCLUDE_ALL_GCS
648 // Parallel Scavenge
649 virtual void oop_ps_push_contents( oop obj, PSPromotionManager* pm) = 0;
650 // Parallel Compact
651 virtual void oop_pc_follow_contents(oop obj, ParCompactionManager* cm) = 0;
652 virtual void oop_pc_update_pointers(oop obj, ParCompactionManager* cm) = 0;
653 #endif
654
655 // Iterators specialized to particular subtypes
656 // of ExtendedOopClosure, to avoid closure virtual calls.
657 #define Klass_OOP_OOP_ITERATE_DECL(OopClosureType, nv_suffix) \
658 virtual void oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) = 0; \
659 /* Iterates "closure" over all the oops in "obj" (of type "this") within "mr". */ \
660 virtual void oop_oop_iterate_bounded##nv_suffix(oop obj, OopClosureType* closure, MemRegion mr) = 0;
661
// Expand one pure-virtual pair per closure type listed by these ALL_*
// macros (defined elsewhere); concrete subclasses override every expansion.
662 ALL_OOP_OOP_ITERATE_CLOSURES_1(Klass_OOP_OOP_ITERATE_DECL)
663 ALL_OOP_OOP_ITERATE_CLOSURES_2(Klass_OOP_OOP_ITERATE_DECL)
664
// Backwards-iteration variants are only declared when all GCs are built in
// (presumably only some collectors iterate object fields in reverse).
665 #if INCLUDE_ALL_GCS
666 #define Klass_OOP_OOP_ITERATE_DECL_BACKWARDS(OopClosureType, nv_suffix) \
667 virtual void oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) = 0;
668
669 ALL_OOP_OOP_ITERATE_CLOSURES_1(Klass_OOP_OOP_ITERATE_DECL_BACKWARDS)
670 ALL_OOP_OOP_ITERATE_CLOSURES_2(Klass_OOP_OOP_ITERATE_DECL_BACKWARDS)
671 #endif // INCLUDE_ALL_GCS
672
// Default no-op: array klasses override this to apply f along their
// element/array klass chain.
673 virtual void array_klasses_do(void f(Klass* k)) {}
674
675 // Return self, except for abstract classes with exactly 1
676 // implementor. Then return the 1 concrete implementation.
677 Klass *up_cast_abstract();
678
679 // klass name
680 Symbol* name() const { return _name; }
681 void set_name(Symbol* n);
682
683 public:
684 // jvm support
685 virtual jint compute_modifier_flags(TRAPS) const;
686
687 // JVMTI support
688 virtual jint jvmti_class_status() const;
689
690 // Printing
691 virtual void print_on(outputStream* st) const;
712 // klass encoding for klass pointer in objects.
// Compressed class-pointer helpers: the _not_null variants assume a
// non-null argument (presumably skipping the null check) -- callers must
// guarantee non-null; the plain variants accept null/0.
713 static narrowKlass encode_klass_not_null(Klass* v);
714 static narrowKlass encode_klass(Klass* v);
715
716 static Klass* decode_klass_not_null(narrowKlass v);
717 static Klass* decode_klass(narrowKlass v);
718 };
719
720 // Helper to convert the oop iterate macro suffixes into bool values that can be used by template functions.
// nv_suffix is either _nv (specialized, non-virtual closure dispatch) or _v
// (virtual dispatch); token pasting selects the matching bool constant.
721 #define nvs_nv_to_bool true
722 #define nvs_v_to_bool false
723 #define nvs_to_bool(nv_suffix) nvs##nv_suffix##_to_bool
724
725 // Oop iteration macros for declarations.
726 // Used to generate declarations in the *Klass header files.
727
728 #define OOP_OOP_ITERATE_DECL(OopClosureType, nv_suffix) \
729 void oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure); \
730 void oop_oop_iterate_bounded##nv_suffix(oop obj, OopClosureType* closure, MemRegion mr);
731
732 #if INCLUDE_ALL_GCS
733 #define OOP_OOP_ITERATE_DECL_BACKWARDS(OopClosureType, nv_suffix) \
734 void oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure);
735 #endif // INCLUDE_ALL_GCS
736
737
738 // Oop iteration macros for definitions.
739 // Used to generate definitions in the *Klass.inline.hpp files.
740
// Forwards to the klass's template member oop_oop_iterate<nv>(obj, closure).
741 #define OOP_OOP_ITERATE_DEFN(KlassType, OopClosureType, nv_suffix) \
742 void KlassType::oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \
743 oop_oop_iterate<nvs_to_bool(nv_suffix)>(obj, closure); \
744 }
745
// When ALL_GCS is not built, the backwards definition expands to nothing so
// shared *Klass.inline.hpp files can invoke the macro unconditionally.
746 #if INCLUDE_ALL_GCS
747 #define OOP_OOP_ITERATE_DEFN_BACKWARDS(KlassType, OopClosureType, nv_suffix) \
748 void KlassType::oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) { \
749 oop_oop_iterate_reverse<nvs_to_bool(nv_suffix)>(obj, closure); \
750 }
751 #else
752 #define OOP_OOP_ITERATE_DEFN_BACKWARDS(KlassType, OopClosureType, nv_suffix)
753 #endif
754
755 #define OOP_OOP_ITERATE_DEFN_BOUNDED(KlassType, OopClosureType, nv_suffix) \
756 void KlassType::oop_oop_iterate_bounded##nv_suffix(oop obj, OopClosureType* closure, MemRegion mr) { \
757 oop_oop_iterate_bounded<nvs_to_bool(nv_suffix)>(obj, closure, mr); \
758 }
759
760 #endif // SHARE_VM_OOPS_KLASS_HPP
|
// Biased-locking revocation bookkeeping: a per-klass revocation counter and
// the timestamp of the last bulk revocation.  The setter narrows its int
// argument to the jint field; the time is a raw jlong (clock/units are not
// visible in this chunk).
627 void set_biased_lock_revocation_count(int val) { _biased_lock_revocation_count = (jint) val; }
628 jlong last_biased_lock_bulk_revocation_time() { return _last_biased_lock_bulk_revocation_time; }
629 void set_last_biased_lock_bulk_revocation_time(jlong cur_time) { _last_biased_lock_bulk_revocation_time = cur_time; }
630
// Event-tracing id accessors generated by this TRACE macro (expansion not
// visible in this chunk).
631 TRACE_DEFINE_TRACE_ID_METHODS;
632
// Metaspace/CDS support: visit the metaspace pointers embedded in this
// object; a Klass reports itself as ClassType for MetaspaceObj bookkeeping.
633 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
634 virtual MetaspaceObj::Type type() const { return ClassType; }
635
636 // Iff the class loader (or mirror for anonymous classes) is alive the
637 // Klass is considered alive. Has already been marked as unloading.
// (This simply reads the ClassLoaderData's is_unloading flag, which the
// unloading marking pass has already computed.)
638 bool is_loader_alive() const { return !class_loader_data()->is_unloading(); }
639
// Drop weak references held from klasses whose loaders died; when
// clean_alive_klasses is true (the default) alive klasses are cleaned too.
640 static void clean_weak_klass_links(bool clean_alive_klasses = true);
// Prune only the subklass tree: delegates with clean_alive_klasses == false.
641 static void clean_subklass_tree() {
642 clean_weak_klass_links(false /* clean_alive_klasses */);
643 }
644
645 // GC specific object visitors
646 //
647 #if INCLUDE_PARALLELGC
648 // Parallel Scavenge
649 virtual void oop_ps_push_contents( oop obj, PSPromotionManager* pm) = 0;
650 // Parallel Compact
651 virtual void oop_pc_follow_contents(oop obj, ParCompactionManager* cm) = 0;
652 virtual void oop_pc_update_pointers(oop obj, ParCompactionManager* cm) = 0;
653 #endif
654
655 // Iterators specialized to particular subtypes
656 // of ExtendedOopClosure, to avoid closure virtual calls.
657 #define Klass_OOP_OOP_ITERATE_DECL(OopClosureType, nv_suffix) \
658 virtual void oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) = 0; \
659 /* Iterates "closure" over all the oops in "obj" (of type "this") within "mr". */ \
660 virtual void oop_oop_iterate_bounded##nv_suffix(oop obj, OopClosureType* closure, MemRegion mr) = 0;
661
// Expand one pure-virtual pair per closure type listed by these ALL_*
// macros (defined elsewhere); concrete subclasses override every expansion.
662 ALL_OOP_OOP_ITERATE_CLOSURES_1(Klass_OOP_OOP_ITERATE_DECL)
663 ALL_OOP_OOP_ITERATE_CLOSURES_2(Klass_OOP_OOP_ITERATE_DECL)
664
// Backwards-iteration variants are gated by their own feature flag
// (presumably set only for collectors that iterate object fields in reverse).
665 #if INCLUDE_OOP_OOP_ITERATE_BACKWARDS
666 #define Klass_OOP_OOP_ITERATE_DECL_BACKWARDS(OopClosureType, nv_suffix) \
667 virtual void oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) = 0;
668
669 ALL_OOP_OOP_ITERATE_CLOSURES_1(Klass_OOP_OOP_ITERATE_DECL_BACKWARDS)
670 ALL_OOP_OOP_ITERATE_CLOSURES_2(Klass_OOP_OOP_ITERATE_DECL_BACKWARDS)
671 #endif
672
// Default no-op: array klasses override this to apply f along their
// element/array klass chain.
673 virtual void array_klasses_do(void f(Klass* k)) {}
674
675 // Return self, except for abstract classes with exactly 1
676 // implementor. Then return the 1 concrete implementation.
677 Klass *up_cast_abstract();
678
679 // klass name
680 Symbol* name() const { return _name; }
681 void set_name(Symbol* n);
682
683 public:
684 // jvm support
685 virtual jint compute_modifier_flags(TRAPS) const;
686
687 // JVMTI support
688 virtual jint jvmti_class_status() const;
689
690 // Printing
691 virtual void print_on(outputStream* st) const;
712 // klass encoding for klass pointer in objects.
// Compressed class-pointer helpers: the _not_null variants assume a
// non-null argument (presumably skipping the null check) -- callers must
// guarantee non-null; the plain variants accept null/0.
713 static narrowKlass encode_klass_not_null(Klass* v);
714 static narrowKlass encode_klass(Klass* v);
715
716 static Klass* decode_klass_not_null(narrowKlass v);
717 static Klass* decode_klass(narrowKlass v);
718 };
719
720 // Helper to convert the oop iterate macro suffixes into bool values that can be used by template functions.
// nv_suffix is either _nv (specialized, non-virtual closure dispatch) or _v
// (virtual dispatch); token pasting selects the matching bool constant.
721 #define nvs_nv_to_bool true
722 #define nvs_v_to_bool false
723 #define nvs_to_bool(nv_suffix) nvs##nv_suffix##_to_bool
724
725 // Oop iteration macros for declarations.
726 // Used to generate declarations in the *Klass header files.
727
728 #define OOP_OOP_ITERATE_DECL(OopClosureType, nv_suffix) \
729 void oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure); \
730 void oop_oop_iterate_bounded##nv_suffix(oop obj, OopClosureType* closure, MemRegion mr);
731
732 #if INCLUDE_OOP_OOP_ITERATE_BACKWARDS
733 #define OOP_OOP_ITERATE_DECL_BACKWARDS(OopClosureType, nv_suffix) \
734 void oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure);
735 #endif
736
737
738 // Oop iteration macros for definitions.
739 // Used to generate definitions in the *Klass.inline.hpp files.
740
// Forwards to the klass's template member oop_oop_iterate<nv>(obj, closure).
741 #define OOP_OOP_ITERATE_DEFN(KlassType, OopClosureType, nv_suffix) \
742 void KlassType::oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \
743 oop_oop_iterate<nvs_to_bool(nv_suffix)>(obj, closure); \
744 }
745
// When backwards iteration is not built, the definition expands to nothing
// so shared *Klass.inline.hpp files can invoke the macro unconditionally.
746 #if INCLUDE_OOP_OOP_ITERATE_BACKWARDS
747 #define OOP_OOP_ITERATE_DEFN_BACKWARDS(KlassType, OopClosureType, nv_suffix) \
748 void KlassType::oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) { \
749 oop_oop_iterate_reverse<nvs_to_bool(nv_suffix)>(obj, closure); \
750 }
751 #else
752 #define OOP_OOP_ITERATE_DEFN_BACKWARDS(KlassType, OopClosureType, nv_suffix)
753 #endif
754
755 #define OOP_OOP_ITERATE_DEFN_BOUNDED(KlassType, OopClosureType, nv_suffix) \
756 void KlassType::oop_oop_iterate_bounded##nv_suffix(oop obj, OopClosureType* closure, MemRegion mr) { \
757 oop_oop_iterate_bounded<nvs_to_bool(nv_suffix)>(obj, closure, mr); \
758 }
759
760 #endif // SHARE_VM_OOPS_KLASS_HPP
|