1067 inline void oop_oop_iterate_oop_maps_bounded(oop obj, OopClosureType* closure, MemRegion mr);
1068
protected:
// Iterate over all oop fields and metadata of obj, restricted to the
// memory region mr, applying the given closure.  The nv template flag
// selects the non-virtual ("nv") closure-dispatch variant, matching the
// nv_suffix convention of the DECL macros below.  Returns an int
// (presumably the object size in words, as the unbounded iterators
// conventionally do -- confirm against the inline definition).
template <bool nv, class OopClosureType>
inline int oop_oop_iterate_bounded(oop obj, OopClosureType* closure, MemRegion mr);

private:
// Iterate over all oop fields in the oop maps, visiting only fields
// that fall within mr.
// Specialized for [T = oop] or [T = narrowOop].
template <bool nv, typename T, class OopClosureType>
inline void oop_oop_iterate_oop_maps_specialized_bounded(oop obj, OopClosureType* closure, MemRegion mr);

// Iterate over all oop fields in one oop map (map), visiting only
// fields of obj that fall within mr.
template <bool nv, typename T, class OopClosureType>
inline void oop_oop_iterate_oop_map_bounded(OopMapBlock* map, oop obj, OopClosureType* closure, MemRegion mr);
1083
1084
public:

// Declare the per-closure-type GC iteration entry points:
//   oop_oop_iterate<nv_suffix>     -- iterate over the whole object
//   oop_oop_iterate<nv_suffix>_m   -- bounded variant, restricted to mr
// One pair is generated for every closure type supplied by the
// ALL_OOP_OOP_ITERATE_CLOSURES_* macros (defined elsewhere).
#define InstanceKlass_OOP_OOP_ITERATE_DECL(OopClosureType, nv_suffix) \
int oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure); \
int oop_oop_iterate##nv_suffix##_m(oop obj, OopClosureType* closure, MemRegion mr);

ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DECL)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DECL)

#if INCLUDE_ALL_GCS
// Backwards-iteration entry points, only compiled in when all garbage
// collectors are included in the build (INCLUDE_ALL_GCS).
#define InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DECL(OopClosureType, nv_suffix) \
int oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure);

ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DECL)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DECL)
#endif // INCLUDE_ALL_GCS
1101
// Returns _idnum_allocated_count.  NOTE(review): presumably the number
// of method idnum slots allocated for this class (jmethodID support) --
// confirm against the field's definition.
u2 idnum_allocated_count() const { return _idnum_allocated_count; }

public:
// CDS support: mark this class as being in the error state so it is not
// archived as usable.  Only legal while dumping the shared archive.
void set_in_error_state() {
assert(DumpSharedSpaces, "only call this when dumping archive");
_init_state = initialization_error;
}
bool check_sharing_error_state();

private:
// initialization state
#ifdef ASSERT
// Debug builds use an out-of-line definition so the state transition
// can be verified (see the .cpp file).
void set_init_state(ClassState state);
#else
void set_init_state(ClassState state) { _init_state = (u1)state; }
#endif
// Record that this class's bytecodes have been rewritten (sticky bit in
// _misc_flags; there is no corresponding clear operation here).
void set_rewritten() { _misc_flags |= _misc_rewritten; }
// Record the thread associated with class initialization (stores into
// _init_thread; callers presumably pass NULL to clear -- confirm).
void set_init_thread(Thread *thread) { _init_thread = thread; }
|
1067 inline void oop_oop_iterate_oop_maps_bounded(oop obj, OopClosureType* closure, MemRegion mr);
1068
protected:
// Iterate over all oop fields and metadata of obj, restricted to the
// memory region mr, applying the given closure.  The nv template flag
// selects the non-virtual ("nv") closure-dispatch variant.  Returns an
// int (presumably the object size in words, as the unbounded iterators
// conventionally do -- confirm against the inline definition).
template <bool nv, class OopClosureType>
inline int oop_oop_iterate_bounded(oop obj, OopClosureType* closure, MemRegion mr);

private:
// Iterate over all oop fields in the oop maps, visiting only fields
// that fall within mr.
// Specialized for [T = oop] or [T = narrowOop].
template <bool nv, typename T, class OopClosureType>
inline void oop_oop_iterate_oop_maps_specialized_bounded(oop obj, OopClosureType* closure, MemRegion mr);

// Iterate over all oop fields in one oop map (map), visiting only
// fields of obj that fall within mr.
template <bool nv, typename T, class OopClosureType>
inline void oop_oop_iterate_oop_map_bounded(OopMapBlock* map, oop obj, OopClosureType* closure, MemRegion mr);
1083
1084
public:

// Declare the per-closure-type GC iteration entry points.  The
// OOP_OOP_ITERATE_DECL macro is shared across Klass subclasses and is
// defined elsewhere (not visible in this chunk); one set of
// declarations is generated for every closure type supplied by the
// ALL_OOP_OOP_ITERATE_CLOSURES_* macros.
ALL_OOP_OOP_ITERATE_CLOSURES_1(OOP_OOP_ITERATE_DECL)
ALL_OOP_OOP_ITERATE_CLOSURES_2(OOP_OOP_ITERATE_DECL)

#if INCLUDE_ALL_GCS
// Backwards-iteration entry points, only compiled in when all garbage
// collectors are included in the build (INCLUDE_ALL_GCS).
ALL_OOP_OOP_ITERATE_CLOSURES_1(OOP_OOP_ITERATE_BACKWARDS_DECL)
ALL_OOP_OOP_ITERATE_CLOSURES_2(OOP_OOP_ITERATE_BACKWARDS_DECL)
#endif // INCLUDE_ALL_GCS
1094
// Returns _idnum_allocated_count.  NOTE(review): presumably the number
// of method idnum slots allocated for this class (jmethodID support) --
// confirm against the field's definition.
u2 idnum_allocated_count() const { return _idnum_allocated_count; }

public:
// CDS support: mark this class as being in the error state so it is not
// archived as usable.  Only legal while dumping the shared archive.
void set_in_error_state() {
assert(DumpSharedSpaces, "only call this when dumping archive");
_init_state = initialization_error;
}
bool check_sharing_error_state();

private:
// initialization state
#ifdef ASSERT
// Debug builds use an out-of-line definition so the state transition
// can be verified (see the .cpp file).
void set_init_state(ClassState state);
#else
void set_init_state(ClassState state) { _init_state = (u1)state; }
#endif
// Record that this class's bytecodes have been rewritten (sticky bit in
// _misc_flags; there is no corresponding clear operation here).
void set_rewritten() { _misc_flags |= _misc_rewritten; }
// Record the thread associated with class initialization (stores into
// _init_thread; callers presumably pass NULL to clear -- confirm).
void set_init_thread(Thread *thread) { _init_thread = thread; }
|