122 // The iteration is only over objects allocated at the start of the
123 // iterations; objects allocated as a result of applying the closure are
124 // not included.
// NOTE(review): name suggests this applies "cl" to references in "sp" that
// point into younger generations — confirm against the collector implementation.
125 void younger_refs_in_space_iterate(Space* sp, OopsInGenClosure* cl);
126
127 public:
128 // The set of possible generation kinds.
129 enum Name {
130 DefNew,               // presumably the serial young generation (DefNewGeneration) — confirm
131 ParNew,               // presumably the parallel young generation (ParNewGeneration) — confirm
132 MarkSweepCompact,     // presumably the serial mark-sweep-compact old generation — confirm
133 ConcurrentMarkSweep,  // presumably the CMS old generation — confirm
134 Other                 // fallback kind; the default returned by Generation::kind()
135 };
136
137 enum SomePublicConstants {
138 // Generations are GenGrain-aligned and have sizes that are multiples of
139 // GenGrain.
140 // Note: on 32-bit ARM we add 1 bit for card_table_base to be properly aligned
141 // (we expect its low byte to be zero - see implementation of post_barrier)
// NOTE(review): changed ARM_ONLY to ARM32_ONLY — the extra alignment bit is a
// card-table requirement specific to 32-bit ARM (AArch64 does not need it),
// and the other copy of this enum in this file already uses ARM32_ONLY;
// keep the two copies in sync.
142 LogOfGenGrain = 16 ARM32_ONLY(+1),
143 GenGrain = 1 << LogOfGenGrain
144 };
145
146 // allocate and initialize ("weak") refs processing support
147 virtual void ref_processor_init();
// Installs this generation's ReferenceProcessor. May be called at most once:
// the assert guarantees an existing processor is never silently clobbered.
148 void set_ref_processor(ReferenceProcessor* rp) {
149 assert(_ref_processor == NULL, "clobbering existing _ref_processor");
150 _ref_processor = rp;
151 }
152
// Identifies the concrete generation kind; subclasses override. The base
// class defaults to Generation::Other.
153 virtual Generation::Name kind() { return Generation::Other; }
154 GenerationSpec* spec();
155
156 // This properly belongs in the collector, but for now this
157 // will do.
// Defaults: reference discovery is atomic and not multi-threaded; collectors
// with concurrent or parallel discovery override these.
158 virtual bool refs_discovery_is_atomic() const { return true; }
159 virtual bool refs_discovery_is_mt() const { return false; }
160
161 // Space enquiries (results in bytes)
162 virtual size_t capacity() const = 0; // The maximum number of object bytes the
|
122 // The iteration is only over objects allocated at the start of the
123 // iterations; objects allocated as a result of applying the closure are
124 // not included.
// NOTE(review): name suggests this applies "cl" to references in "sp" that
// point into younger generations — confirm against the collector implementation.
125 void younger_refs_in_space_iterate(Space* sp, OopsInGenClosure* cl);
126
127 public:
128 // The set of possible generation kinds.
129 enum Name {
130 DefNew,               // presumably the serial young generation (DefNewGeneration) — confirm
131 ParNew,               // presumably the parallel young generation (ParNewGeneration) — confirm
132 MarkSweepCompact,     // presumably the serial mark-sweep-compact old generation — confirm
133 ConcurrentMarkSweep,  // presumably the CMS old generation — confirm
134 Other                 // fallback kind; the default returned by Generation::kind()
135 };
136
137 enum SomePublicConstants {
138 // Generations are GenGrain-aligned and have sizes that are multiples of
139 // GenGrain.
140 // Note: on 32-bit ARM we add 1 bit for card_table_base to be properly aligned
141 // (we expect its low byte to be zero - see implementation of post_barrier)
142 LogOfGenGrain = 16 ARM32_ONLY(+1),
143 GenGrain = 1 << LogOfGenGrain
144 };
145
146 // allocate and initialize ("weak") refs processing support
147 virtual void ref_processor_init();
// Installs this generation's ReferenceProcessor. May be called at most once:
// the assert guarantees an existing processor is never silently clobbered.
148 void set_ref_processor(ReferenceProcessor* rp) {
149 assert(_ref_processor == NULL, "clobbering existing _ref_processor");
150 _ref_processor = rp;
151 }
152
// Identifies the concrete generation kind; subclasses override. The base
// class defaults to Generation::Other.
153 virtual Generation::Name kind() { return Generation::Other; }
154 GenerationSpec* spec();
155
156 // This properly belongs in the collector, but for now this
157 // will do.
// Defaults: reference discovery is atomic and not multi-threaded; collectors
// with concurrent or parallel discovery override these.
158 virtual bool refs_discovery_is_atomic() const { return true; }
159 virtual bool refs_discovery_is_mt() const { return false; }
160
161 // Space enquiries (results in bytes)
162 virtual size_t capacity() const = 0; // The maximum number of object bytes the
|