200 "we should have already filtered out humongous regions");
201 assert(_end == orig_end(),
202 "we should have already filtered out humongous regions");
203
204 _in_collection_set = false;
205
206 set_allocation_context(AllocationContext::system());
207 set_young_index_in_cset(-1);
208 uninstall_surv_rate_group();
209 set_free();
210 reset_pre_dummy_top();
211
212 if (!par) {
213 // If this is parallel, this will be done later.
214 HeapRegionRemSet* hrrs = rem_set();
215 if (locked) {
216 hrrs->clear_locked();
217 } else {
218 hrrs->clear();
219 }
220 _claimed = InitialClaimValue;
221 }
222 zero_marked_bytes();
223
224 _offsets.resize(HeapRegion::GrainWords);
225 init_top_at_mark_start();
226 if (clear_space) clear(SpaceDecorator::Mangle);
227 }
228
229 void HeapRegion::par_clear() {
230 assert(used() == 0, "the region should have been already cleared");
231 assert(capacity() == HeapRegion::GrainBytes, "should be back to normal");
232 HeapRegionRemSet* hrrs = rem_set();
233 hrrs->clear();
234 CardTableModRefBS* ct_bs =
235 (CardTableModRefBS*)G1CollectedHeap::heap()->barrier_set();
236 ct_bs->clear(MemRegion(bottom(), end()));
237 }
238
239 void HeapRegion::calc_gc_efficiency() {
240 // GC efficiency is the ratio of how much space would be
277
278 void HeapRegion::clear_humongous() {
279 assert(is_humongous(), "pre-condition");
280
281 if (is_starts_humongous()) {
282 assert(top() <= end(), "pre-condition");
283 set_end(orig_end());
284 if (top() > end()) {
285 // at least one "continues humongous" region after it
286 set_top(end());
287 }
288 } else {
289 // continues humongous
290 assert(end() == orig_end(), "sanity");
291 }
292
293 assert(capacity() == HeapRegion::GrainBytes, "pre-condition");
294 _humongous_start_region = NULL;
295 }
296
// Atomically claim this region for the caller by installing claimValue into
// _claimed with a single compare-and-swap.
// Returns true iff this call performed the transition. Returns false when the
// region already carries claimValue, or when the CAS loses a race (there is
// deliberately no retry: a concurrent change of _claimed between the plain
// read and the CAS simply fails this claim, which callers treat as "another
// worker got it").
bool HeapRegion::claimHeapRegion(jint claimValue) {
  jint current = _claimed;  // racy read; validated by the CAS below
  if (current != claimValue) {
    // Only succeeds if _claimed still holds the value we just observed.
    jint res = Atomic::cmpxchg(claimValue, &_claimed, current);
    if (res == current) {
      return true;
    }
  }
  return false;
}
307
// Construct a HeapRegion covering the address range 'mr'.
//   hrm_index         - this region's index in the heap region manager.
//   sharedOffsetArray - block offset table shared across regions; also handed
//                       to the remembered set allocated below.
//   mr                - the memory range the region covers.
// All list links, marking counters and GC prediction fields start out
// cleared; the claim value starts at InitialClaimValue. The remembered set
// is heap-allocated here, and the remaining setup is delegated to
// initialize(mr).
HeapRegion::HeapRegion(uint hrm_index,
                       G1BlockOffsetSharedArray* sharedOffsetArray,
                       MemRegion mr) :
    G1OffsetTableContigSpace(sharedOffsetArray, mr),
    _hrm_index(hrm_index),
    _allocation_context(AllocationContext::system()),
    _humongous_start_region(NULL),
    _in_collection_set(false),
    _next_in_special_set(NULL),
    _claimed(InitialClaimValue), _evacuation_failed(false),
    _prev_marked_bytes(0), _next_marked_bytes(0), _gc_efficiency(0.0),
    _next_young_region(NULL),
    _next_dirty_cards_region(NULL), _next(NULL), _prev(NULL),
#ifdef ASSERT
    _containing_set(NULL),
#endif // ASSERT
    _young_index_in_cset(-1), _surv_rate_group(NULL), _age_index(-1),
    _rem_set(NULL), _recorded_rs_length(0), _predicted_elapsed_time_ms(0),
    _predicted_bytes_to_copy(0)
{
  _rem_set = new HeapRegionRemSet(sharedOffsetArray, this);
  // The remembered set relies on at least one parallel chunk existing.
  assert(HeapRegionRemSet::num_par_rem_sets() > 0, "Invariant.");

  initialize(mr);
}
333
334 void HeapRegion::initialize(MemRegion mr, bool clear_space, bool mangle_space) {
335 assert(_rem_set->is_empty(), "Remembered set must be empty");
336
337 G1OffsetTableContigSpace::initialize(mr, clear_space, mangle_space);
|
200 "we should have already filtered out humongous regions");
201 assert(_end == orig_end(),
202 "we should have already filtered out humongous regions");
203
204 _in_collection_set = false;
205
206 set_allocation_context(AllocationContext::system());
207 set_young_index_in_cset(-1);
208 uninstall_surv_rate_group();
209 set_free();
210 reset_pre_dummy_top();
211
212 if (!par) {
213 // If this is parallel, this will be done later.
214 HeapRegionRemSet* hrrs = rem_set();
215 if (locked) {
216 hrrs->clear_locked();
217 } else {
218 hrrs->clear();
219 }
220 }
221 zero_marked_bytes();
222
223 _offsets.resize(HeapRegion::GrainWords);
224 init_top_at_mark_start();
225 if (clear_space) clear(SpaceDecorator::Mangle);
226 }
227
228 void HeapRegion::par_clear() {
229 assert(used() == 0, "the region should have been already cleared");
230 assert(capacity() == HeapRegion::GrainBytes, "should be back to normal");
231 HeapRegionRemSet* hrrs = rem_set();
232 hrrs->clear();
233 CardTableModRefBS* ct_bs =
234 (CardTableModRefBS*)G1CollectedHeap::heap()->barrier_set();
235 ct_bs->clear(MemRegion(bottom(), end()));
236 }
237
238 void HeapRegion::calc_gc_efficiency() {
239 // GC efficiency is the ratio of how much space would be
276
277 void HeapRegion::clear_humongous() {
278 assert(is_humongous(), "pre-condition");
279
280 if (is_starts_humongous()) {
281 assert(top() <= end(), "pre-condition");
282 set_end(orig_end());
283 if (top() > end()) {
284 // at least one "continues humongous" region after it
285 set_top(end());
286 }
287 } else {
288 // continues humongous
289 assert(end() == orig_end(), "sanity");
290 }
291
292 assert(capacity() == HeapRegion::GrainBytes, "pre-condition");
293 _humongous_start_region = NULL;
294 }
295
// Construct a HeapRegion covering the address range 'mr'.
//   hrm_index         - this region's index in the heap region manager.
//   sharedOffsetArray - block offset table shared across regions; also handed
//                       to the remembered set allocated below.
//   mr                - the memory range the region covers.
// All list links, marking counters and GC prediction fields start out
// cleared. The remembered set is heap-allocated here, and the remaining
// setup is delegated to initialize(mr).
HeapRegion::HeapRegion(uint hrm_index,
                       G1BlockOffsetSharedArray* sharedOffsetArray,
                       MemRegion mr) :
    G1OffsetTableContigSpace(sharedOffsetArray, mr),
    _hrm_index(hrm_index),
    _allocation_context(AllocationContext::system()),
    _humongous_start_region(NULL),
    _in_collection_set(false),
    _next_in_special_set(NULL),
    _evacuation_failed(false),
    _prev_marked_bytes(0), _next_marked_bytes(0), _gc_efficiency(0.0),
    _next_young_region(NULL),
    _next_dirty_cards_region(NULL), _next(NULL), _prev(NULL),
#ifdef ASSERT
    _containing_set(NULL),
#endif // ASSERT
    _young_index_in_cset(-1), _surv_rate_group(NULL), _age_index(-1),
    _rem_set(NULL), _recorded_rs_length(0), _predicted_elapsed_time_ms(0),
    _predicted_bytes_to_copy(0)
{
  _rem_set = new HeapRegionRemSet(sharedOffsetArray, this);
  // The remembered set relies on at least one parallel chunk existing.
  assert(HeapRegionRemSet::num_par_rem_sets() > 0, "Invariant.");

  initialize(mr);
}
321
322 void HeapRegion::initialize(MemRegion mr, bool clear_space, bool mangle_space) {
323 assert(_rem_set->is_empty(), "Remembered set must be empty");
324
325 G1OffsetTableContigSpace::initialize(mr, clear_space, mangle_space);
|