  }

  gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
}


void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  GCTraceTime(Trace, gc) tm("Phase 2: Compute new object addresses", gc_timer());

  prepare_compaction();
}

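// Helper for phase 3: visits each heap region and updates the oops inside
// its live objects to point at their post-compaction addresses. Humongous
// regions are special-cased because only the "starts humongous" region
// contains an object header; pinned regions are skipped since their
// contents do not move.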
class G1AdjustPointersClosure: public HeapRegionClosure {
 public:
  bool doHeapRegion(HeapRegion* r) {
    if (r->is_humongous()) {
      if (r->is_starts_humongous()) {
        // We must adjust the pointers on the single H object.
        oop obj = oop(r->bottom());
        // point all the oops to the new location
        MarkSweep::adjust_pointers(obj);
      }
    } else if (!r->is_pinned()) {
      // This really ought to be "as_CompactibleSpace"...
      r->adjust_pointers();
    }
    return false;
  }
};

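// Trivially answers "alive" for every object. Used when walking JNI weak
// roots below: by this point dead referents have already been cleared, so
// every remaining weak root may be treated as live.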
class G1AlwaysTrueClosure: public BoolObjectClosure {
 public:
  bool do_object_b(oop p) { return true; }
};
static G1AlwaysTrueClosure always_true;

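// Phase 3 rewrites every remaining reference, in roots as well as in heap
// objects, to the forwarding address computed during phase 2.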
void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Adjust the pointers to reflect the new locations
  GCTraceTime(Trace, gc) tm("Phase 3: Adjust pointers", gc_timer());

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  CodeBlobToOopClosure adjust_code_closure(&GenMarkSweep::adjust_pointer_closure, CodeBlobToOopClosure::FixRelocations);
  {
    G1RootProcessor root_processor(g1h, 1);
    root_processor.process_all_roots(&GenMarkSweep::adjust_pointer_closure,
                                     &GenMarkSweep::adjust_cld_closure,
                                     &adjust_code_closure);
  }

  assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
  g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_pointer_closure);

  // Now adjust pointers in remaining weak roots. (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  JNIHandles::weak_oops_do(&always_true, &GenMarkSweep::adjust_pointer_closure);

  if (G1StringDedup::is_enabled()) {
    G1StringDedup::oops_do(&GenMarkSweep::adjust_pointer_closure);
  }

  GenMarkSweep::adjust_marks();

  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
}

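// Helper for phase 4: performs the actual sliding compaction of each
// region. Humongous objects stay in place and only get their mark word
// restored; pinned regions are left untouched.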
class G1SpaceCompactClosure: public HeapRegionClosure {
 public:
  G1SpaceCompactClosure() {}

  bool doHeapRegion(HeapRegion* hr) {
    if (hr->is_humongous()) {
      if (hr->is_starts_humongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          obj->init_mark();
        } else {
          assert(hr->is_empty(), "Should have been cleared in phase 2.");
        }
      }
      hr->reset_during_compaction();
    } else if (!hr->is_pinned()) {
      hr->compact();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (code and comment not fixed for perm removal), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  GCTraceTime(Trace, gc) tm("Phase 4: Move objects", gc_timer());

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);
}

void G1MarkSweep::enable_archive_object_check() {
  assert(!_archive_check_enabled, "archive range check already enabled");
  _archive_check_enabled = true;

  size_t length = Universe::heap()->max_capacity();
  _archive_region_map.initialize((HeapWord*)Universe::heap()->base(),
                                 (HeapWord*)Universe::heap()->base() + length,
                                 HeapRegion::GrainBytes);
}

void G1PrepareCompactClosure::prepare_for_compaction(HeapRegion* hr, HeapWord* end) {
  // If this is the first live region that we came across which we can compact,
  // initialize the compact point.
  if (!is_cp_initialized()) {
    _cp.space = hr;
    _cp.threshold = hr->initialize_threshold();
  }
  prepare_for_compaction_work(&_cp, hr, end);
}

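// Plans the compaction of a single region and clears the card table for
// the tail of the region that will be unused after objects have moved.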
void G1PrepareCompactClosure::prepare_for_compaction_work(CompactPoint* cp,
                                                          HeapRegion* hr,
                                                          HeapWord* end) {
  hr->prepare_for_compaction(cp);
  // Also clear the part of the card table that will be unused after
  // compaction.
  _mrbs->clear(MemRegion(hr->compaction_top(), end));
}

void G1PrepareCompactClosure::update_sets() {
  // We'll recalculate total used bytes and recreate the free list
  // at the end of the GC, so no point in updating those values here.
  _g1h->remove_from_old_sets(0, _humongous_regions_removed);
}

bool G1PrepareCompactClosure::doHeapRegion(HeapRegion* hr) {
  if (hr->is_humongous()) {
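    // Humongous objects are never moved by the full GC. A live humongous
    // object forwards to itself so that phase 3 leaves references to it
    // unchanged; a dead one has its regions freed immediately.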
    oop obj = oop(hr->humongous_start_region()->bottom());
    if (hr->is_starts_humongous() && obj->is_gc_marked()) {
      obj->forward_to(obj);
    }
    if (!obj->is_gc_marked()) {
      free_humongous_region(hr);
    }
  } else if (!hr->is_pinned()) {
    prepare_for_compaction(hr, hr->end());
  }
  return false;
}