39 #include "gc/shared/gcTrace.hpp"
40 #include "gc/shared/gcTraceTime.inline.hpp"
41 #include "gc/shared/genCollectedHeap.hpp"
42 #include "gc/shared/modRefBarrierSet.hpp"
43 #include "gc/shared/referencePolicy.hpp"
44 #include "gc/shared/space.hpp"
45 #include "oops/instanceRefKlass.hpp"
46 #include "oops/oop.inline.hpp"
47 #include "prims/jvmtiExport.hpp"
48 #include "runtime/atomic.hpp"
49 #include "runtime/biasedLocking.hpp"
50 #include "runtime/fprofiler.hpp"
51 #include "runtime/synchronizer.hpp"
52 #include "runtime/thread.hpp"
53 #include "runtime/vmThread.hpp"
54 #include "utilities/copy.hpp"
55 #include "utilities/events.hpp"
56
57 class HeapRegion;
58
59 bool G1MarkSweep::_archive_check_enabled = false;
60 G1ArchiveRegionMap G1MarkSweep::_archive_region_map;
61
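// Run a full, serial mark-sweep collection. Must only be called at a safepoint;
// wires up the STW reference processor before the individual phases execute.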
void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                      bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

#ifdef ASSERT
  if (G1CollectedHeap::heap()->collector_policy()->should_clear_all_soft_refs()) {
    assert(clear_all_softrefs, "Policy should have been checked earlier");
  }
#endif
  // hook up weak ref data so it can be used during Mark-Sweep
  assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Precondition");

  GenMarkSweep::set_ref_processor(rp);
  rp->setup_policy(clear_all_softrefs);

  // When collecting the permanent generation, Method*s may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();
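
// ... (intervening code elided in this excerpt; the fragment below is the tail
// of the G1SpaceCompactClosure used by mark_sweep_phase4) ...
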
      hr->compact();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (code and comment not fixed for perm removal), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  GCTraceTime(Info, gc, phases) tm("Phase 4: Move objects", gc_timer());

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);
}

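// Turn on archive-object checking: the region map covers the whole reserved
// heap at HeapRegion granularity, so each heap address maps to one map entry.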
void G1MarkSweep::enable_archive_object_check() {
  assert(!_archive_check_enabled, "archive range check already enabled");
  _archive_check_enabled = true;
  size_t length = Universe::heap()->max_capacity();
  _archive_region_map.initialize((HeapWord*)Universe::heap()->base(),
                                 (HeapWord*)Universe::heap()->base() + length,
                                 HeapRegion::GrainBytes);
}

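// Record (or clear) the archive status for every map entry covered by 'range'.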
void G1MarkSweep::set_range_archive(MemRegion range, bool is_archive) {
  assert(_archive_check_enabled, "archive range check not enabled");
  _archive_region_map.set_by_address(range, is_archive);
}

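// Out-of-line slow path of the archive check; the inline fast path is assumed
// to test _archive_check_enabled before calling this.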
bool G1MarkSweep::in_archive_range(oop object) {
  // This is the out-of-line part of is_archive_object test, done separately
  // to avoid additional performance impact when the check is not enabled.
  return _archive_region_map.get_by_address((HeapWord*)object);
}

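// Apply the prepare-compaction closure to every heap region, then let the
// closure update the heap's region sets for any regions it freed along the way.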
void G1MarkSweep::prepare_compaction_work(G1PrepareCompactClosure* blk) {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  g1h->heap_region_iterate(blk);
  blk->update_sets();
}

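// Free a humongous region while preparing for compaction. A temporary free
// list is used (and emptied again immediately) so the master free list is
// left untouched; the region's space is reclaimed by the compaction itself.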
void G1PrepareCompactClosure::free_humongous_region(HeapRegion* hr) {
  HeapWord* end = hr->end();
  FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");

  hr->set_containing_set(NULL);
  _humongous_regions_removed++;

  _g1h->free_humongous_region(hr, &dummy_free_list, false /* skip_remset */);
  prepare_for_compaction(hr, end);
  dummy_free_list.remove_all();
}