57 }
58
// Update entries in the DerivedPointerTable after objects have moved.
// Compiled only when a compiler that records derived pointers (C2 or
// JVMCI) is built in; otherwise the table does not exist and this is a no-op.
static void update_derived_pointers() {
#if defined(COMPILER2) || INCLUDE_JVMCI
  DerivedPointerTable::update_pointers();
#endif
}
64
// Set up the full-GC driver: the marking bitmap, per-worker marker and
// compaction-point arrays, and the shared task queue sets used during the
// mark phase. Must be constructed inside a safepoint.
// NOTE(review): initializer order must follow the member declaration order
// in the header; later initializers read _num_workers.
G1FullCollector::G1FullCollector(G1FullGCScope* scope,
                                 ReferenceProcessor* reference_processor,
                                 G1CMBitMap* bitmap,
                                 uint workers) :
    _scope(scope),
    _num_workers(workers),
    _mark_bitmap(bitmap),
    _oop_queue_set(_num_workers),    // one oop queue per worker
    _array_queue_set(_num_workers),  // one objArray queue per worker
    _preserved_marks_set(true),      // NOTE(review): 'true' presumably selects in-C-heap storage — confirm against PreservedMarksSet
    _reference_processor(reference_processor),
    _serial_compaction_point(),
    // Installs NULL as the reference processor's is_alive closure for the
    // duration of this collector; assumed restored by the mutator's
    // destructor — confirm.
    _is_alive_mutator(_reference_processor, NULL) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  _preserved_marks_set.init(_num_workers);
  // Raw C-heap arrays of per-worker state; released in the destructor.
  _markers = NEW_C_HEAP_ARRAY(G1FullGCMarker*, _num_workers, mtGC);
  _compaction_points = NEW_C_HEAP_ARRAY(G1FullGCCompactionPoint*, _num_workers, mtGC);
  for (uint i = 0; i < _num_workers; i++) {
    _markers[i] = new G1FullGCMarker(i, _preserved_marks_set.get(i), mark_bitmap());
    _compaction_points[i] = new G1FullGCCompactionPoint();
    // Register each marker's stacks so they are visible to all workers
    // through the shared queue sets.
    _oop_queue_set.register_queue(i, marker(i)->oop_stack());
    _array_queue_set.register_queue(i, marker(i)->objarray_stack());
  }
}
90
91 G1FullCollector::~G1FullCollector() {
92 for (uint i = 0; i < _num_workers; i++) {
93 delete _markers[i];
94 delete _compaction_points[i];
95 }
96 FREE_C_HEAP_ARRAY(G1FullGCMarker*, _markers);
97 FREE_C_HEAP_ARRAY(G1FullGCCompactionPoint*, _compaction_points);
136 update_derived_pointers();
137
138 BiasedLocking::restore_marks();
139 CodeCache::gc_epilogue();
140 JvmtiExport::gc_epilogue();
141 }
142
143 void G1FullCollector::phase1_mark_live_objects() {
144 // Recursively traverse all live objects and mark them.
145 GCTraceTime(Info, gc, phases) info("Phase 1: Mark live objects", scope()->timer());
146
147 // Do the actual marking.
148 G1FullGCMarkTask marking_task(this);
149 run_task(&marking_task);
150
151 // Process references discovered during marking.
152 G1FullGCReferenceProcessingExecutor reference_processing(this);
153 reference_processing.execute(scope()->timer(), scope()->tracer());
154
155 // Weak oops cleanup.
156 G1IsAliveClosure is_alive(mark_bitmap());
157 {
158 GCTraceTime(Debug, gc, phases) trace("Phase 1: Weak Processing", scope()->timer());
159 WeakProcessor::weak_oops_do(&is_alive, &do_nothing_cl);
160 }
161
162 // Class unloading and cleanup.
163 if (ClassUnloading) {
164 GCTraceTime(Debug, gc, phases) debug("Phase 1: Class Unloading and Cleanup", scope()->timer());
165 // Unload classes and purge the SystemDictionary.
166 bool purged_class = SystemDictionary::do_unloading(&is_alive, scope()->timer());
167 G1CollectedHeap::heap()->complete_cleaning(&is_alive, purged_class);
168 } else {
169 GCTraceTime(Debug, gc, phases) debug("Phase 1: String and Symbol Tables Cleanup", scope()->timer());
170 // If no class unloading just clean out strings and symbols.
171 G1CollectedHeap::heap()->partial_cleaning(&is_alive, true, true, G1StringDedup::is_enabled());
172 }
173
174 scope()->tracer()->report_object_count_after_gc(&is_alive);
175 }
176
177 void G1FullCollector::prepare_compaction_common() {
178 G1FullGCPrepareTask task(this);
179 run_task(&task);
180
181 // To avoid OOM when there is memory left.
182 if (!task.has_freed_regions()) {
183 task.prepare_serial_compaction();
184 }
185 }
186
187 void G1FullCollector::phase2_prepare_compaction() {
188 GCTraceTime(Info, gc, phases) info("Phase 2: Prepare for compaction", scope()->timer());
189 prepare_compaction_ext(); // Will call prepare_compaction_common() above.
190 }
191
192 void G1FullCollector::phase3_adjust_pointers() {
193 // Adjust the pointers to reflect the new locations
194 GCTraceTime(Info, gc, phases) info("Phase 3: Adjust pointers and remembered sets", scope()->timer());
|
57 }
58
// Update entries in the DerivedPointerTable after objects have moved.
// Compiled only when a compiler that records derived pointers (C2 or
// JVMCI) is built in; otherwise the table does not exist and this is a no-op.
static void update_derived_pointers() {
#if defined(COMPILER2) || INCLUDE_JVMCI
  DerivedPointerTable::update_pointers();
#endif
}
64
// Set up the full-GC driver: the marking bitmap, the bitmap-backed
// liveness closure, per-worker marker and compaction-point arrays, and
// the shared task queue sets used during the mark phase. Must be
// constructed inside a safepoint.
// NOTE(review): initializer order must follow the member declaration order
// in the header; later initializers read _num_workers and _mark_bitmap.
G1FullCollector::G1FullCollector(G1FullGCScope* scope,
                                 ReferenceProcessor* reference_processor,
                                 G1CMBitMap* bitmap,
                                 uint workers) :
    _scope(scope),
    _num_workers(workers),
    _mark_bitmap(bitmap),
    _oop_queue_set(_num_workers),    // one oop queue per worker
    _array_queue_set(_num_workers),  // one objArray queue per worker
    _preserved_marks_set(true),      // NOTE(review): 'true' presumably selects in-C-heap storage — confirm against PreservedMarksSet
    _reference_processor(reference_processor),
    _serial_compaction_point(),
    _is_alive(_mark_bitmap),         // liveness closure shared by all phase-1 cleanup steps
    // Installs our bitmap-based is_alive closure on the reference processor
    // for the duration of this collector; assumed restored by the mutator's
    // destructor — confirm.
    _is_alive_mutator(_reference_processor, &_is_alive) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  _preserved_marks_set.init(_num_workers);
  // Raw C-heap arrays of per-worker state; released in the destructor.
  _markers = NEW_C_HEAP_ARRAY(G1FullGCMarker*, _num_workers, mtGC);
  _compaction_points = NEW_C_HEAP_ARRAY(G1FullGCCompactionPoint*, _num_workers, mtGC);
  for (uint i = 0; i < _num_workers; i++) {
    _markers[i] = new G1FullGCMarker(i, _preserved_marks_set.get(i), mark_bitmap());
    _compaction_points[i] = new G1FullGCCompactionPoint();
    // Register each marker's stacks so they are visible to all workers
    // through the shared queue sets.
    _oop_queue_set.register_queue(i, marker(i)->oop_stack());
    _array_queue_set.register_queue(i, marker(i)->objarray_stack());
  }
}
91
92 G1FullCollector::~G1FullCollector() {
93 for (uint i = 0; i < _num_workers; i++) {
94 delete _markers[i];
95 delete _compaction_points[i];
96 }
97 FREE_C_HEAP_ARRAY(G1FullGCMarker*, _markers);
98 FREE_C_HEAP_ARRAY(G1FullGCCompactionPoint*, _compaction_points);
137 update_derived_pointers();
138
139 BiasedLocking::restore_marks();
140 CodeCache::gc_epilogue();
141 JvmtiExport::gc_epilogue();
142 }
143
144 void G1FullCollector::phase1_mark_live_objects() {
145 // Recursively traverse all live objects and mark them.
146 GCTraceTime(Info, gc, phases) info("Phase 1: Mark live objects", scope()->timer());
147
148 // Do the actual marking.
149 G1FullGCMarkTask marking_task(this);
150 run_task(&marking_task);
151
152 // Process references discovered during marking.
153 G1FullGCReferenceProcessingExecutor reference_processing(this);
154 reference_processing.execute(scope()->timer(), scope()->tracer());
155
156 // Weak oops cleanup.
157 {
158 GCTraceTime(Debug, gc, phases) trace("Phase 1: Weak Processing", scope()->timer());
159 WeakProcessor::weak_oops_do(&_is_alive, &do_nothing_cl);
160 }
161
162 // Class unloading and cleanup.
163 if (ClassUnloading) {
164 GCTraceTime(Debug, gc, phases) debug("Phase 1: Class Unloading and Cleanup", scope()->timer());
165 // Unload classes and purge the SystemDictionary.
166 bool purged_class = SystemDictionary::do_unloading(&_is_alive, scope()->timer());
167 G1CollectedHeap::heap()->complete_cleaning(&_is_alive, purged_class);
168 } else {
169 GCTraceTime(Debug, gc, phases) debug("Phase 1: String and Symbol Tables Cleanup", scope()->timer());
170 // If no class unloading just clean out strings and symbols.
171 G1CollectedHeap::heap()->partial_cleaning(&_is_alive, true, true, G1StringDedup::is_enabled());
172 }
173
174 scope()->tracer()->report_object_count_after_gc(&_is_alive);
175 }
176
177 void G1FullCollector::prepare_compaction_common() {
178 G1FullGCPrepareTask task(this);
179 run_task(&task);
180
181 // To avoid OOM when there is memory left.
182 if (!task.has_freed_regions()) {
183 task.prepare_serial_compaction();
184 }
185 }
186
187 void G1FullCollector::phase2_prepare_compaction() {
188 GCTraceTime(Info, gc, phases) info("Phase 2: Prepare for compaction", scope()->timer());
189 prepare_compaction_ext(); // Will call prepare_compaction_common() above.
190 }
191
192 void G1FullCollector::phase3_adjust_pointers() {
193 // Adjust the pointers to reflect the new locations
194 GCTraceTime(Info, gc, phases) info("Phase 3: Adjust pointers and remembered sets", scope()->timer());
|