105 BiasedLocking::restore_marks();
106 GenMarkSweep::deallocate_stacks();
107
108 CodeCache::gc_epilogue();
109 JvmtiExport::gc_epilogue();
110
111 // refs processing: clean slate
112 GenMarkSweep::set_ref_processor(NULL);
113 }
114
115
116 void G1MarkSweep::allocate_stacks() {
117 GenMarkSweep::_preserved_count_max = 0;
118 GenMarkSweep::_preserved_marks = NULL;
119 GenMarkSweep::_preserved_count = 0;
120 }
121
// Phase 1 of the serial (single-threaded) G1 full GC: recursively mark all
// live objects from the strong roots, process discovered references, unload
// dead classes/nmethods, and scrub the string/symbol tables.
// The statement order is a hard requirement: marking must be complete before
// reference processing, which must be complete before class unloading.
// NOTE(review): 'marked_for_unloading' is never written in the visible body —
// presumably a leftover out-parameter; confirm against callers.
void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  GCTraceTime(Trace, gc) tm("Phase 1: Mark live objects", gc_timer());

  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  // Code blobs found from the roots are marked through (no relocation
  // fixups during the marking pass).
  MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);
  {
    // Single worker (serial full GC); scoped so worker state is torn down
    // before reference processing starts.
    G1RootProcessor root_processor(g1h, 1);
    root_processor.process_strong_roots(&GenMarkSweep::follow_root_closure,
                                        &GenMarkSweep::follow_cld_closure,
                                        &follow_code_closure);
  }

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  assert(rp == g1h->ref_processor_stw(), "Sanity");

  // Soft-reference clearing policy depends on whether this collection was
  // asked to clear all soft refs (e.g. last-ditch before OOME).
  rp->setup_policy(clear_all_softrefs);
  const ReferenceProcessorStats& stats =
    rp->process_discovered_references(&GenMarkSweep::is_alive,
                                      &GenMarkSweep::keep_alive,
                                      &GenMarkSweep::follow_stack_closure,
                                      NULL,  // no parallel task executor (serial)
                                      gc_timer());
  gc_tracer()->report_gc_reference_stats(stats);


  // This is the point where the entire marking should have completed.
  assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");

  {
    GCTraceTime(Debug, gc) trace("Class Unloading", gc_timer());

    // Unload classes and purge the SystemDictionary.
    bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);

    // Unload nmethods.
    CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);

    // Prune dead klasses from subklass/sibling/implementor lists.
    Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);
  }

  {
    GCTraceTime(Debug, gc) trace("Scrub String and Symbol Tables", gc_timer());
    // Delete entries for dead interned string and clean up unreferenced symbols in symbol table.
    g1h->unlink_string_and_symbol_table(&GenMarkSweep::is_alive);
  }

  // Drop string-deduplication table entries whose referents died.
  if (G1StringDedup::is_enabled()) {
    GCTraceTime(Debug, gc) trace("String Deduplication Unlink", gc_timer());
    G1StringDedup::unlink(&GenMarkSweep::is_alive);
  }

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
#if defined(COMPILER2) || INCLUDE_JVMCI
    // Derived pointers must not be inspected while marks are installed.
    DerivedPointerTableDeactivate dpt_deact;
#endif
    g1h->prepare_for_verify();
    // Note: we can verify only the heap here. When an object is
    // marked, the previous value of the mark word (including
    // identity hash values, ages, etc) is preserved, and the mark
    // word is set to markOop::marked_value - effectively removing
    // any hash values from the mark word. These hash values are
    // used when verifying the dictionaries and so removing them
    // from the mark word can make verification of the dictionaries
    // fail. At the end of the GC, the original mark word values
    // (including hash values) are restored to the appropriate
    // objects.
    GCTraceTime(Info, gc, verify)("During GC (full)");
    g1h->verify(VerifyOption_G1UseMarkWord);
  }

  gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
}
203
204
// Phase 2: with all live objects marked, compute each object's forwarding
// address (its destination after compaction). The actual work is delegated
// to prepare_compaction(); this wrapper only adds the phase timing scope.
void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  GCTraceTime(Trace, gc) tm("Phase 2: Compute new object addresses", gc_timer());

  prepare_compaction();
}
216
217 class G1AdjustPointersClosure: public HeapRegionClosure {
218 public:
219 bool doHeapRegion(HeapRegion* r) {
220 if (r->is_humongous()) {
221 if (r->is_starts_humongous()) {
222 // We must adjust the pointers on the single H object.
223 oop obj = oop(r->bottom());
224 // point all the oops to the new location
225 MarkSweep::adjust_pointers(obj);
226 }
227 } else if (!r->is_pinned()) {
228 // This really ought to be "as_CompactibleSpace"...
229 r->adjust_pointers();
230 }
231 return false;
232 }
233 };
234
235 void G1MarkSweep::mark_sweep_phase3() {
236 G1CollectedHeap* g1h = G1CollectedHeap::heap();
237
238 // Adjust the pointers to reflect the new locations
239 GCTraceTime(Trace, gc) tm("Phase 3: Adjust pointers", gc_timer());
240
241 // Need cleared claim bits for the roots processing
242 ClassLoaderDataGraph::clear_claimed_marks();
243
244 CodeBlobToOopClosure adjust_code_closure(&GenMarkSweep::adjust_pointer_closure, CodeBlobToOopClosure::FixRelocations);
245 {
246 G1RootProcessor root_processor(g1h, 1);
247 root_processor.process_all_roots(&GenMarkSweep::adjust_pointer_closure,
248 &GenMarkSweep::adjust_cld_closure,
249 &adjust_code_closure);
250 }
251
252 assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
253 g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_pointer_closure);
254
255 // Now adjust pointers in remaining weak roots. (All of which should
256 // have been cleared if they pointed to non-surviving objects.)
257 JNIHandles::weak_oops_do(&GenMarkSweep::adjust_pointer_closure);
258
259 if (G1StringDedup::is_enabled()) {
280 assert(hr->is_empty(), "Should have been cleared in phase 2.");
281 }
282 }
283 hr->reset_during_compaction();
284 } else if (!hr->is_pinned()) {
285 hr->compact();
286 }
287 return false;
288 }
289 };
290
291 void G1MarkSweep::mark_sweep_phase4() {
292 // All pointers are now adjusted, move objects accordingly
293
294 // The ValidateMarkSweep live oops tracking expects us to traverse spaces
295 // in the same order in phase2, phase3 and phase4. We don't quite do that
296 // here (code and comment not fixed for perm removal), so we tell the validate code
297 // to use a higher index (saved from phase2) when verifying perm_gen.
298 G1CollectedHeap* g1h = G1CollectedHeap::heap();
299
300 GCTraceTime(Trace, gc) tm("Phase 4: Move objects", gc_timer());
301
302 G1SpaceCompactClosure blk;
303 g1h->heap_region_iterate(&blk);
304
305 }
306
307 void G1MarkSweep::enable_archive_object_check() {
308 assert(!_archive_check_enabled, "archive range check already enabled");
309 _archive_check_enabled = true;
310 size_t length = Universe::heap()->max_capacity();
311 _archive_region_map.initialize((HeapWord*)Universe::heap()->base(),
312 (HeapWord*)Universe::heap()->base() + length,
313 HeapRegion::GrainBytes);
314 }
315
// Marks the given address range as archive (or clears the flag) in the
// archive region map. enable_archive_object_check() must have been called
// first (asserted).
void G1MarkSweep::set_range_archive(MemRegion range, bool is_archive) {
  assert(_archive_check_enabled, "archive range check not enabled");
  _archive_region_map.set_by_address(range, is_archive);
}
320
|
105 BiasedLocking::restore_marks();
106 GenMarkSweep::deallocate_stacks();
107
108 CodeCache::gc_epilogue();
109 JvmtiExport::gc_epilogue();
110
111 // refs processing: clean slate
112 GenMarkSweep::set_ref_processor(NULL);
113 }
114
115
116 void G1MarkSweep::allocate_stacks() {
117 GenMarkSweep::_preserved_count_max = 0;
118 GenMarkSweep::_preserved_marks = NULL;
119 GenMarkSweep::_preserved_count = 0;
120 }
121
// Phase 1 of the serial (single-threaded) G1 full GC: recursively mark all
// live objects from the strong roots, process discovered references, unload
// dead classes/nmethods, and scrub the string/symbol tables.
// The statement order is a hard requirement: marking must be complete before
// reference processing, which must be complete before class unloading.
// NOTE(review): 'marked_for_unloading' is never written in the visible body —
// presumably a leftover out-parameter; confirm against callers.
void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  GCTraceTime(Info, gc, phases) tm("Phase 1: Mark live objects", gc_timer());

  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  // Code blobs found from the roots are marked through (no relocation
  // fixups during the marking pass).
  MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);
  {
    // Single worker (serial full GC); scoped so worker state is torn down
    // before reference processing starts.
    G1RootProcessor root_processor(g1h, 1);
    root_processor.process_strong_roots(&GenMarkSweep::follow_root_closure,
                                        &GenMarkSweep::follow_cld_closure,
                                        &follow_code_closure);
  }

  {
    GCTraceTime(Debug, gc, phases) trace("Reference Processing", gc_timer());

    // Process reference objects found during marking
    ReferenceProcessor* rp = GenMarkSweep::ref_processor();
    assert(rp == g1h->ref_processor_stw(), "Sanity");

    // Soft-reference clearing policy depends on whether this collection
    // was asked to clear all soft refs (e.g. last-ditch before OOME).
    rp->setup_policy(clear_all_softrefs);
    const ReferenceProcessorStats& stats =
      rp->process_discovered_references(&GenMarkSweep::is_alive,
                                        &GenMarkSweep::keep_alive,
                                        &GenMarkSweep::follow_stack_closure,
                                        NULL,  // no parallel task executor (serial)
                                        gc_timer());
    gc_tracer()->report_gc_reference_stats(stats);
  }

  // This is the point where the entire marking should have completed.
  assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");

  {
    GCTraceTime(Debug, gc, phases) trace("Class Unloading", gc_timer());

    // Unload classes and purge the SystemDictionary.
    bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);

    // Unload nmethods.
    CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);

    // Prune dead klasses from subklass/sibling/implementor lists.
    Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);
  }

  {
    GCTraceTime(Debug, gc, phases) trace("Scrub String and Symbol Tables", gc_timer());
    // Delete entries for dead interned string and clean up unreferenced symbols in symbol table.
    g1h->unlink_string_and_symbol_table(&GenMarkSweep::is_alive);
  }

  // Drop string-deduplication table entries whose referents died.
  if (G1StringDedup::is_enabled()) {
    GCTraceTime(Debug, gc, phases) trace("String Deduplication Unlink", gc_timer());
    G1StringDedup::unlink(&GenMarkSweep::is_alive);
  }

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
#if defined(COMPILER2) || INCLUDE_JVMCI
    // Derived pointers must not be inspected while marks are installed.
    DerivedPointerTableDeactivate dpt_deact;
#endif
    g1h->prepare_for_verify();
    // Note: we can verify only the heap here. When an object is
    // marked, the previous value of the mark word (including
    // identity hash values, ages, etc) is preserved, and the mark
    // word is set to markOop::marked_value - effectively removing
    // any hash values from the mark word. These hash values are
    // used when verifying the dictionaries and so removing them
    // from the mark word can make verification of the dictionaries
    // fail. At the end of the GC, the original mark word values
    // (including hash values) are restored to the appropriate
    // objects.
    GCTraceTime(Info, gc, verify)("During GC (full)");
    g1h->verify(VerifyOption_G1UseMarkWord);
  }

  gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
}
206
207
// Phase 2: with all live objects marked, compute each object's forwarding
// address (its destination after compaction). The actual work is delegated
// to prepare_compaction(); this wrapper only adds the phase timing scope.
void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  GCTraceTime(Info, gc, phases) tm("Phase 2: Compute new object addresses", gc_timer());

  prepare_compaction();
}
219
220 class G1AdjustPointersClosure: public HeapRegionClosure {
221 public:
222 bool doHeapRegion(HeapRegion* r) {
223 if (r->is_humongous()) {
224 if (r->is_starts_humongous()) {
225 // We must adjust the pointers on the single H object.
226 oop obj = oop(r->bottom());
227 // point all the oops to the new location
228 MarkSweep::adjust_pointers(obj);
229 }
230 } else if (!r->is_pinned()) {
231 // This really ought to be "as_CompactibleSpace"...
232 r->adjust_pointers();
233 }
234 return false;
235 }
236 };
237
238 void G1MarkSweep::mark_sweep_phase3() {
239 G1CollectedHeap* g1h = G1CollectedHeap::heap();
240
241 // Adjust the pointers to reflect the new locations
242 GCTraceTime(Info, gc, phases) tm("Phase 3: Adjust pointers", gc_timer());
243
244 // Need cleared claim bits for the roots processing
245 ClassLoaderDataGraph::clear_claimed_marks();
246
247 CodeBlobToOopClosure adjust_code_closure(&GenMarkSweep::adjust_pointer_closure, CodeBlobToOopClosure::FixRelocations);
248 {
249 G1RootProcessor root_processor(g1h, 1);
250 root_processor.process_all_roots(&GenMarkSweep::adjust_pointer_closure,
251 &GenMarkSweep::adjust_cld_closure,
252 &adjust_code_closure);
253 }
254
255 assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
256 g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_pointer_closure);
257
258 // Now adjust pointers in remaining weak roots. (All of which should
259 // have been cleared if they pointed to non-surviving objects.)
260 JNIHandles::weak_oops_do(&GenMarkSweep::adjust_pointer_closure);
261
262 if (G1StringDedup::is_enabled()) {
283 assert(hr->is_empty(), "Should have been cleared in phase 2.");
284 }
285 }
286 hr->reset_during_compaction();
287 } else if (!hr->is_pinned()) {
288 hr->compact();
289 }
290 return false;
291 }
292 };
293
294 void G1MarkSweep::mark_sweep_phase4() {
295 // All pointers are now adjusted, move objects accordingly
296
297 // The ValidateMarkSweep live oops tracking expects us to traverse spaces
298 // in the same order in phase2, phase3 and phase4. We don't quite do that
299 // here (code and comment not fixed for perm removal), so we tell the validate code
300 // to use a higher index (saved from phase2) when verifying perm_gen.
301 G1CollectedHeap* g1h = G1CollectedHeap::heap();
302
303 GCTraceTime(Info, gc, phases) tm("Phase 4: Move objects", gc_timer());
304
305 G1SpaceCompactClosure blk;
306 g1h->heap_region_iterate(&blk);
307
308 }
309
310 void G1MarkSweep::enable_archive_object_check() {
311 assert(!_archive_check_enabled, "archive range check already enabled");
312 _archive_check_enabled = true;
313 size_t length = Universe::heap()->max_capacity();
314 _archive_region_map.initialize((HeapWord*)Universe::heap()->base(),
315 (HeapWord*)Universe::heap()->base() + length,
316 HeapRegion::GrainBytes);
317 }
318
// Marks the given address range as archive (or clears the flag) in the
// archive region map. enable_archive_object_check() must have been called
// first (asserted).
void G1MarkSweep::set_range_archive(MemRegion range, bool is_archive) {
  assert(_archive_check_enabled, "archive range check not enabled");
  _archive_region_map.set_by_address(range, is_archive);
}
323
|