53 #include "utilities/copy.hpp"
54 #include "utilities/events.hpp"
55 #include "utilities/stack.inline.hpp"
56
57 void GenMarkSweep::invoke_at_safepoint(ReferenceProcessor* rp, bool clear_all_softrefs) {
58 assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
59
60 GenCollectedHeap* gch = GenCollectedHeap::heap();
61 #ifdef ASSERT
62 if (gch->collector_policy()->should_clear_all_soft_refs()) {
63 assert(clear_all_softrefs, "Policy should have been checked earlier");
64 }
65 #endif
66
67 // hook up weak ref data so it can be used during Mark-Sweep
68 assert(ref_processor() == NULL, "no stomping");
69 assert(rp != NULL, "should be non-NULL");
70 set_ref_processor(rp);
71 rp->setup_policy(clear_all_softrefs);
72
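// Times the "Full GC" pause; with this GCTraceTime constructor the second
// argument gates printing, so the summary line appears only when PrintGC
// is set and PrintGCDetails is not.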
73 GCTraceTime t1(GCCauseString("Full GC", gch->gc_cause()), PrintGC && !PrintGCDetails, true, NULL);
74
75 gch->trace_heap_before_gc(_gc_tracer);
76
77 // When collecting the permanent generation, Method*s may be moving,
78 // so we either have to flush all bcp data or convert it into bci.
79 CodeCache::gc_prologue();
80
81 // Increment the invocation count
82 _total_invocations++;
83
84 // Capture heap size before collection for printing.
85 size_t gch_prev_used = gch->used();
86
87 // Capture used regions for each generation that will be
88 // subject to collection, so that card table adjustments can
89 // be made intelligently (see clear / invalidate further below).
90 gch->save_used_regions();
91
92 allocate_stacks();
93
94 mark_sweep_phase1(clear_all_softrefs);
95
96 mark_sweep_phase2();
97
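// (Derived pointers are interior pointers in compiled frames, recorded as
// base/derived pairs so they can be recomputed after the base oop moves.)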
98 // Don't add any more derived pointers during phase3
99 #if defined(COMPILER2) || INCLUDE_JVMCI
100 assert(DerivedPointerTable::is_active(), "Sanity");
101 DerivedPointerTable::set_active(false);
102 #endif
103
104 mark_sweep_phase3();
105
106 mark_sweep_phase4();
116 // If compaction completely evacuated the young generation then we
117 // can clear the card table. Otherwise, we must invalidate
118 // it (consider all cards dirty). In the future, we might consider doing
119 // compaction within generations only, and doing card-table sliding.
120 CardTableRS* rs = gch->rem_set();
121 Generation* old_gen = gch->old_gen();
122
123 // Clear/invalidate below make use of the "prev_used_regions" saved earlier.
124 if (gch->young_gen()->used() == 0) {
125 // We've evacuated the young generation.
126 rs->clear_into_younger(old_gen);
127 } else {
128 // Invalidate the cards corresponding to the currently used
129 // region and clear those corresponding to the evacuated region.
130 rs->invalidate_or_clear(old_gen);
131 }
132
133 CodeCache::gc_epilogue();
134 JvmtiExport::gc_epilogue();
135
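// Prints the one-line before->after(capacity) heap usage summary, using
// gch_prev_used captured at the start of the collection.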
136 if (PrintGC && !PrintGCDetails) {
137 gch->print_heap_change(gch_prev_used);
138 }
139
140 // refs processing: clean slate
141 set_ref_processor(NULL);
142
143 // Update heap occupancy information, which is used as
144 // input to the soft ref clearing policy at the next GC.
145 Universe::update_heap_info_at_gc();
146
147 // Update time of last gc for all generations we collected
148 // (which currently is all the generations in the heap).
149 // We need to use a monotonically non-decreasing time in ms
150 // or we will see time-warp warnings; os::javaTimeMillis()
151 // does not guarantee monotonicity.
152 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
153 gch->update_time_of_last_gc(now);
154
155 gch->trace_heap_after_gc(_gc_tracer);
156 }
157
158 void GenMarkSweep::allocate_stacks() {
159 GenCollectedHeap* gch = GenCollectedHeap::heap();
171
172 _preserved_marks = (PreservedMark*)scratch;
173 _preserved_count = 0;
174 }
175
176
177 void GenMarkSweep::deallocate_stacks() {
178 if (!UseG1GC) {
179 GenCollectedHeap* gch = GenCollectedHeap::heap();
180 gch->release_scratch();
181 }
182
183 _preserved_mark_stack.clear(true);
184 _preserved_oop_stack.clear(true);
185 _marking_stack.clear();
186 _objarray_stack.clear(true);
187 }
188
189 void GenMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
190 // Recursively traverse all live objects and mark them
191 GCTraceTime tm("phase 1", PrintGC && Verbose, true, _gc_timer);
192
193 GenCollectedHeap* gch = GenCollectedHeap::heap();
194
195 // Because follow_root_closure is created statically, we cannot
196 // use the OopsInGenClosure constructor which takes a generation,
197 // as the Universe has not been created when the static constructors
198 // are run.
199 follow_root_closure.set_orig_generation(gch->old_gen());
200
201 // Need new claim bits before marking starts.
202 ClassLoaderDataGraph::clear_claimed_marks();
203
204 {
205 StrongRootsScope srs(1);
206
207 gch->gen_process_roots(&srs,
208 GenCollectedHeap::OldGen,
209 false, // Younger gens are not roots.
210 GenCollectedHeap::SO_None,
211 ClassUnloading,
244 gc_tracer()->report_object_count_after_gc(&is_alive);
245 }
246
247
248 void GenMarkSweep::mark_sweep_phase2() {
249 // Now all live objects are marked, compute the new object addresses.
250
251 // It is imperative that we traverse perm_gen LAST. If dead space is
252 // allowed, a range of dead objects may get overwritten by a dead int
253 // array. If perm_gen is not traversed last, a Klass* may get
254 // overwritten. This is fine since it is dead, but if the class has dead
255 // instances we have to skip them, and in order to find their size we
256 // need the Klass*!
257 //
258 // It is not required that we traverse spaces in the same order in
259 // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
260 // tracking expects us to do so. See comment under phase4.
261
262 GenCollectedHeap* gch = GenCollectedHeap::heap();
263
264 GCTraceTime tm("phase 2", PrintGC && Verbose, true, _gc_timer);
265
266 gch->prepare_for_compaction();
267 }
268
269 class GenAdjustPointersClosure: public GenCollectedHeap::GenClosure {
270 public:
271 void do_generation(Generation* gen) {
272 gen->adjust_pointers();
273 }
274 };
275
276 void GenMarkSweep::mark_sweep_phase3() {
277 GenCollectedHeap* gch = GenCollectedHeap::heap();
278
279 // Adjust the pointers to reflect the new locations
280 GCTraceTime tm("phase 3", PrintGC && Verbose, true, _gc_timer);
281
282 // Need new claim bits for the pointer adjustment tracing.
283 ClassLoaderDataGraph::clear_claimed_marks();
284
285 // Because the closure below is created statically, we cannot
286 // use the OopsInGenClosure constructor which takes a generation,
287 // as the Universe has not been created when the static constructors
288 // are run.
289 adjust_pointer_closure.set_orig_generation(gch->old_gen());
290
291 {
292 StrongRootsScope srs(1);
293
294 gch->gen_process_roots(&srs,
295 GenCollectedHeap::OldGen,
296 false, // Younger gens are not roots.
297 GenCollectedHeap::SO_AllCodeCache,
298 GenCollectedHeap::StrongAndWeakRoots,
299 &adjust_pointer_closure,
300 &adjust_pointer_closure,
312 public:
313 void do_generation(Generation* gen) {
314 gen->compact();
315 }
316 };
317
318 void GenMarkSweep::mark_sweep_phase4() {
319 // All pointers are now adjusted, move objects accordingly
320
321 // It is imperative that we traverse perm_gen first in phase4. All
322 // classes must be allocated earlier than their instances, and traversing
323 // perm_gen first makes sure that all Klass*s have moved to their new
324 // location before any instance does a dispatch through its klass!
325
326 // The ValidateMarkSweep live oops tracking expects us to traverse spaces
327 // in the same order in phase2, phase3 and phase4. We don't quite do that
328 // here (perm_gen first rather than last), so we tell the validate code
329 // to use a higher index (saved from phase2) when verifying perm_gen.
330 GenCollectedHeap* gch = GenCollectedHeap::heap();
331
332 GCTraceTime tm("phase 4", PrintGC && Verbose, true, _gc_timer);
333
334 GenCompactClosure blk;
335 gch->generation_iterate(&blk, true);
336 }
|
53 #include "utilities/copy.hpp"
54 #include "utilities/events.hpp"
55 #include "utilities/stack.inline.hpp"
56
57 void GenMarkSweep::invoke_at_safepoint(ReferenceProcessor* rp, bool clear_all_softrefs) {
58 assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
59
60 GenCollectedHeap* gch = GenCollectedHeap::heap();
61 #ifdef ASSERT
62 if (gch->collector_policy()->should_clear_all_soft_refs()) {
63 assert(clear_all_softrefs, "Policy should have been checked earlier");
64 }
65 #endif
66
67 // hook up weak ref data so it can be used during Mark-Sweep
68 assert(ref_processor() == NULL, "no stomping");
69 assert(rp != NULL, "should be non-NULL");
70 set_ref_processor(rp);
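// setup_policy() selects the SoftReference clearing policy for this cycle:
// always-clear when clear_all_softrefs is set, otherwise the default
// heap-occupancy/age based policy.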
71 rp->setup_policy(clear_all_softrefs);
72
73 GCTraceTime(Info, gc) t1("Full GC", NULL, gch->gc_cause(), true);
74
75 gch->trace_heap_before_gc(_gc_tracer);
76
77 // When collecting the permanent generation, Method*s may be moving,
78 // so we either have to flush all bcp data or convert it into bci.
79 CodeCache::gc_prologue();
80
81 // Increment the invocation count
82 _total_invocations++;
83
84 // Capture used regions for each generation that will be
85 // subject to collection, so that card table adjustments can
86 // be made intelligently (see clear / invalidate further below).
87 gch->save_used_regions();
88
89 allocate_stacks();
90
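// Classic sliding mark-compact in four phases: mark live objects, compute
// new addresses, adjust pointers, then move the objects.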
91 mark_sweep_phase1(clear_all_softrefs);
92
93 mark_sweep_phase2();
94
95 // Don't add any more derived pointers during phase3
96 #if defined(COMPILER2) || INCLUDE_JVMCI
97 assert(DerivedPointerTable::is_active(), "Sanity");
98 DerivedPointerTable::set_active(false);
99 #endif
100
101 mark_sweep_phase3();
102
103 mark_sweep_phase4();
113 // If compaction completely evacuated the young generation then we
114 // can clear the card table. Otherwise, we must invalidate
115 // it (consider all cards dirty). In the future, we might consider doing
116 // compaction within generations only, and doing card-table sliding.
117 CardTableRS* rs = gch->rem_set();
118 Generation* old_gen = gch->old_gen();
119
120 // Clear/invalidate below make use of the "prev_used_regions" saved earlier.
121 if (gch->young_gen()->used() == 0) {
122 // We've evacuated the young generation.
123 rs->clear_into_younger(old_gen);
124 } else {
125 // Invalidate the cards corresponding to the currently used
126 // region and clear those corresponding to the evacuated region.
127 rs->invalidate_or_clear(old_gen);
128 }
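// Cards left dirty here are conservatively rescanned by the next young
// collection, trading extra scanning for correctness.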
129
130 CodeCache::gc_epilogue();
131 JvmtiExport::gc_epilogue();
132
133 // refs processing: clean slate
134 set_ref_processor(NULL);
135
136 // Update heap occupancy information, which is used as
137 // input to the soft ref clearing policy at the next GC.
138 Universe::update_heap_info_at_gc();
139
140 // Update time of last gc for all generations we collected
141 // (which currently is all the generations in the heap).
142 // We need to use a monotonically non-decreasing time in ms
143 // or we will see time-warp warnings; os::javaTimeMillis()
144 // does not guarantee monotonicity.
145 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
146 gch->update_time_of_last_gc(now);
147
148 gch->trace_heap_after_gc(_gc_tracer);
149 }
150
151 void GenMarkSweep::allocate_stacks() {
152 GenCollectedHeap* gch = GenCollectedHeap::heap();
164
165 _preserved_marks = (PreservedMark*)scratch;
166 _preserved_count = 0;
167 }
168
169
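// The preserved-mark stacks hold original object headers that were displaced
// by forwarding pointers during compaction (e.g. locked or hashed objects);
// once those headers have been restored, the storage can be released.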
170 void GenMarkSweep::deallocate_stacks() {
171 if (!UseG1GC) {
172 GenCollectedHeap* gch = GenCollectedHeap::heap();
173 gch->release_scratch();
174 }
175
176 _preserved_mark_stack.clear(true);
177 _preserved_oop_stack.clear(true);
178 _marking_stack.clear();
179 _objarray_stack.clear(true);
180 }
181
182 void GenMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
183 // Recursively traverse all live objects and mark them
184 GCTraceTime(Trace, gc) tm("phase 1: Mark live objects", _gc_timer);
185
186 GenCollectedHeap* gch = GenCollectedHeap::heap();
187
188 // Because follow_root_closure is created statically, we cannot
189 // use the OopsInGenClosure constructor which takes a generation,
190 // as the Universe has not been created when the static constructors
191 // are run.
192 follow_root_closure.set_orig_generation(gch->old_gen());
193
194 // Need new claim bits before marking starts.
195 ClassLoaderDataGraph::clear_claimed_marks();
196
197 {
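// StrongRootsScope(1): root processing here is single-threaded; the
// argument is the number of threads that will participate in claiming
// root-scanning tasks.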
198 StrongRootsScope srs(1);
199
200 gch->gen_process_roots(&srs,
201 GenCollectedHeap::OldGen,
202 false, // Younger gens are not roots.
203 GenCollectedHeap::SO_None,
204 ClassUnloading,
237 gc_tracer()->report_object_count_after_gc(&is_alive);
238 }
239
240
241 void GenMarkSweep::mark_sweep_phase2() {
242 // Now all live objects are marked, compute the new object addresses.
243
244 // It is imperative that we traverse perm_gen LAST. If dead space is
245 // allowed, a range of dead objects may get overwritten by a dead int
246 // array. If perm_gen is not traversed last, a Klass* may get
247 // overwritten. This is fine since it is dead, but if the class has dead
248 // instances we have to skip them, and in order to find their size we
249 // need the Klass*!
250 //
251 // It is not required that we traverse spaces in the same order in
252 // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
253 // tracking expects us to do so. See comment under phase4.
254
255 GenCollectedHeap* gch = GenCollectedHeap::heap();
256
257 GCTraceTime(Trace, gc) tm("phase 2: Compute new object addresses", _gc_timer);
258
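// Each space computes a forwarding address for every live object by sliding
// live data toward the bottom of the space; the forwardee is recorded in
// the object's mark word.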
259 gch->prepare_for_compaction();
260 }
261
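// Invoked for each generation to update the references its objects contain
// to the post-compaction addresses computed in phase 2.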
262 class GenAdjustPointersClosure: public GenCollectedHeap::GenClosure {
263 public:
264 void do_generation(Generation* gen) {
265 gen->adjust_pointers();
266 }
267 };
268
269 void GenMarkSweep::mark_sweep_phase3() {
270 GenCollectedHeap* gch = GenCollectedHeap::heap();
271
272 // Adjust the pointers to reflect the new locations
273 GCTraceTime(Trace, gc) tm("phase 3: Adjust pointers", _gc_timer);
274
275 // Need new claim bits for the pointer adjustment tracing.
276 ClassLoaderDataGraph::clear_claimed_marks();
277
278 // Because the closure below is created statically, we cannot
279 // use the OopsInGenClosure constructor which takes a generation,
280 // as the Universe has not been created when the static constructors
281 // are run.
282 adjust_pointer_closure.set_orig_generation(gch->old_gen());
283
284 {
285 StrongRootsScope srs(1);
286
287 gch->gen_process_roots(&srs,
288 GenCollectedHeap::OldGen,
289 false, // Younger gens are not roots.
290 GenCollectedHeap::SO_AllCodeCache,
291 GenCollectedHeap::StrongAndWeakRoots,
292 &adjust_pointer_closure,
293 &adjust_pointer_closure,
305 public:
306 void do_generation(Generation* gen) {
307 gen->compact();
308 }
309 };
310
311 void GenMarkSweep::mark_sweep_phase4() {
312 // All pointers are now adjusted, move objects accordingly
313
314 // It is imperative that we traverse perm_gen first in phase4. All
315 // classes must be allocated earlier than their instances, and traversing
316 // perm_gen first makes sure that all Klass*s have moved to their new
317 // location before any instance does a dispatch through its klass!
318
319 // The ValidateMarkSweep live oops tracking expects us to traverse spaces
320 // in the same order in phase2, phase3 and phase4. We don't quite do that
321 // here (perm_gen first rather than last), so we tell the validate code
322 // to use a higher index (saved from phase2) when verifying perm_gen.
323 GenCollectedHeap* gch = GenCollectedHeap::heap();
324
325 GCTraceTime(Trace, gc) tm("phase 4: Move objects", _gc_timer);
326
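// generation_iterate(&blk, true): 'true' requests old-to-young order, so
// the old generation is compacted before the young generation.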
327 GenCompactClosure blk;
328 gch->generation_iterate(&blk, true);
329 }
|