20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/stringTable.hpp"
28 #include "classfile/systemDictionary.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "code/codeCache.hpp"
31 #include "code/icBuffer.hpp"
32 #include "gc_implementation/shared/gcHeapSummary.hpp"
33 #include "gc_implementation/shared/gcTimer.hpp"
34 #include "gc_implementation/shared/gcTrace.hpp"
35 #include "gc_implementation/shared/gcTraceTime.hpp"
36 #include "gc_interface/collectedHeap.inline.hpp"
37 #include "memory/genCollectedHeap.hpp"
38 #include "memory/genMarkSweep.hpp"
39 #include "memory/genOopClosures.inline.hpp"
40 #include "memory/generation.inline.hpp"
41 #include "memory/modRefBarrierSet.hpp"
42 #include "memory/referencePolicy.hpp"
43 #include "memory/space.hpp"
44 #include "oops/instanceRefKlass.hpp"
45 #include "oops/oop.inline.hpp"
46 #include "prims/jvmtiExport.hpp"
47 #include "runtime/fprofiler.hpp"
48 #include "runtime/handles.inline.hpp"
49 #include "runtime/synchronizer.hpp"
50 #include "runtime/thread.inline.hpp"
51 #include "runtime/vmThread.hpp"
52 #include "utilities/copy.hpp"
53 #include "utilities/events.hpp"
54
// Perform a full, stop-the-world mark-compact collection of the entire
// generational heap.  The caller must already be at a safepoint.
//   level              - asserted (guaranteed) to be 1, the old generation:
//                        a full GC always collects both old and young.
//   rp                 - ReferenceProcessor used for soft/weak/final/phantom
//                        reference discovery and processing during marking.
//   clear_all_softrefs - if true, the soft-ref policy clears all
//                        SoftReferences regardless of age.
// NOTE(review): this view of the function is truncated before its closing
// brace; the final statements are not visible here.
void GenMarkSweep::invoke_at_safepoint(int level, ReferenceProcessor* rp, bool clear_all_softrefs) {
  guarantee(level == 1, "We always collect both old and young.");
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  GenCollectedHeap* gch = GenCollectedHeap::heap();
#ifdef ASSERT
  // If the policy demands clearing all soft refs, the caller must have
  // already folded that decision into 'clear_all_softrefs'.
  if (gch->collector_policy()->should_clear_all_soft_refs()) {
    assert(clear_all_softrefs, "Policy should have been checked earlier");
  }
#endif

  // hook up weak ref data so it can be used during Mark-Sweep
  assert(ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  _ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  // Times/traces the whole collection when -XX:+PrintGC is set without
  // -XX:+PrintGCDetails.
  GCTraceTime t1(GCCauseString("Full GC", gch->gc_cause()), PrintGC && !PrintGCDetails, true, NULL, _gc_tracer->gc_id());

  gch->trace_heap_before_gc(_gc_tracer);

  // When collecting the permanent generation Method*s may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();

  // Increment the invocation count
  _total_invocations++;

  // Capture heap size before collection for printing.
  size_t gch_prev_used = gch->used();

  // Capture used regions for each generation that will be
  // subject to collection, so that card table adjustments can
  // be made intelligently (see clear / invalidate further below).
  gch->save_used_regions(level);

  allocate_stacks();

  // The four classic mark-compact phases: mark live objects, compute new
  // addresses, adjust references, then move objects.
  mark_sweep_phase1(level, clear_all_softrefs);

  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(assert(DerivedPointerTable::is_active(), "Sanity"));
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  mark_sweep_phase3(level);

  mark_sweep_phase4();

  // Reinstate the header words saved in the preserved-mark stacks.
  restore_marks();

  // Set saved marks for allocation profiler (and other things? -- dld)
  // (Should this be in general part?)
  gch->save_marks();

  deallocate_stacks();

  // If compaction completely evacuated all generations younger than this
  // one, then we can clear the card table. Otherwise, we must invalidate
  // it (consider all cards dirty). In the future, we might consider doing
  // compaction within generations only, and doing card-table sliding.
  bool all_empty = true;
  if (level == 1) {
    all_empty = gch->young_gen()->used() == 0;
  }

  GenRemSet* rs = gch->rem_set();
  assert(level == 1, "Code will break if this isn't true.");
  Generation* old_gen = gch->old_gen();
  // Clear/invalidate below make use of the "prev_used_regions" saved earlier.
  if (all_empty) {
    // We've evacuated all generations below us.
    rs->clear_into_younger(old_gen);
  } else {
    // Invalidate the cards corresponding to the currently used
    // region and clear those corresponding to the evacuated region.
    rs->invalidate_or_clear(old_gen);
  }

  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

  if (PrintGC && !PrintGCDetails) {
    gch->print_heap_change(gch_prev_used);
  }

  // refs processing: clean slate
  _ref_processor = NULL;

  // Update heap occupancy information which is used as
  // input to soft ref clearing policy at the next gc.
    // NOTE(review): this view starts mid-function; the head of the
    // enclosing function (apparently allocate_stacks()) is not visible.
    _preserved_count_max = 0;
  }

  // Use 'scratch' (set up in the non-visible part above) as the buffer for
  // preserved marks; no marks have been saved yet at this point.
  _preserved_marks = (PreservedMark*)scratch;
  _preserved_count = 0;
}
178
179
180 void GenMarkSweep::deallocate_stacks() {
181 if (!UseG1GC) {
182 GenCollectedHeap* gch = GenCollectedHeap::heap();
183 gch->release_scratch();
184 }
185
186 _preserved_mark_stack.clear(true);
187 _preserved_oop_stack.clear(true);
188 _marking_stack.clear();
189 _objarray_stack.clear(true);
190 }
191
// Mark-sweep phase 1: recursively mark every object reachable from the
// strong roots, then process the reference objects discovered while marking.
//   level              - asserted to be 1; mark-sweep never runs on a young
//                        generation alone.
//   clear_all_softrefs - forwarded to the reference-processing policy.
// NOTE(review): this view of the function is truncated before its closing
// brace; any trailing statements are not visible here.
void GenMarkSweep::mark_sweep_phase1(int level,
                                     bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  GCTraceTime tm("phase 1", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
  trace(" 1");

  GenCollectedHeap* gch = GenCollectedHeap::heap();

  // Because follow_root_closure is created statically, cannot
  // use OopsInGenClosure constructor which takes a generation,
  // as the Universe has not been created when the static constructors
  // are run.
  assert(level == 1, "We don't use mark-sweep on young generations");
  follow_root_closure.set_orig_generation(gch->old_gen());

  // Need new claim bits before marking starts.
  ClassLoaderDataGraph::clear_claimed_marks();

  gch->gen_process_roots(level,
                         false, // Younger gens are not roots.
                         true, // activate StrongRootsScope
                         SharedHeap::SO_None,
                         GenCollectedHeap::StrongRootsOnly,
                         &follow_root_closure,
                         &follow_root_closure,
                         &follow_cld_closure);

  // Process reference objects found during marking
  {
    ref_processor()->setup_policy(clear_all_softrefs);
    const ReferenceProcessorStats& stats =
      ref_processor()->process_discovered_references(
        &is_alive, &keep_alive, &follow_stack_closure, NULL, _gc_timer, _gc_tracer->gc_id());
    gc_tracer()->report_gc_reference_stats(stats);
  }

  // This is the point where the entire marking should have completed.
  assert(_marking_stack.is_empty(), "Marking should have completed");
  // NOTE(review): this view starts mid-function -- the signature of
  // mark_sweep_phase2() and its opening comment lines are not visible.
  //
  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  GenCollectedHeap* gch = GenCollectedHeap::heap();

  // Timed/traced when -XX:+PrintGC -XX:+Verbose.
  GCTraceTime tm("phase 2", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
  trace("2");

  // Compute each live object's post-compaction location (presumably by
  // installing forwarding addresses -- confirm in prepare_for_compaction).
  gch->prepare_for_compaction();
}
271
272 class GenAdjustPointersClosure: public GenCollectedHeap::GenClosure {
273 public:
274 void do_generation(Generation* gen) {
275 gen->adjust_pointers();
276 }
277 };
278
279 void GenMarkSweep::mark_sweep_phase3(int level) {
280 GenCollectedHeap* gch = GenCollectedHeap::heap();
281
282 // Adjust the pointers to reflect the new locations
283 GCTraceTime tm("phase 3", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
284 trace("3");
285
286 // Need new claim bits for the pointer adjustment tracing.
287 ClassLoaderDataGraph::clear_claimed_marks();
288
289 // Because the closure below is created statically, we cannot
290 // use OopsInGenClosure constructor which takes a generation,
291 // as the Universe has not been created when the static constructors
292 // are run.
293 assert(level == 1, "We don't use mark-sweep on young generations.");
294 adjust_pointer_closure.set_orig_generation(gch->old_gen());
295
296 gch->gen_process_roots(level,
297 false, // Younger gens are not roots.
298 true, // activate StrongRootsScope
299 SharedHeap::SO_AllCodeCache,
300 GenCollectedHeap::StrongAndWeakRoots,
301 &adjust_pointer_closure,
302 &adjust_pointer_closure,
303 &adjust_cld_closure);
304
305 gch->gen_process_weak_roots(&adjust_pointer_closure);
306
307 adjust_marks();
308 GenAdjustPointersClosure blk;
309 gch->generation_iterate(&blk, true);
310 }
311
// Heap-iteration closure that asks each generation to slide its live
// objects to their new locations (presumably driving phase 4 -- the use
// site is not visible in this view).
// NOTE(review): this view ends before the closing '};' of the class.
class GenCompactClosure: public GenCollectedHeap::GenClosure {
 public:
  void do_generation(Generation* gen) {
    gen->compact();
  }
|
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/stringTable.hpp"
28 #include "classfile/systemDictionary.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "code/codeCache.hpp"
31 #include "code/icBuffer.hpp"
32 #include "gc_implementation/shared/gcHeapSummary.hpp"
33 #include "gc_implementation/shared/gcTimer.hpp"
34 #include "gc_implementation/shared/gcTrace.hpp"
35 #include "gc_implementation/shared/gcTraceTime.hpp"
36 #include "gc_interface/collectedHeap.inline.hpp"
37 #include "memory/genCollectedHeap.hpp"
38 #include "memory/genMarkSweep.hpp"
39 #include "memory/genOopClosures.inline.hpp"
40 #include "memory/generation.hpp"
41 #include "memory/generation.inline.hpp"
42 #include "memory/modRefBarrierSet.hpp"
43 #include "memory/referencePolicy.hpp"
44 #include "memory/space.hpp"
45 #include "oops/instanceRefKlass.hpp"
46 #include "oops/oop.inline.hpp"
47 #include "prims/jvmtiExport.hpp"
48 #include "runtime/fprofiler.hpp"
49 #include "runtime/handles.inline.hpp"
50 #include "runtime/synchronizer.hpp"
51 #include "runtime/thread.inline.hpp"
52 #include "runtime/vmThread.hpp"
53 #include "utilities/copy.hpp"
54 #include "utilities/events.hpp"
55
// Perform a full, stop-the-world mark-compact collection of the entire
// generational heap (this revision always collects both generations, so no
// 'level' parameter is needed).  Caller must already be at a safepoint.
//   rp                 - ReferenceProcessor used for soft/weak/final/phantom
//                        reference discovery and processing during marking.
//   clear_all_softrefs - if true, the soft-ref policy clears all
//                        SoftReferences regardless of age.
// NOTE(review): this view of the function is truncated before its closing
// brace; the final statements are not visible here.
void GenMarkSweep::invoke_at_safepoint(ReferenceProcessor* rp, bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  GenCollectedHeap* gch = GenCollectedHeap::heap();
#ifdef ASSERT
  // If the policy demands clearing all soft refs, the caller must have
  // already folded that decision into 'clear_all_softrefs'.
  if (gch->collector_policy()->should_clear_all_soft_refs()) {
    assert(clear_all_softrefs, "Policy should have been checked earlier");
  }
#endif

  // hook up weak ref data so it can be used during Mark-Sweep
  assert(ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  _ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  // Times/traces the whole collection when -XX:+PrintGC is set without
  // -XX:+PrintGCDetails.
  GCTraceTime t1(GCCauseString("Full GC", gch->gc_cause()), PrintGC && !PrintGCDetails, true, NULL, _gc_tracer->gc_id());

  gch->trace_heap_before_gc(_gc_tracer);

  // When collecting the permanent generation Method*s may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();

  // Increment the invocation count
  _total_invocations++;

  // Capture heap size before collection for printing.
  size_t gch_prev_used = gch->used();

  // Capture used regions for each generation that will be
  // subject to collection, so that card table adjustments can
  // be made intelligently (see clear / invalidate further below).
  gch->save_used_regions();

  allocate_stacks();

  // The four classic mark-compact phases: mark live objects, compute new
  // addresses, adjust references, then move objects.
  mark_sweep_phase1(clear_all_softrefs);

  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(assert(DerivedPointerTable::is_active(), "Sanity"));
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  mark_sweep_phase3();

  mark_sweep_phase4();

  // Reinstate the header words saved in the preserved-mark stacks.
  restore_marks();

  // Set saved marks for allocation profiler (and other things? -- dld)
  // (Should this be in general part?)
  gch->save_marks();

  deallocate_stacks();

  // If compaction completely evacuated the young generation we can clear
  // the card table. Otherwise, we must invalidate it (consider all cards dirty).
  // In the future, we might consider doing compaction within generations only,
  // and doing card-table sliding.
  GenRemSet* rs = gch->rem_set();
  Generation* old_gen = gch->old_gen();
  // Clear/invalidate below make use of the "prev_used_regions" saved earlier.
  if (gch->young_gen()->used() == 0) {
    // We've evacuated all generations below us.
    rs->clear_into_younger(old_gen);
  } else {
    // Invalidate the cards corresponding to the currently used
    // region and clear those corresponding to the evacuated region.
    rs->invalidate_or_clear(old_gen);
  }

  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

  if (PrintGC && !PrintGCDetails) {
    gch->print_heap_change(gch_prev_used);
  }

  // refs processing: clean slate
  _ref_processor = NULL;

  // Update heap occupancy information which is used as
  // input to soft ref clearing policy at the next gc.
    // NOTE(review): this view starts mid-function; the head of the
    // enclosing function (apparently allocate_stacks()) is not visible.
    _preserved_count_max = 0;
  }

  // Use 'scratch' (set up in the non-visible part above) as the buffer for
  // preserved marks; no marks have been saved yet at this point.
  _preserved_marks = (PreservedMark*)scratch;
  _preserved_count = 0;
}
172
173
174 void GenMarkSweep::deallocate_stacks() {
175 if (!UseG1GC) {
176 GenCollectedHeap* gch = GenCollectedHeap::heap();
177 gch->release_scratch();
178 }
179
180 _preserved_mark_stack.clear(true);
181 _preserved_oop_stack.clear(true);
182 _marking_stack.clear();
183 _objarray_stack.clear(true);
184 }
185
// Mark-sweep phase 1: recursively mark every object reachable from the
// strong roots, then process the reference objects discovered while marking.
//   clear_all_softrefs - forwarded to the reference-processing policy.
// NOTE(review): this view of the function is truncated before its closing
// brace; any trailing statements are not visible here.
void GenMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  GCTraceTime tm("phase 1", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
  trace(" 1");

  GenCollectedHeap* gch = GenCollectedHeap::heap();

  // Because follow_root_closure is created statically, cannot
  // use OopsInGenClosure constructor which takes a generation,
  // as the Universe has not been created when the static constructors
  // are run.
  follow_root_closure.set_orig_generation(gch->old_gen());

  // Need new claim bits before marking starts.
  ClassLoaderDataGraph::clear_claimed_marks();

  gch->gen_process_roots(Generation::Old,
                         false, // Younger gens are not roots.
                         true, // activate StrongRootsScope
                         SharedHeap::SO_None,
                         GenCollectedHeap::StrongRootsOnly,
                         &follow_root_closure,
                         &follow_root_closure,
                         &follow_cld_closure);

  // Process reference objects found during marking
  {
    ref_processor()->setup_policy(clear_all_softrefs);
    const ReferenceProcessorStats& stats =
      ref_processor()->process_discovered_references(
        &is_alive, &keep_alive, &follow_stack_closure, NULL, _gc_timer, _gc_tracer->gc_id());
    gc_tracer()->report_gc_reference_stats(stats);
  }

  // This is the point where the entire marking should have completed.
  assert(_marking_stack.is_empty(), "Marking should have completed");
222
  // NOTE(review): this view starts mid-function -- the signature of
  // mark_sweep_phase2() and its opening comment lines are not visible.
  //
  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  GenCollectedHeap* gch = GenCollectedHeap::heap();

  // Timed/traced when -XX:+PrintGC -XX:+Verbose.
  GCTraceTime tm("phase 2", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
  trace("2");

  // Compute each live object's post-compaction location (presumably by
  // installing forwarding addresses -- confirm in prepare_for_compaction).
  gch->prepare_for_compaction();
}
263
264 class GenAdjustPointersClosure: public GenCollectedHeap::GenClosure {
265 public:
266 void do_generation(Generation* gen) {
267 gen->adjust_pointers();
268 }
269 };
270
271 void GenMarkSweep::mark_sweep_phase3() {
272 GenCollectedHeap* gch = GenCollectedHeap::heap();
273
274 // Adjust the pointers to reflect the new locations
275 GCTraceTime tm("phase 3", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
276 trace("3");
277
278 // Need new claim bits for the pointer adjustment tracing.
279 ClassLoaderDataGraph::clear_claimed_marks();
280
281 // Because the closure below is created statically, we cannot
282 // use OopsInGenClosure constructor which takes a generation,
283 // as the Universe has not been created when the static constructors
284 // are run.
285 adjust_pointer_closure.set_orig_generation(gch->old_gen());
286
287 gch->gen_process_roots(Generation::Old,
288 false, // Younger gens are not roots.
289 true, // activate StrongRootsScope
290 SharedHeap::SO_AllCodeCache,
291 GenCollectedHeap::StrongAndWeakRoots,
292 &adjust_pointer_closure,
293 &adjust_pointer_closure,
294 &adjust_cld_closure);
295
296 gch->gen_process_weak_roots(&adjust_pointer_closure);
297
298 adjust_marks();
299 GenAdjustPointersClosure blk;
300 gch->generation_iterate(&blk, true);
301 }
302
// Heap-iteration closure that asks each generation to slide its live
// objects to their new locations (presumably driving phase 4 -- the use
// site is not visible in this view).
// NOTE(review): this view ends before the closing '};' of the class.
class GenCompactClosure: public GenCollectedHeap::GenClosure {
 public:
  void do_generation(Generation* gen) {
    gen->compact();
  }
|