98 COMPILER2_PRESENT(assert(DerivedPointerTable::is_active(), "Sanity"));
99 COMPILER2_PRESENT(DerivedPointerTable::set_active(false));
100
101 mark_sweep_phase3(level);
102
103 mark_sweep_phase4();
104
105 restore_marks();
106
107 // Set saved marks for allocation profiler (and other things? -- dld)
108 // (Should this be in general part?)
109 gch->save_marks();
110
111 deallocate_stacks();
112
113 // If compaction completely evacuated all generations younger than this
114 // one, then we can clear the card table. Otherwise, we must invalidate
115 // it (consider all cards dirty). In the future, we might consider doing
116 // compaction within generations only, and doing card-table sliding.
117 bool all_empty = true;
118 for (int i = 0; all_empty && i < level; i++) {
119 Generation* g = gch->get_gen(i);
120 all_empty = all_empty && gch->get_gen(i)->used() == 0;
121 }
122 GenRemSet* rs = gch->rem_set();
123 Generation* old_gen = gch->get_gen(level);
124 // Clear/invalidate below make use of the "prev_used_regions" saved earlier.
125 if (all_empty) {
126 // We've evacuated all generations below us.
127 rs->clear_into_younger(old_gen);
128 } else {
129 // Invalidate the cards corresponding to the currently used
130 // region and clear those corresponding to the evacuated region.
131 rs->invalidate_or_clear(old_gen);
132 }
133
134 CodeCache::gc_epilogue();
135 JvmtiExport::gc_epilogue();
136
137 if (PrintGC && !PrintGCDetails) {
138 gch->print_heap_change(gch_prev_used);
139 }
140
141 // refs processing: clean slate
142 _ref_processor = NULL;
143
144 // Update heap occupancy information which is used as
145 // input to soft ref clearing policy at the next gc.
146 Universe::update_heap_info_at_gc();
147
148 // Update time of last gc for all generations we collected
149 // (which currently is all the generations in the heap).
150 // We need to use a monotonically non-decreasing time in ms
151 // or we will see time-warp warnings and os::javaTimeMillis()
152 // does not guarantee monotonicity.
153 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
154 gch->update_time_of_last_gc(now);
155
156 gch->trace_heap_after_gc(_gc_tracer);
157 }
158
159 void GenMarkSweep::allocate_stacks() {
160 GenCollectedHeap* gch = GenCollectedHeap::heap();
161 // Scratch request on behalf of oldest generation; will do no
162 // allocation.
163 ScratchBlock* scratch = gch->gather_scratch(gch->get_gen(gch->_n_gens-1), 0);
164
165 // $$$ To cut a corner, we'll only use the first scratch block, and then
166 // revert to malloc.
167 if (scratch != NULL) {
168 _preserved_count_max =
169 scratch->num_words * HeapWordSize / sizeof(PreservedMark);
170 } else {
171 _preserved_count_max = 0;
172 }
173
174 _preserved_marks = (PreservedMark*)scratch;
175 _preserved_count = 0;
176 }
177
178
179 void GenMarkSweep::deallocate_stacks() {
180 if (!UseG1GC) {
181 GenCollectedHeap* gch = GenCollectedHeap::heap();
182 gch->release_scratch();
183 }
184
185 _preserved_mark_stack.clear(true);
186 _preserved_oop_stack.clear(true);
187 _marking_stack.clear();
188 _objarray_stack.clear(true);
189 }
190
191 void GenMarkSweep::mark_sweep_phase1(int level,
192 bool clear_all_softrefs) {
193 // Recursively traverse all live objects and mark them
194 GCTraceTime tm("phase 1", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
195 trace(" 1");
196
197 GenCollectedHeap* gch = GenCollectedHeap::heap();
198
199 // Because follow_root_closure is created statically, cannot
200 // use OopsInGenClosure constructor which takes a generation,
201 // as the Universe has not been created when the static constructors
202 // are run.
203 follow_root_closure.set_orig_generation(gch->get_gen(level));
204
205 // Need new claim bits before marking starts.
206 ClassLoaderDataGraph::clear_claimed_marks();
207
208 gch->gen_process_roots(level,
209 false, // Younger gens are not roots.
210 true, // activate StrongRootsScope
211 SharedHeap::SO_None,
212 GenCollectedHeap::StrongRootsOnly,
213 &follow_root_closure,
214 &follow_root_closure,
215 &follow_cld_closure);
216
217 // Process reference objects found during marking
218 {
219 ref_processor()->setup_policy(clear_all_softrefs);
220 const ReferenceProcessorStats& stats =
221 ref_processor()->process_discovered_references(
222 &is_alive, &keep_alive, &follow_stack_closure, NULL, _gc_timer, _gc_tracer->gc_id());
223 gc_tracer()->report_gc_reference_stats(stats);
271 public:
272 void do_generation(Generation* gen) {
273 gen->adjust_pointers();
274 }
275 };
276
// Phase 3: update every reference so it points at the address its
// target will occupy after compaction (the new locations were computed
// in the preceding phase). Roots are adjusted first, then the preserved
// marks, then all objects in every generation.
void GenMarkSweep::mark_sweep_phase3(int level) {
  GenCollectedHeap* gch = GenCollectedHeap::heap();

  // Adjust the pointers to reflect the new locations
  GCTraceTime tm("phase 3", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
  trace("3");

  // Need new claim bits for the pointer adjustment tracing.
  ClassLoaderDataGraph::clear_claimed_marks();

  // Because the closure below is created statically, we cannot
  // use OopsInGenClosure constructor which takes a generation,
  // as the Universe has not been created when the static constructors
  // are run.
  adjust_pointer_closure.set_orig_generation(gch->get_gen(level));

  // Adjust pointers found in strong and weak roots, including the whole
  // code cache (SO_AllCodeCache).
  gch->gen_process_roots(level,
                         false, // Younger gens are not roots.
                         true,  // activate StrongRootsScope
                         SharedHeap::SO_AllCodeCache,
                         GenCollectedHeap::StrongAndWeakRoots,
                         &adjust_pointer_closure,
                         &adjust_pointer_closure,
                         &adjust_cld_closure);

  // Weak roots not covered by the call above.
  gch->gen_process_weak_roots(&adjust_pointer_closure);

  // Fix up the object headers that were preserved during marking, then
  // walk every generation adjusting the pointers inside live objects.
  adjust_marks();
  GenAdjustPointersClosure blk;
  gch->generation_iterate(&blk, true);
}
308
309 class GenCompactClosure: public GenCollectedHeap::GenClosure {
310 public:
311 void do_generation(Generation* gen) {
|
98 COMPILER2_PRESENT(assert(DerivedPointerTable::is_active(), "Sanity"));
99 COMPILER2_PRESENT(DerivedPointerTable::set_active(false));
100
101 mark_sweep_phase3(level);
102
103 mark_sweep_phase4();
104
105 restore_marks();
106
107 // Set saved marks for allocation profiler (and other things? -- dld)
108 // (Should this be in general part?)
109 gch->save_marks();
110
111 deallocate_stacks();
112
113 // If compaction completely evacuated all generations younger than this
114 // one, then we can clear the card table. Otherwise, we must invalidate
115 // it (consider all cards dirty). In the future, we might consider doing
116 // compaction within generations only, and doing card-table sliding.
117 bool all_empty = true;
118 if (level == 1) {
119 all_empty = gch->young_gen()->used() == 0;
120 }
121
122 GenRemSet* rs = gch->rem_set();
123 assert(level == 1, "Code will break if this isn't true.");
124 Generation* old_gen = gch->old_gen();
125 // Clear/invalidate below make use of the "prev_used_regions" saved earlier.
126 if (all_empty) {
127 // We've evacuated all generations below us.
128 rs->clear_into_younger(old_gen);
129 } else {
130 // Invalidate the cards corresponding to the currently used
131 // region and clear those corresponding to the evacuated region.
132 rs->invalidate_or_clear(old_gen);
133 }
134
135 CodeCache::gc_epilogue();
136 JvmtiExport::gc_epilogue();
137
138 if (PrintGC && !PrintGCDetails) {
139 gch->print_heap_change(gch_prev_used);
140 }
141
142 // refs processing: clean slate
143 _ref_processor = NULL;
144
145 // Update heap occupancy information which is used as
146 // input to soft ref clearing policy at the next gc.
147 Universe::update_heap_info_at_gc();
148
149 // Update time of last gc for all generations we collected
150 // (which currently is all the generations in the heap).
151 // We need to use a monotonically non-decreasing time in ms
152 // or we will see time-warp warnings and os::javaTimeMillis()
153 // does not guarantee monotonicity.
154 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
155 gch->update_time_of_last_gc(now);
156
157 gch->trace_heap_after_gc(_gc_tracer);
158 }
159
160 void GenMarkSweep::allocate_stacks() {
161 GenCollectedHeap* gch = GenCollectedHeap::heap();
162 // Scratch request on behalf of oldest generation; will do no
163 // allocation.
164 ScratchBlock* scratch = gch->gather_scratch(gch->old_gen(), 0);
165
166 // $$$ To cut a corner, we'll only use the first scratch block, and then
167 // revert to malloc.
168 if (scratch != NULL) {
169 _preserved_count_max =
170 scratch->num_words * HeapWordSize / sizeof(PreservedMark);
171 } else {
172 _preserved_count_max = 0;
173 }
174
175 _preserved_marks = (PreservedMark*)scratch;
176 _preserved_count = 0;
177 }
178
179
180 void GenMarkSweep::deallocate_stacks() {
181 if (!UseG1GC) {
182 GenCollectedHeap* gch = GenCollectedHeap::heap();
183 gch->release_scratch();
184 }
185
186 _preserved_mark_stack.clear(true);
187 _preserved_oop_stack.clear(true);
188 _marking_stack.clear();
189 _objarray_stack.clear(true);
190 }
191
192 void GenMarkSweep::mark_sweep_phase1(int level,
193 bool clear_all_softrefs) {
194 // Recursively traverse all live objects and mark them
195 GCTraceTime tm("phase 1", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
196 trace(" 1");
197
198 GenCollectedHeap* gch = GenCollectedHeap::heap();
199
200 // Because follow_root_closure is created statically, cannot
201 // use OopsInGenClosure constructor which takes a generation,
202 // as the Universe has not been created when the static constructors
203 // are run.
204 assert(level == 1, "We don't use mark-sweep on young generations");
205 follow_root_closure.set_orig_generation(gch->old_gen());
206
207 // Need new claim bits before marking starts.
208 ClassLoaderDataGraph::clear_claimed_marks();
209
210 gch->gen_process_roots(level,
211 false, // Younger gens are not roots.
212 true, // activate StrongRootsScope
213 SharedHeap::SO_None,
214 GenCollectedHeap::StrongRootsOnly,
215 &follow_root_closure,
216 &follow_root_closure,
217 &follow_cld_closure);
218
219 // Process reference objects found during marking
220 {
221 ref_processor()->setup_policy(clear_all_softrefs);
222 const ReferenceProcessorStats& stats =
223 ref_processor()->process_discovered_references(
224 &is_alive, &keep_alive, &follow_stack_closure, NULL, _gc_timer, _gc_tracer->gc_id());
225 gc_tracer()->report_gc_reference_stats(stats);
273 public:
274 void do_generation(Generation* gen) {
275 gen->adjust_pointers();
276 }
277 };
278
// Phase 3: update every reference so it points at the address its
// target will occupy after compaction (the new locations were computed
// in the preceding phase). Roots are adjusted first, then the preserved
// marks, then all objects in every generation.
void GenMarkSweep::mark_sweep_phase3(int level) {
  GenCollectedHeap* gch = GenCollectedHeap::heap();

  // Adjust the pointers to reflect the new locations
  GCTraceTime tm("phase 3", PrintGC && Verbose, true, _gc_timer, _gc_tracer->gc_id());
  trace("3");

  // Need new claim bits for the pointer adjustment tracing.
  ClassLoaderDataGraph::clear_claimed_marks();

  // Because the closure below is created statically, we cannot
  // use OopsInGenClosure constructor which takes a generation,
  // as the Universe has not been created when the static constructors
  // are run.
  assert(level == 1, "We don't use mark-sweep on young generations.");
  adjust_pointer_closure.set_orig_generation(gch->old_gen());

  // Adjust pointers found in strong and weak roots, including the whole
  // code cache (SO_AllCodeCache).
  gch->gen_process_roots(level,
                         false, // Younger gens are not roots.
                         true,  // activate StrongRootsScope
                         SharedHeap::SO_AllCodeCache,
                         GenCollectedHeap::StrongAndWeakRoots,
                         &adjust_pointer_closure,
                         &adjust_pointer_closure,
                         &adjust_cld_closure);

  // Weak roots not covered by the call above.
  gch->gen_process_weak_roots(&adjust_pointer_closure);

  // Fix up the object headers that were preserved during marking, then
  // walk every generation adjusting the pointers inside live objects.
  adjust_marks();
  GenAdjustPointersClosure blk;
  gch->generation_iterate(&blk, true);
}
311
312 class GenCompactClosure: public GenCollectedHeap::GenClosure {
313 public:
314 void do_generation(Generation* gen) {
|