// NOTE(review): this file is a two-pane diff dump — every code line carries its
// original source line number, and a bare "|" separates the two versions.
// The enclosing function begins before this view; the comments below document
// only what the visible lines show.

// Tail of the concurrent evacuation scope: TraceCollectorStats updates the
// concurrent-collection perf counters via RAII while do_evacuation() runs.
181 TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
182 heap->do_evacuation();
183 }
184
185 // Do an update-refs phase if required.
186 if (check_cancellation()) return;
187
// The update-refs phase runs only when the policy selected the "early" scheme.
188 if (heap->shenandoahPolicy()->update_refs_early()) {
189
// Init-update-refs safepoint pause: a VM operation executed on the VMThread,
// bracketed by gross pause timing on the collector policy.
190 VM_ShenandoahInitUpdateRefs init_update_refs;
191 heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::total_pause_gross);
192 heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::init_update_refs_gross);
193 VMThread::execute(&init_update_refs);
194 heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::init_update_refs_gross);
195 heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::total_pause_gross);
196
197 {
// Concurrent phase: update heap references, logged/timed via GCTraceTime.
198 GCTraceTime(Info, gc) time("Concurrent update references ", gc_timer, GCCause::_no_gc, true);
199 heap->concurrent_update_heap_references();
200 }
// Bail out of the cycle if the concurrent phase above was cancelled.
201 if (check_cancellation()) return;
202
// Final-update-refs safepoint pause, timed the same way as the init pause.
203 VM_ShenandoahFinalUpdateRefs final_update_refs;
204
205 heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::total_pause_gross);
206 heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::final_update_refs_gross);
207 VMThread::execute(&final_update_refs);
208 heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::final_update_refs_gross);
209 heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::total_pause_gross);
210 }
211
212 // Prepare for the next normal cycle:
213 if (check_cancellation()) return;
214
215 {
// Concurrently clear the next marking bitmap, pushing worker count up to the
// heap's maximum for the duration of the scope.
216 GCTraceTime(Info, gc) time("Concurrent reset bitmaps", gc_timer, GCCause::_no_gc);
217 heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::reset_bitmaps);
218 WorkGang* workers = heap->workers();
219 ShenandoahPushWorkerScope scope(workers, heap->max_workers());
220 heap->reset_next_mark_bitmap(workers);
221 heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::reset_bitmaps);
222 }
223
// Close out the cycle on the GC timer.
224 gc_timer->register_gc_end();
225 }
226
// Returns true when the concurrent GC has been cancelled, closing out the GC
// timer so the aborted cycle's end is still recorded. Truncated in this view:
// the non-cancelled tail of the function (presumably "return false;") lies
// beyond the pane separator — TODO confirm against the full file.
227 bool ShenandoahConcurrentThread::check_cancellation() {
228 ShenandoahHeap* heap = ShenandoahHeap::heap();
229 if (heap->cancelled_concgc()) {
// Cancellation is only expected for a pending Full GC or a graceful shutdown.
230 assert (is_full_gc() || in_graceful_shutdown(), "Cancel GC either for Full GC, or gracefully exiting");
231 heap->gc_timer()->register_gc_end();
232 return true;
233 }
|
// Second pane of the diff dump: the same cycle tail, but with extra handling
// for a cancelled concurrent update-refs phase — instead of always aborting,
// an allocation-failure cancellation may be "handed over" so the cycle still
// completes the final update-refs pause (degenerated completion).
// The enclosing function begins before this view.

// Tail of the concurrent evacuation scope (RAII counter update around
// do_evacuation()).
181 TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
182 heap->do_evacuation();
183 }
184
185 // Do an update-refs phase if required.
186 if (check_cancellation()) return;
187
188 if (heap->shenandoahPolicy()->update_refs_early()) {
189
// Init-update-refs safepoint pause with gross pause timing.
190 VM_ShenandoahInitUpdateRefs init_update_refs;
191 heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::total_pause_gross);
192 heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::init_update_refs_gross);
193 VMThread::execute(&init_update_refs);
194 heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::init_update_refs_gross);
195 heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::total_pause_gross);
196
197 {
// Concurrent phase: update heap references, logged/timed via GCTraceTime.
198 GCTraceTime(Info, gc) time("Concurrent update references ", gc_timer, GCCause::_no_gc, true);
199 heap->concurrent_update_heap_references();
200 }
201
// Cancellation handling for the concurrent update-refs phase:
// - record the cancellation in the policy's statistics;
// - if the pending Full GC cause is an allocation failure AND the policy
//   allows handing the cancelled phase over, fall through to the final
//   update-refs pause (degenerated completion) and remember to clear the
//   Full GC request afterwards;
// - otherwise end the GC timer and abort the cycle.
// NOTE(review): clear_full_gc and _full_gc_cause are declared outside this
// view — presumably earlier in the enclosing function / class; confirm.
202 clear_full_gc = false;
203 if (heap->cancelled_concgc()) {
204 heap->shenandoahPolicy()->record_uprefs_cancelled();
205 if (_full_gc_cause == GCCause::_allocation_failure &&
206 heap->shenandoahPolicy()->handover_cancelled_uprefs()) {
207 clear_full_gc = true;
208 heap->shenandoahPolicy()->record_uprefs_degenerated();
209 } else {
210 heap->gc_timer()->register_gc_end();
211 return;
212 }
213 } else {
214 heap->shenandoahPolicy()->record_uprefs_success();
215 }
216
// Final-update-refs safepoint pause, timed like the init pause above.
217 VM_ShenandoahFinalUpdateRefs final_update_refs;
218
219 heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::total_pause_gross);
220 heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::final_update_refs_gross);
221 VMThread::execute(&final_update_refs);
222 heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::final_update_refs_gross);
223 heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::total_pause_gross);
224 }
225
226 // Prepare for the next normal cycle:
227 if (check_cancellation()) return;
228
// If the cancelled update-refs was absorbed by the degenerated completion
// above, drop the now-satisfied Full GC request.
229 if (clear_full_gc) {
230 reset_full_gc();
231 }
232
233 {
// Concurrently clear the next marking bitmap with a temporarily maximized
// worker gang.
234 GCTraceTime(Info, gc) time("Concurrent reset bitmaps", gc_timer, GCCause::_no_gc);
235 heap->shenandoahPolicy()->record_phase_start(ShenandoahCollectorPolicy::reset_bitmaps);
236 WorkGang* workers = heap->workers();
237 ShenandoahPushWorkerScope scope(workers, heap->max_workers());
238 heap->reset_next_mark_bitmap(workers);
239 heap->shenandoahPolicy()->record_phase_end(ShenandoahCollectorPolicy::reset_bitmaps);
240 }
241
// Close out the cycle on the GC timer.
242 gc_timer->register_gc_end();
243 }
244
// Returns true when the concurrent GC has been cancelled, closing out the GC
// timer so the aborted cycle's end is still recorded. Truncated at the end of
// this view: the non-cancelled tail (presumably "return false;") is not
// visible — TODO confirm against the full file.
245 bool ShenandoahConcurrentThread::check_cancellation() {
246 ShenandoahHeap* heap = ShenandoahHeap::heap();
247 if (heap->cancelled_concgc()) {
// Cancellation is only expected for a pending Full GC or a graceful shutdown.
248 assert (is_full_gc() || in_graceful_shutdown(), "Cancel GC either for Full GC, or gracefully exiting");
249 heap->gc_timer()->register_gc_end();
250 return true;
251 }
|