// --- Full-GC epilogue fragment (begins mid-function; the enclosing method is
// not visible in this view) ---
// Reset the card-marking (mod-ref) state covering the old generation.
237 ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
238 MemRegion old_mr = heap->old_gen()->reserved();
239 if (young_gen_empty) {
// Young gen is empty: clear every old-gen card — presumably no old->young
// references remain to be tracked (TODO confirm against the caller).
240 modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
241 } else {
// Otherwise conservatively invalidate (dirty) the whole old-gen range.
242 modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
243 }
244
245 // Delete metaspaces for unloaded class loaders and clean up loader_data graph
246 ClassLoaderDataGraph::purge();
247 MetaspaceAux::verify_metrics();
248
// Post-GC fixups: restore object headers saved by biased locking, then run
// the code-cache and JVMTI GC epilogues.
249 BiasedLocking::restore_marks();
250 CodeCache::gc_epilogue();
251 JvmtiExport::gc_epilogue();
252
// Derived-pointer bookkeeping exists only in C2 / JVMCI builds.
253 #if defined(COMPILER2) || INCLUDE_JVMCI
254 DerivedPointerTable::update_pointers();
255 #endif
256
// Move discovered references onto their pending lists and log enqueue timings.
// NOTE(review): this copy passes ref_processor()->processing_is_mt() as a third
// constructor argument; the later copy of this fragment in this file does not —
// confirm which ReferenceProcessorPhaseTimes constructor the current headers declare.
257 ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q(), ref_processor()->processing_is_mt());
258
259 ref_processor()->enqueue_discovered_references(NULL, &pt);
260
261 pt.print_enqueue_phase();
262
263 // Update time of last GC
264 reset_millis_since_last_gc();
265
266 // Let the size policy know we're done
267 size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
268
269 if (UseAdaptiveSizePolicy) {
270
// Log generation capacities before any ergonomics (resize) decisions.
271 log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
272 log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
273 old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
274
275 // Don't check if the size_policy is ready here. Let
276 // the size_policy check that internally.
// The condition below continues past the end of this fragment.
277 if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
// --- Strong-root marking fragment (begins mid-scope; the opening of this
// root-scanning block is not visible in this view) ---
// Push strong roots from each VM subsystem onto the marking stack.
515 Threads::oops_do(mark_and_push_closure(), &each_active_code_blob);
516 ObjectSynchronizer::oops_do(mark_and_push_closure());
517 FlatProfiler::oops_do(mark_and_push_closure());
518 Management::oops_do(mark_and_push_closure());
519 JvmtiExport::oops_do(mark_and_push_closure());
520 SystemDictionary::always_strong_oops_do(mark_and_push_closure());
521 ClassLoaderDataGraph::always_strong_cld_do(follow_cld_closure());
522 // Do not treat nmethods as strong roots for mark/sweep, since we can unload them.
523 //CodeCache::scavenge_root_nmethods_do(CodeBlobToOopClosure(mark_and_push_closure()));
524 AOTLoader::oops_do(mark_and_push_closure());
525 }
526
527 // Flush marking stack.
528 follow_stack();
529
530 // Process reference objects found during marking
531 {
532 GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer);
533
// Soft-reference policy depends on whether this cycle was asked to clear
// all soft references.
534 ref_processor()->setup_policy(clear_all_softrefs);
// NOTE(review): this copy passes ref_processor()->processing_is_mt() as a third
// constructor argument; the later copy of this fragment in this file does not —
// confirm the ReferenceProcessorPhaseTimes constructor signature in use.
535 ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q(), ref_processor()->processing_is_mt());
536 const ReferenceProcessorStats& stats =
537 ref_processor()->process_discovered_references(
538 is_alive_closure(), mark_and_push_closure(), follow_stack_closure(), NULL, &pt);
// Report reference-processing statistics to the GC tracer and log per-kind timings.
539 gc_tracer()->report_gc_reference_stats(stats);
540 pt.print_all_references();
541 }
542
543 // This is the point where the entire marking should have completed.
544 assert(_marking_stack.is_empty(), "Marking should have completed");
545
546 {
547 GCTraceTime(Debug, gc, phases) t("Class Unloading", _gc_timer);
548
549 // Unload classes and purge the SystemDictionary.
550 bool purged_class = SystemDictionary::do_unloading(is_alive_closure(), _gc_timer);
551
552 // Unload nmethods.
553 CodeCache::do_unloading(is_alive_closure(), purged_class);
554
// Fragment ends here; the pruning call itself is outside this view.
555 // Prune dead klasses from subklass/sibling/implementor lists.
|
// --- Full-GC epilogue fragment, second copy (begins mid-function; the
// enclosing method is not visible in this view) ---
// Reset the card-marking (mod-ref) state covering the old generation.
237 ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
238 MemRegion old_mr = heap->old_gen()->reserved();
239 if (young_gen_empty) {
// Young gen is empty: clear every old-gen card — presumably no old->young
// references remain to be tracked (TODO confirm against the caller).
240 modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
241 } else {
// Otherwise conservatively invalidate (dirty) the whole old-gen range.
242 modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
243 }
244
245 // Delete metaspaces for unloaded class loaders and clean up loader_data graph
246 ClassLoaderDataGraph::purge();
247 MetaspaceAux::verify_metrics();
248
// Post-GC fixups: restore object headers saved by biased locking, then run
// the code-cache and JVMTI GC epilogues.
249 BiasedLocking::restore_marks();
250 CodeCache::gc_epilogue();
251 JvmtiExport::gc_epilogue();
252
// Derived-pointer bookkeeping exists only in C2 / JVMCI builds.
253 #if defined(COMPILER2) || INCLUDE_JVMCI
254 DerivedPointerTable::update_pointers();
255 #endif
256
// Move discovered references onto their pending lists and log enqueue timings.
// NOTE(review): this copy constructs ReferenceProcessorPhaseTimes WITHOUT the
// processing_is_mt() argument that the earlier copy of this fragment passes —
// confirm which constructor signature the current headers declare.
257 ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q());
258
259 ref_processor()->enqueue_discovered_references(NULL, &pt);
260
261 pt.print_enqueue_phase();
262
263 // Update time of last GC
264 reset_millis_since_last_gc();
265
266 // Let the size policy know we're done
267 size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
268
269 if (UseAdaptiveSizePolicy) {
270
// Log generation capacities before any ergonomics (resize) decisions.
271 log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
272 log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
273 old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
274
275 // Don't check if the size_policy is ready here. Let
276 // the size_policy check that internally.
// The condition below continues past the end of this fragment.
277 if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
// --- Strong-root marking fragment, second copy (begins mid-scope; the opening
// of this root-scanning block is not visible in this view) ---
// Push strong roots from each VM subsystem onto the marking stack.
515 Threads::oops_do(mark_and_push_closure(), &each_active_code_blob);
516 ObjectSynchronizer::oops_do(mark_and_push_closure());
517 FlatProfiler::oops_do(mark_and_push_closure());
518 Management::oops_do(mark_and_push_closure());
519 JvmtiExport::oops_do(mark_and_push_closure());
520 SystemDictionary::always_strong_oops_do(mark_and_push_closure());
521 ClassLoaderDataGraph::always_strong_cld_do(follow_cld_closure());
522 // Do not treat nmethods as strong roots for mark/sweep, since we can unload them.
523 //CodeCache::scavenge_root_nmethods_do(CodeBlobToOopClosure(mark_and_push_closure()));
524 AOTLoader::oops_do(mark_and_push_closure());
525 }
526
527 // Flush marking stack.
528 follow_stack();
529
530 // Process reference objects found during marking
531 {
532 GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer);
533
// Soft-reference policy depends on whether this cycle was asked to clear
// all soft references.
534 ref_processor()->setup_policy(clear_all_softrefs);
// NOTE(review): this copy constructs ReferenceProcessorPhaseTimes WITHOUT the
// processing_is_mt() argument that the earlier copy of this fragment passes —
// confirm the ReferenceProcessorPhaseTimes constructor signature in use.
535 ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q());
536 const ReferenceProcessorStats& stats =
537 ref_processor()->process_discovered_references(
538 is_alive_closure(), mark_and_push_closure(), follow_stack_closure(), NULL, &pt);
// Report reference-processing statistics to the GC tracer and log per-kind timings.
539 gc_tracer()->report_gc_reference_stats(stats);
540 pt.print_all_references();
541 }
542
543 // This is the point where the entire marking should have completed.
544 assert(_marking_stack.is_empty(), "Marking should have completed");
545
546 {
547 GCTraceTime(Debug, gc, phases) t("Class Unloading", _gc_timer);
548
549 // Unload classes and purge the SystemDictionary.
550 bool purged_class = SystemDictionary::do_unloading(is_alive_closure(), _gc_timer);
551
552 // Unload nmethods.
553 CodeCache::do_unloading(is_alive_closure(), purged_class);
554
// Fragment ends here; the pruning call itself is outside this view.
555 // Prune dead klasses from subklass/sibling/implementor lists.
|