// NOTE(review): fragment of a HotSpot full-GC epilogue (PSMarkSweep-style);
// leading numerals on each line are original-file line numbers preserved by
// the extraction and are not part of the C++ code. Fragment starts and ends
// mid-function.
// Reset the old-gen card table: when the young gen is empty no old->young
// pointers can remain, so the cards can be cleared outright; otherwise all
// cards are invalidated (dirtied) — presumably so the next scavenge rescans
// the whole old gen. TODO confirm against PSCardTable semantics.
240 PSCardTable* card_table = heap->card_table();
241 MemRegion old_mr = heap->old_gen()->reserved();
242 if (young_gen_empty) {
243 card_table->clear(MemRegion(old_mr.start(), old_mr.end()));
244 } else {
245 card_table->invalidate(MemRegion(old_mr.start(), old_mr.end()));
246 }
247
248 // Delete metaspaces for unloaded class loaders and clean up loader_data graph
249 ClassLoaderDataGraph::purge();
250 MetaspaceUtils::verify_metrics();
251
// Post-GC fixups for biased locking, the code cache, and JVMTI.
252 BiasedLocking::restore_marks();
253 CodeCache::gc_epilogue();
254 JvmtiExport::gc_epilogue();
255
// Derived-pointer bookkeeping only exists in C2/JVMCI builds.
256 #if COMPILER2_OR_JVMCI
257 DerivedPointerTable::update_pointers();
258 #endif
259
// Move discovered references onto their pending lists; NULL executor
// presumably selects single-threaded enqueueing — TODO confirm.
// NOTE(review): this copy uses num_q(); the second copy below uses
// num_queues() — looks like a before/after pair of the JDK rename.
260 ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q());
261
262 ref_processor()->enqueue_discovered_references(NULL, &pt);
263
264 pt.print_enqueue_phase();
265
266 // Update time of last GC
267 reset_millis_since_last_gc();
268
269 // Let the size policy know we're done
270 size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
271
// Adaptive sizing: log the current generation capacities, then (below, cut
// off by the extraction) resize the generations.
272 if (UseAdaptiveSizePolicy) {
273
274 log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
275 log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
276 old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
277
278 // Don't check if the size_policy is ready here. Let
279 // the size_policy check that internally.
// Condition is truncated here — the fragment jumps from original line 280
// to line 519 of the source file.
280 if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
// NOTE(review): fragment of the full-GC marking phase (original lines
// 519-559). Starts mid-scope: the opening of the strong-roots block is not
// visible here.
// Scan all strong roots with the mark-and-push closure; on-stack nmethods
// are marked (but not fixed up — FixRelocations is negated).
519 MarkingCodeBlobClosure each_active_code_blob(mark_and_push_closure(), !CodeBlobToOopClosure::FixRelocations);
520 Threads::oops_do(mark_and_push_closure(), &each_active_code_blob);
521 ObjectSynchronizer::oops_do(mark_and_push_closure());
522 Management::oops_do(mark_and_push_closure());
523 JvmtiExport::oops_do(mark_and_push_closure());
524 SystemDictionary::always_strong_oops_do(mark_and_push_closure());
525 ClassLoaderDataGraph::always_strong_cld_do(follow_cld_closure());
526 // Do not treat nmethods as strong roots for mark/sweep, since we can unload them.
527 //CodeCache::scavenge_root_nmethods_do(CodeBlobToOopClosure(mark_and_push_closure()));
528 AOTLoader::oops_do(mark_and_push_closure());
529 }
530
531 // Flush marking stack.
532 follow_stack();
533
534 // Process reference objects found during marking
// Soft/weak/final/phantom references are processed under the policy chosen
// by clear_all_softrefs; marking is completed transitively via the
// follow-stack closure.
535 {
536 GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer);
537
538 ref_processor()->setup_policy(clear_all_softrefs);
// NOTE(review): num_q() here vs num_queues() in the second copy below —
// looks like the before side of the JDK rename.
539 ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_q());
540 const ReferenceProcessorStats& stats =
541 ref_processor()->process_discovered_references(
542 is_alive_closure(), mark_and_push_closure(), follow_stack_closure(), NULL, &pt);
543 gc_tracer()->report_gc_reference_stats(stats);
544 pt.print_all_references();
545 }
546
547 // This is the point where the entire marking should have completed.
548 assert(_marking_stack.is_empty(), "Marking should have completed");
549
// Clear out dead entries in VM-internal weak oop storages; do_nothing_cl
// because live entries need no further work here.
550 {
551 GCTraceTime(Debug, gc, phases) t("Weak Processing", _gc_timer);
552 WeakProcessor::weak_oops_do(is_alive_closure(), &do_nothing_cl);
553 }
554
// Class unloading begins here; the fragment is cut off immediately after
// SystemDictionary::do_unloading (purged_class is consumed past this view).
555 {
556 GCTraceTime(Debug, gc, phases) t("Class Unloading", _gc_timer);
557
558 // Unload classes and purge the SystemDictionary.
559 bool purged_class = SystemDictionary::do_unloading(is_alive_closure(), _gc_timer);
|
// NOTE(review): second copy of the full-GC epilogue fragment above. It is
// byte-identical except that ref_processor()->num_q() has become
// num_queues() — this reads as the "after" side of the JDK accessor rename.
// Leading numerals are original-file line numbers from the extraction.
// Reset the old-gen card table: clear when the young gen is empty (no
// old->young pointers can remain), otherwise invalidate all cards —
// presumably so the next scavenge rescans the old gen. TODO confirm.
240 PSCardTable* card_table = heap->card_table();
241 MemRegion old_mr = heap->old_gen()->reserved();
242 if (young_gen_empty) {
243 card_table->clear(MemRegion(old_mr.start(), old_mr.end()));
244 } else {
245 card_table->invalidate(MemRegion(old_mr.start(), old_mr.end()));
246 }
247
248 // Delete metaspaces for unloaded class loaders and clean up loader_data graph
249 ClassLoaderDataGraph::purge();
250 MetaspaceUtils::verify_metrics();
251
// Post-GC fixups for biased locking, the code cache, and JVMTI.
252 BiasedLocking::restore_marks();
253 CodeCache::gc_epilogue();
254 JvmtiExport::gc_epilogue();
255
// Derived-pointer bookkeeping only exists in C2/JVMCI builds.
256 #if COMPILER2_OR_JVMCI
257 DerivedPointerTable::update_pointers();
258 #endif
259
// Move discovered references onto their pending lists; NULL executor
// presumably selects single-threaded enqueueing — TODO confirm.
260 ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_queues());
261
262 ref_processor()->enqueue_discovered_references(NULL, &pt);
263
264 pt.print_enqueue_phase();
265
266 // Update time of last GC
267 reset_millis_since_last_gc();
268
269 // Let the size policy know we're done
270 size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
271
// Adaptive sizing: log current generation capacities; the resizing logic
// that follows is cut off by the extraction.
272 if (UseAdaptiveSizePolicy) {
273
274 log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
275 log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
276 old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
277
278 // Don't check if the size_policy is ready here. Let
279 // the size_policy check that internally.
// Condition truncated — the fragment jumps from original line 280 to 519.
280 if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
// NOTE(review): second copy of the marking-phase fragment above, identical
// except num_q() -> num_queues() (the "after" side of the JDK rename).
// Starts mid-scope; the opening of the strong-roots block is not visible.
// Scan all strong roots with the mark-and-push closure; on-stack nmethods
// are marked (but not fixed up — FixRelocations is negated).
519 MarkingCodeBlobClosure each_active_code_blob(mark_and_push_closure(), !CodeBlobToOopClosure::FixRelocations);
520 Threads::oops_do(mark_and_push_closure(), &each_active_code_blob);
521 ObjectSynchronizer::oops_do(mark_and_push_closure());
522 Management::oops_do(mark_and_push_closure());
523 JvmtiExport::oops_do(mark_and_push_closure());
524 SystemDictionary::always_strong_oops_do(mark_and_push_closure());
525 ClassLoaderDataGraph::always_strong_cld_do(follow_cld_closure());
526 // Do not treat nmethods as strong roots for mark/sweep, since we can unload them.
527 //CodeCache::scavenge_root_nmethods_do(CodeBlobToOopClosure(mark_and_push_closure()));
528 AOTLoader::oops_do(mark_and_push_closure());
529 }
530
531 // Flush marking stack.
532 follow_stack();
533
534 // Process reference objects found during marking
// Soft/weak/final/phantom references processed under the policy chosen by
// clear_all_softrefs; marking completed transitively via follow-stack.
535 {
536 GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer);
537
538 ref_processor()->setup_policy(clear_all_softrefs);
539 ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->num_queues());
540 const ReferenceProcessorStats& stats =
541 ref_processor()->process_discovered_references(
542 is_alive_closure(), mark_and_push_closure(), follow_stack_closure(), NULL, &pt);
543 gc_tracer()->report_gc_reference_stats(stats);
544 pt.print_all_references();
545 }
546
547 // This is the point where the entire marking should have completed.
548 assert(_marking_stack.is_empty(), "Marking should have completed");
549
// Clear out dead entries in VM-internal weak oop storages; do_nothing_cl
// because live entries need no further work here.
550 {
551 GCTraceTime(Debug, gc, phases) t("Weak Processing", _gc_timer);
552 WeakProcessor::weak_oops_do(is_alive_closure(), &do_nothing_cl);
553 }
554
// Class unloading begins here; the fragment is cut off immediately after
// SystemDictionary::do_unloading (purged_class is consumed past this view).
555 {
556 GCTraceTime(Debug, gc, phases) t("Class Unloading", _gc_timer);
557
558 // Unload classes and purge the SystemDictionary.
559 bool purged_class = SystemDictionary::do_unloading(is_alive_closure(), _gc_timer);
|