
src/hotspot/share/gc/g1/g1ConcurrentMark.cpp

rev 49502 : imported patch 8199742-collectorstate-fixes
rev 49504 : imported patch 8151171-renamings
rev 49505 : [mq]: 8151171-stefanj-review
rev 49506 : imported patch 8200234-g1concurrentmark-refactorings
rev 49507 : imported patch 8200234-stefanj-review
rev 49510 : 8200255: Remove G1CMTask::_concurrent
Reviewed-by: sangheki, sjohanss
rev 49511 : 8200074: Remove G1ConcurrentMark::_concurrent_marking_in_progress
Reviewed-by: sjohanss, sangheki
rev 49512 : imported patch 8200305-gc,liveness-output
rev 49513 : [mq]: 8200385-prev-bitmap-marks-left
rev 49514 : [mq]: 8200385-stefanj-review
rev 49515 : imported patch 8178105-switch-at-remark
rev 49516 : [mq]: 8154528-reclaim-at-remark

*** 1130,1139 ****
--- 1130,1151 ----
        G1UpdateRemSetTrackingBeforeRebuild cl(_g1h, this);
        _g1h->heap_region_iterate(&cl);
        log_debug(gc, remset, tracking)("Remembered Set Tracking update regions total %u, selected %u",
                                        _g1h->num_regions(), cl.num_selected_for_rebuild());
      }
  
+     {
+       GCTraceTime(Debug, gc, phases)("Reclaim Empty Regions");
+       reclaim_empty_regions();
+     }
+ 
+     // Clean out dead classes
+     if (ClassUnloadingWithConcurrentMark) {
+       GCTraceTime(Debug, gc, phases)("Purge Metaspace");
+       ClassLoaderDataGraph::purge();
+     }
+ 
+     compute_new_sizes();
  
      verify_during_pause(G1HeapVerifier::G1VerifyRemark, VerifyOption_G1UsePrevMarking, "Remark after");
  
      assert(!restart_for_overflow(), "sanity");
      // Completely reset the marking state since marking completed
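The blocks added to the remark pause follow HotSpot's usual GCTraceTime pattern: a stack-allocated timer that, when it goes out of scope, logs the block's title and elapsed time under the given tags (here gc+phases at Debug level). Below is a minimal standalone sketch of that RAII idiom in plain C++, using hypothetical names rather than HotSpot's actual GCTraceTime machinery.

#include <chrono>
#include <cstdio>

// Hypothetical stand-in for the scoped GCTraceTime timers used above:
// start a clock on construction, print the elapsed time of the enclosing
// block on destruction. Class and function names are illustrative only.
class ScopedPhaseTimer {
  const char* _title;
  std::chrono::steady_clock::time_point _start;
public:
  explicit ScopedPhaseTimer(const char* title)
    : _title(title), _start(std::chrono::steady_clock::now()) {}
  ~ScopedPhaseTimer() {
    double ms = std::chrono::duration<double, std::milli>(
        std::chrono::steady_clock::now() - _start).count();
    std::printf("  %s %.3fms\n", _title, ms);
  }
};

void remark_cleanup_phases_sketch() {
  {
    ScopedPhaseTimer t("Reclaim Empty Regions");
    // work done here is attributed to this phase only
  }
  {
    ScopedPhaseTimer t("Purge Metaspace");
    // conditional class-unloading work would go here
  }
}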
*** 1268,1277 ****
--- 1280,1301 ----
      // And actually make them available.
      _g1h->prepend_to_freelist(&empty_regions_list);
    }
  }
  
+ void G1ConcurrentMark::compute_new_sizes() {
+   MetaspaceGC::compute_new_size();
+ 
+   // Cleanup will have freed any regions completely full of garbage.
+   // Update the soft reference policy with the new heap occupancy.
+   Universe::update_heap_info_at_gc();
+ 
+   // We reclaimed old regions so we should calculate the sizes to make
+   // sure we update the old gen/space data.
+   _g1h->g1mm()->update_sizes();
+ }
+ 
  void G1ConcurrentMark::cleanup() {
    assert_at_safepoint_on_vm_thread();
  
    // If a full collection has happened, we shouldn't do this.
    if (has_aborted()) {
*** 1294,1323 ****
--- 1318,1327 ----
    if (log_is_enabled(Trace, gc, liveness)) {
      G1PrintRegionLivenessInfoClosure cl("Post-Cleanup");
      _g1h->heap_region_iterate(&cl);
    }
  
-   {
-     GCTraceTime(Debug, gc, phases)("Reclaim Empty Regions");
-     reclaim_empty_regions();
-   }
- 
-   // Cleanup will have freed any regions completely full of garbage.
-   // Update the soft reference policy with the new heap occupancy.
-   Universe::update_heap_info_at_gc();
- 
-   // Clean out dead classes and update Metaspace sizes.
-   if (ClassUnloadingWithConcurrentMark) {
-     GCTraceTime(Debug, gc, phases)("Purge Metaspace");
-     ClassLoaderDataGraph::purge();
-   }
-   MetaspaceGC::compute_new_size();
- 
-   // We reclaimed old regions so we should calculate the sizes to make
-   // sure we update the old gen/space data.
-   _g1h->g1mm()->update_sizes();
- 
    verify_during_pause(G1HeapVerifier::G1VerifyCleanup, VerifyOption_G1UsePrevMarking, "Cleanup after");
  
    // We need to make this be a "collection" so any collection pause that
    // races with it goes around and waits for Cleanup to finish.
    _g1h->increment_total_collections();