src/share/vm/gc_implementation/g1/concurrentMark.cpp

2150   // Clean up will have freed any regions completely full of garbage.
2151   // Update the soft reference policy with the new heap occupancy.
2152   Universe::update_heap_info_at_gc();
2153 
2154   if (VerifyDuringGC) {
2155     HandleMark hm;  // handle scope
2156     Universe::heap()->prepare_for_verify();
2157     Universe::verify(VerifyOption_G1UsePrevMarking,
2158                      " VerifyDuringGC:(after)");
2159   }
2160 
2161   g1h->check_bitmaps("Cleanup End");
2162 
2163   g1h->verify_region_sets_optional();
2164 
2165   // We need to make this a "collection" so that any collection pause that
2166   // races with it goes around and waits for completeCleanup to finish.
2167   g1h->increment_total_collections();
2168 
2169   // Clean out dead classes and update Metaspace sizes.
2170   if (G1ClassUnloadingEnabled) {
2171     ClassLoaderDataGraph::purge();
2172   }
2173   MetaspaceGC::compute_new_size();
2174 
2175   // We reclaimed old regions, so we should recalculate the sizes to make
2176   // sure the old gen/space data is updated.
2177   g1h->g1mm()->update_sizes();
2178 
2179   g1h->trace_heap_after_concurrent_cycle();
2180 }
2181 
2182 void ConcurrentMark::completeCleanup() {
2183   if (has_aborted()) return;
2184 
2185   G1CollectedHeap* g1h = G1CollectedHeap::heap();
2186 
2187   _cleanup_list.verify_optional();
2188   FreeRegionList tmp_free_list("Tmp Free List");
2189 
2190   if (G1ConcRegionFreeingVerbose) {
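
The comment at lines 2165-2166 above is the subtle part of this hunk: by bumping the total-collections counter, the cleanup makes itself look like a collection, so a pause request that sampled the counter before the cleanup ran can notice the change and go around rather than run against stale state. A minimal standalone sketch of that counter-check idea (hypothetical names, not G1's actual pause code):

#include <atomic>

// Sketch only: a shared, monotonically increasing collection counter.
static std::atomic<unsigned> total_collections{0};

// Cleanup counts itself as a "collection" by bumping the counter.
void cleanup_end() {
  // ... free the regions that were completely full of garbage ...
  total_collections.fetch_add(1, std::memory_order_release);
}

// A pause request samples the counter up front; if the value has moved
// by the time the pause actually gets to run, a cleanup raced with it,
// so it returns and lets the caller go around and retry.
bool try_collection_pause() {
  unsigned count_before = total_collections.load(std::memory_order_acquire);
  // ... reach the safepoint / get scheduled ...
  if (total_collections.load(std::memory_order_acquire) != count_before) {
    return false;  // lost the race; caller retries the pause
  }
  // ... do the pause ...
  return true;
}
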


2585 
2586     assert(rp->num_q() == active_workers, "why not");
2587 
2588     rp->enqueue_discovered_references(executor);
2589 
2590     rp->verify_no_references_recorded();
2591     assert(!rp->discovery_enabled(), "Post condition");
2592   }
2593 
2594   if (has_overflown()) {
2595     // We cannot trust g1_is_alive if the marking stack overflowed
2596     return;
2597   }
2598 
2599   assert(_markStack.isEmpty(), "Marking should have completed");
2600 
2601   // Unload Klasses, String, Symbols, Code Cache, etc.
2602   {
2603     G1RemarkGCTraceTime trace("Unloading", G1Log::finer());
2604 
2605     if (G1ClassUnloadingEnabled) {
2606       bool purged_classes;
2607 
2608       {
2609         G1RemarkGCTraceTime trace("System Dictionary Unloading", G1Log::finest());
2610         purged_classes = SystemDictionary::do_unloading(&g1_is_alive);
2611       }
2612 
2613       {
2614         G1RemarkGCTraceTime trace("Parallel Unloading", G1Log::finest());
2615         weakRefsWorkParallelPart(&g1_is_alive, purged_classes);
2616       }
2617     }
2618 
2619     if (G1StringDedup::is_enabled()) {
2620       G1RemarkGCTraceTime trace("String Deduplication Unlink", G1Log::finest());
2621       G1StringDedup::unlink(&g1_is_alive);
2622     }
2623   }
2624 }
2625 
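
Each nested block in the unloading section above ("Unloading", "System Dictionary Unloading", "Parallel Unloading", "String Deduplication Unlink") constructs a G1RemarkGCTraceTime on the stack, so the phase is timed for exactly the lifetime of that scope and the end-of-phase report happens even on an early exit. A minimal sketch of the RAII pattern involved (ScopedPhaseTimer is a stand-in, not the real G1RemarkGCTraceTime):

#include <chrono>
#include <cstdio>

// Logs the phase name on entry and the elapsed time on scope exit.
class ScopedPhaseTimer {
 public:
  explicit ScopedPhaseTimer(const char* name)
      : _name(name), _start(std::chrono::steady_clock::now()) {
    std::printf("[%s: start]\n", _name);
  }
  ~ScopedPhaseTimer() {
    long long ms = std::chrono::duration_cast<std::chrono::milliseconds>(
        std::chrono::steady_clock::now() - _start).count();
    std::printf("[%s: %lld ms]\n", _name, ms);
  }
 private:
  const char* _name;
  std::chrono::steady_clock::time_point _start;
};

void unloading_phases() {
  ScopedPhaseTimer outer("Unloading");  // outer phase spans the whole function
  {
    ScopedPhaseTimer t("System Dictionary Unloading");
    // ... unload classes that the marking found to be dead ...
  }
  {
    ScopedPhaseTimer t("Parallel Unloading");
    // ... clean up dependent metadata, possibly across worker threads ...
  }
}

The scoped form is what lets the hunk keep each sub-phase's timing local to its own braces instead of threading explicit start/stop calls through the code.
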




2150   // Clean up will have freed any regions completely full of garbage.
2151   // Update the soft reference policy with the new heap occupancy.
2152   Universe::update_heap_info_at_gc();
2153 
2154   if (VerifyDuringGC) {
2155     HandleMark hm;  // handle scope
2156     Universe::heap()->prepare_for_verify();
2157     Universe::verify(VerifyOption_G1UsePrevMarking,
2158                      " VerifyDuringGC:(after)");
2159   }
2160 
2161   g1h->check_bitmaps("Cleanup End");
2162 
2163   g1h->verify_region_sets_optional();
2164 
2165   // We need to make this a "collection" so that any collection pause that
2166   // races with it goes around and waits for completeCleanup to finish.
2167   g1h->increment_total_collections();
2168 
2169   // Clean out dead classes and update Metaspace sizes.
2170   if (ClassUnloadingWithConcurrentMark) {
2171     ClassLoaderDataGraph::purge();
2172   }
2173   MetaspaceGC::compute_new_size();
2174 
2175   // We reclaimed old regions, so we should recalculate the sizes to make
2176   // sure the old gen/space data is updated.
2177   g1h->g1mm()->update_sizes();
2178 
2179   g1h->trace_heap_after_concurrent_cycle();
2180 }
2181 
2182 void ConcurrentMark::completeCleanup() {
2183   if (has_aborted()) return;
2184 
2185   G1CollectedHeap* g1h = G1CollectedHeap::heap();
2186 
2187   _cleanup_list.verify_optional();
2188   FreeRegionList tmp_free_list("Tmp Free List");
2189 
2190   if (G1ConcRegionFreeingVerbose) {


2585 
2586     assert(rp->num_q() == active_workers, "why not");
2587 
2588     rp->enqueue_discovered_references(executor);
2589 
2590     rp->verify_no_references_recorded();
2591     assert(!rp->discovery_enabled(), "Post condition");
2592   }
2593 
2594   if (has_overflown()) {
2595     // We cannot trust g1_is_alive if the marking stack overflowed
2596     return;
2597   }
2598 
2599   assert(_markStack.isEmpty(), "Marking should have completed");
2600 
2601   // Unload Klasses, String, Symbols, Code Cache, etc.
2602   {
2603     G1RemarkGCTraceTime trace("Unloading", G1Log::finer());
2604 
2605     if (ClassUnloadingWithConcurrentMark) {
2606       bool purged_classes;
2607 
2608       {
2609         G1RemarkGCTraceTime trace("System Dictionary Unloading", G1Log::finest());
2610         purged_classes = SystemDictionary::do_unloading(&g1_is_alive);
2611       }
2612 
2613       {
2614         G1RemarkGCTraceTime trace("Parallel Unloading", G1Log::finest());
2615         weakRefsWorkParallelPart(&g1_is_alive, purged_classes);
2616       }
2617     }
2618 
2619     if (G1StringDedup::is_enabled()) {
2620       G1RemarkGCTraceTime trace("String Deduplication Unlink", G1Log::finest());
2621       G1StringDedup::unlink(&g1_is_alive);
2622     }
2623   }
2624 }
2625