src/share/vm/gc_implementation/g1/concurrentMark.cpp

Diff excerpt of ConcurrentMark cleanup/weakRefsWork: original version (first listing) followed by the patched version (second listing), in which class unloading (ClassLoaderDataGraph::purge and SystemDictionary::do_unloading) is guarded by the ClassUnloadingWithConcurrentMark flag.




2150   // Clean up will have freed any regions completely full of garbage.
2151   // Update the soft reference policy with the new heap occupancy.
2152   Universe::update_heap_info_at_gc();
2153 
2154   if (VerifyDuringGC) {
2155     HandleMark hm;  // handle scope
2156     Universe::heap()->prepare_for_verify();
2157     Universe::verify(VerifyOption_G1UsePrevMarking,
2158                      " VerifyDuringGC:(after)");
2159   }
2160 
2161   g1h->check_bitmaps("Cleanup End");
2162 
2163   g1h->verify_region_sets_optional();
2164 
2165   // We need to make this be a "collection" so any collection pause that
2166   // races with it goes around and waits for completeCleanup to finish.
2167   g1h->increment_total_collections();
2168 
2169   // Clean out dead classes and update Metaspace sizes.

2170   ClassLoaderDataGraph::purge();

2171   MetaspaceGC::compute_new_size();
2172 
2173   // We reclaimed old regions so we should calculate the sizes to make
2174   // sure we update the old gen/space data.
2175   g1h->g1mm()->update_sizes();
2176 
2177   g1h->trace_heap_after_concurrent_cycle();
2178 }
2179 
2180 void ConcurrentMark::completeCleanup() {
2181   if (has_aborted()) return;
2182 
2183   G1CollectedHeap* g1h = G1CollectedHeap::heap();
2184 
2185   _cleanup_list.verify_optional();
2186   FreeRegionList tmp_free_list("Tmp Free List");
2187 
2188   if (G1ConcRegionFreeingVerbose) {
2189     gclog_or_tty->print_cr("G1ConcRegionFreeing [complete cleanup] : "
2190                            "cleanup list has %u entries",


2580       // entry on to the global mark stack. But let's do it again.
2581       set_has_overflown();
2582     }
2583 
2584     assert(rp->num_q() == active_workers, "why not");
2585 
2586     rp->enqueue_discovered_references(executor);
2587 
2588     rp->verify_no_references_recorded();
2589     assert(!rp->discovery_enabled(), "Post condition");
2590   }
2591 
2592   if (has_overflown()) {
2593     // We can not trust g1_is_alive if the marking stack overflowed
2594     return;
2595   }
2596 
2597   assert(_markStack.isEmpty(), "Marking should have completed");
2598 
2599   // Unload Klasses, String, Symbols, Code Cache, etc.
2600 
2601   G1RemarkGCTraceTime trace("Unloading", G1Log::finer());
2602 

2603   bool purged_classes;
2604 
2605   {
2606     G1RemarkGCTraceTime trace("System Dictionary Unloading", G1Log::finest());
2607     purged_classes = SystemDictionary::do_unloading(&g1_is_alive);
2608   }
2609 
2610   {
2611     G1RemarkGCTraceTime trace("Parallel Unloading", G1Log::finest());
2612     weakRefsWorkParallelPart(&g1_is_alive, purged_classes);
2613   }

2614 
2615   if (G1StringDedup::is_enabled()) {
2616     G1RemarkGCTraceTime trace("String Deduplication Unlink", G1Log::finest());
2617     G1StringDedup::unlink(&g1_is_alive);

2618   }
2619 }
2620 
2621 void ConcurrentMark::swapMarkBitMaps() {
2622   CMBitMapRO* temp = _prevMarkBitMap;
2623   _prevMarkBitMap  = (CMBitMapRO*)_nextMarkBitMap;
2624   _nextMarkBitMap  = (CMBitMap*)  temp;
2625 }
2626 
2627 class CMObjectClosure;
2628 
2629 // Closure for iterating over objects, currently only used for
2630 // processing SATB buffers.
2631 class CMObjectClosure : public ObjectClosure {
2632 private:
2633   CMTask* _task;
2634 
2635 public:
2636   void do_object(oop obj) {
2637     _task->deal_with_reference(obj);




2150   // Clean up will have freed any regions completely full of garbage.
2151   // Update the soft reference policy with the new heap occupancy.
2152   Universe::update_heap_info_at_gc();
2153 
2154   if (VerifyDuringGC) {
2155     HandleMark hm;  // handle scope
2156     Universe::heap()->prepare_for_verify();
2157     Universe::verify(VerifyOption_G1UsePrevMarking,
2158                      " VerifyDuringGC:(after)");
2159   }
2160 
2161   g1h->check_bitmaps("Cleanup End");
2162 
2163   g1h->verify_region_sets_optional();
2164 
2165   // We need to make this be a "collection" so any collection pause that
2166   // races with it goes around and waits for completeCleanup to finish.
2167   g1h->increment_total_collections();
2168 
2169   // Clean out dead classes and update Metaspace sizes.
2170   if (ClassUnloadingWithConcurrentMark) {
2171     ClassLoaderDataGraph::purge();
2172   }
2173   MetaspaceGC::compute_new_size();
2174 
2175   // We reclaimed old regions so we should calculate the sizes to make
2176   // sure we update the old gen/space data.
2177   g1h->g1mm()->update_sizes();
2178 
2179   g1h->trace_heap_after_concurrent_cycle();
2180 }
2181 
2182 void ConcurrentMark::completeCleanup() {
2183   if (has_aborted()) return;
2184 
2185   G1CollectedHeap* g1h = G1CollectedHeap::heap();
2186 
2187   _cleanup_list.verify_optional();
2188   FreeRegionList tmp_free_list("Tmp Free List");
2189 
2190   if (G1ConcRegionFreeingVerbose) {
2191     gclog_or_tty->print_cr("G1ConcRegionFreeing [complete cleanup] : "
2192                            "cleanup list has %u entries",


2582       // entry on to the global mark stack. But let's do it again.
2583       set_has_overflown();
2584     }
2585 
2586     assert(rp->num_q() == active_workers, "why not");
2587 
2588     rp->enqueue_discovered_references(executor);
2589 
2590     rp->verify_no_references_recorded();
2591     assert(!rp->discovery_enabled(), "Post condition");
2592   }
2593 
2594   if (has_overflown()) {
2595     // We can not trust g1_is_alive if the marking stack overflowed
2596     return;
2597   }
2598 
2599   assert(_markStack.isEmpty(), "Marking should have completed");
2600 
2601   // Unload Klasses, String, Symbols, Code Cache, etc.
2602   {
2603     G1RemarkGCTraceTime trace("Unloading", G1Log::finer());
2604 
2605     if (ClassUnloadingWithConcurrentMark) {
2606       bool purged_classes;
2607 
2608       {
2609         G1RemarkGCTraceTime trace("System Dictionary Unloading", G1Log::finest());
2610         purged_classes = SystemDictionary::do_unloading(&g1_is_alive);
2611       }
2612 
2613       {
2614         G1RemarkGCTraceTime trace("Parallel Unloading", G1Log::finest());
2615         weakRefsWorkParallelPart(&g1_is_alive, purged_classes);
2616       }
2617     }
2618 
2619     if (G1StringDedup::is_enabled()) {
2620       G1RemarkGCTraceTime trace("String Deduplication Unlink", G1Log::finest());
2621       G1StringDedup::unlink(&g1_is_alive);
2622     }
2623   }
2624 }
2625 
2626 void ConcurrentMark::swapMarkBitMaps() {
2627   CMBitMapRO* temp = _prevMarkBitMap;
2628   _prevMarkBitMap  = (CMBitMapRO*)_nextMarkBitMap;
2629   _nextMarkBitMap  = (CMBitMap*)  temp;
2630 }
2631 
2632 class CMObjectClosure;
2633 
2634 // Closure for iterating over objects, currently only used for
2635 // processing SATB buffers.
2636 class CMObjectClosure : public ObjectClosure {
2637 private:
2638   CMTask* _task;
2639 
2640 public:
2641   void do_object(oop obj) {
2642     _task->deal_with_reference(obj);