src/share/vm/gc_implementation/g1/concurrentMark.cpp

2146   // Clean up will have freed any regions completely full of garbage.
2147   // Update the soft reference policy with the new heap occupancy.
2148   Universe::update_heap_info_at_gc();
2149 
2150   if (VerifyDuringGC) {
2151     HandleMark hm;  // handle scope
2152     Universe::heap()->prepare_for_verify();
2153     Universe::verify(VerifyOption_G1UsePrevMarking,
2154                      " VerifyDuringGC:(after)");
2155   }
2156 
2157   g1h->check_bitmaps("Cleanup End");
2158 
2159   g1h->verify_region_sets_optional();
2160 
2161   // We need to make this be a "collection" so any collection pause that
2162   // races with it goes around and waits for completeCleanup to finish.
2163   g1h->increment_total_collections();
2164 
2165   // Clean out dead classes and update Metaspace sizes.
2166   ClassLoaderDataGraph::purge();
2167   MetaspaceGC::compute_new_size();
2168 
2169   // We reclaimed old regions so we should calculate the sizes to make
2170   // sure we update the old gen/space data.
2171   g1h->g1mm()->update_sizes();
2172 
2173   g1h->trace_heap_after_concurrent_cycle();
2174 }
2175 
2176 void ConcurrentMark::completeCleanup() {
2177   if (has_aborted()) return;
2178 
2179   G1CollectedHeap* g1h = G1CollectedHeap::heap();
2180 
2181   _cleanup_list.verify_optional();
2182   FreeRegionList tmp_free_list("Tmp Free List");
2183 
2184   if (G1ConcRegionFreeingVerbose) {
2185     gclog_or_tty->print_cr("G1ConcRegionFreeing [complete cleanup] : "
2186                            "cleanup list has %u entries",


2574       // entry on to the global mark stack. But let's do it again.
2575       set_has_overflown();
2576     }
2577 
2578     assert(rp->num_q() == active_workers, "why not");
2579 
2580     rp->enqueue_discovered_references(executor);
2581 
2582     rp->verify_no_references_recorded();
2583     assert(!rp->discovery_enabled(), "Post condition");
2584   }
2585 
2586   if (has_overflown()) {
2587     // We can not trust g1_is_alive if the marking stack overflowed
2588     return;
2589   }
2590 
2591   assert(_markStack.isEmpty(), "Marking should have completed");
2592 
2593   // Unload Klasses, String, Symbols, Code Cache, etc.
2594 
2595   G1RemarkGCTraceTime trace("Unloading", G1Log::finer());
2596 
2597   bool purged_classes;
2598 
2599   {
2600     G1RemarkGCTraceTime trace("System Dictionary Unloading", G1Log::finest());
2601     purged_classes = SystemDictionary::do_unloading(&g1_is_alive);
2602   }
2603 
2604   {
2605     G1RemarkGCTraceTime trace("Parallel Unloading", G1Log::finest());
2606     weakRefsWorkParallelPart(&g1_is_alive, purged_classes);
2607     weakRefsWorkParallelPart(&g1_is_alive, purged_classes);
2608 
2609   if (G1StringDedup::is_enabled()) {
2610     G1RemarkGCTraceTime trace("String Deduplication Unlink", G1Log::finest());
2611     G1StringDedup::unlink(&g1_is_alive);
2612   }
2613 }
2614 
2615 void ConcurrentMark::swapMarkBitMaps() {
2616   CMBitMapRO* temp = _prevMarkBitMap;
2617   _prevMarkBitMap  = (CMBitMapRO*)_nextMarkBitMap;
2618   _nextMarkBitMap  = (CMBitMap*)  temp;
2619 }
2620 
2621 class CMObjectClosure;
2622 
2623 // Closure for iterating over objects, currently only used for
2624 // processing SATB buffers.
2625 class CMObjectClosure : public ObjectClosure {
2626 private:
2627   CMTask* _task;
2628 
2629 public:
2630   void do_object(oop obj) {
2631     _task->deal_with_reference(obj);

The same lines in the new version, with the G1ClassUnloadingEnabled guards added:

2146   // Clean up will have freed any regions completely full of garbage.
2147   // Update the soft reference policy with the new heap occupancy.
2148   Universe::update_heap_info_at_gc();
2149 
2150   if (VerifyDuringGC) {
2151     HandleMark hm;  // handle scope
2152     Universe::heap()->prepare_for_verify();
2153     Universe::verify(VerifyOption_G1UsePrevMarking,
2154                      " VerifyDuringGC:(after)");
2155   }
2156 
2157   g1h->check_bitmaps("Cleanup End");
2158 
2159   g1h->verify_region_sets_optional();
2160 
2161   // We need to make this be a "collection" so any collection pause that
2162   // races with it goes around and waits for completeCleanup to finish.
2163   g1h->increment_total_collections();
2164 
2165   // Clean out dead classes and update Metaspace sizes.
2166   if (G1ClassUnloadingEnabled) {
2167     ClassLoaderDataGraph::purge();
2168   }
2169   MetaspaceGC::compute_new_size();
2170 
2171   // We reclaimed old regions so we should calculate the sizes to make
2172   // sure we update the old gen/space data.
2173   g1h->g1mm()->update_sizes();
2174 
2175   g1h->trace_heap_after_concurrent_cycle();
2176 }
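
In the new version above, ClassLoaderDataGraph::purge() runs only when G1ClassUnloadingEnabled is set, while MetaspaceGC::compute_new_size() still runs either way. A minimal standalone sketch of that shape (not HotSpot code; every name below is a stand-in):

  #include <algorithm>
  #include <cstdio>
  #include <vector>

  static bool ClassUnloadingEnabled = true;   // stand-in for the G1ClassUnloadingEnabled flag
  static std::vector<bool> loader_data;       // true = metadata entry still reachable

  // Stand-in for ClassLoaderDataGraph::purge(): drop entries that died
  // during the concurrent cycle.
  static void purge_dead_metadata() {
    loader_data.erase(std::remove(loader_data.begin(), loader_data.end(), false),
                      loader_data.end());
  }

  // Stand-in for MetaspaceGC::compute_new_size(): capacity targets are
  // recomputed whether or not anything was purged.
  static void compute_new_size() {
    printf("metadata entries remaining: %zu\n", loader_data.size());
  }

  int main() {
    loader_data.push_back(true);
    loader_data.push_back(false);
    if (ClassUnloadingEnabled) {              // the guard this change introduces
      purge_dead_metadata();
    }
    compute_new_size();
    return 0;
  }
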
2177 
2178 void ConcurrentMark::completeCleanup() {
2179   if (has_aborted()) return;
2180 
2181   G1CollectedHeap* g1h = G1CollectedHeap::heap();
2182 
2183   _cleanup_list.verify_optional();
2184   FreeRegionList tmp_free_list("Tmp Free List");
2185 
2186   if (G1ConcRegionFreeingVerbose) {
2187     gclog_or_tty->print_cr("G1ConcRegionFreeing [complete cleanup] : "
2188                            "cleanup list has %u entries",


2576       // entry on to the global mark stack. But let's do it again.
2577       set_has_overflown();
2578     }
2579 
2580     assert(rp->num_q() == active_workers, "why not");
2581 
2582     rp->enqueue_discovered_references(executor);
2583 
2584     rp->verify_no_references_recorded();
2585     assert(!rp->discovery_enabled(), "Post condition");
2586   }
2587 
2588   if (has_overflown()) {
2589     // We can not trust g1_is_alive if the marking stack overflowed
2590     return;
2591   }
2592 
2593   assert(_markStack.isEmpty(), "Marking should have completed");
2594 
2595   // Unload Klasses, String, Symbols, Code Cache, etc.
2596   {
2597     G1RemarkGCTraceTime trace("Unloading", G1Log::finer());
2598 
2599     if (G1ClassUnloadingEnabled) {
2600       bool purged_classes;
2601 
2602       {
2603         G1RemarkGCTraceTime trace("System Dictionary Unloading", G1Log::finest());
2604         purged_classes = SystemDictionary::do_unloading(&g1_is_alive);
2605       }
2606 
2607       {
2608         G1RemarkGCTraceTime trace("Parallel Unloading", G1Log::finest());
2609         weakRefsWorkParallelPart(&g1_is_alive, purged_classes);
2610       }
2611     }
2612 
2613     if (G1StringDedup::is_enabled()) {
2614       G1RemarkGCTraceTime trace("String Deduplication Unlink", G1Log::finest());
2615       G1StringDedup::unlink(&g1_is_alive);
2616     }
2617   }
2618 }
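
The unloading passes above are all driven by the same g1_is_alive closure: each pass walks its own table and drops entries whose referents the closure reports dead, and SystemDictionary::do_unloading() also returns whether it purged anything, which is then handed to weakRefsWorkParallelPart(). A standalone sketch of that closure-driven shape (not HotSpot code; the table and types below are made up):

  #include <cstdio>
  #include <set>
  #include <vector>

  // Mimics the shape of the is-alive closures used above: a liveness predicate.
  struct IsAliveClosure {
    virtual ~IsAliveClosure() {}
    virtual bool do_object_b(void* obj) = 0;
  };

  // Stand-in for g1_is_alive: "live" means present in a set rather than
  // marked on the bitmaps.
  struct SetIsAlive : IsAliveClosure {
    std::set<void*> live;
    bool do_object_b(void* obj) { return live.count(obj) != 0; }
  };

  // Stand-in for a dictionary-style unloading pass: remove entries with dead
  // referents and report whether anything was purged.
  static bool do_unloading(std::vector<void*>& table, IsAliveClosure* is_alive) {
    bool purged = false;
    for (size_t i = 0; i < table.size(); ) {
      if (!is_alive->do_object_b(table[i])) {
        table[i] = table.back();
        table.pop_back();
        purged = true;
      } else {
        ++i;
      }
    }
    return purged;
  }

  int main() {
    int a = 0, b = 0;
    std::vector<void*> table;
    table.push_back(&a);
    table.push_back(&b);

    SetIsAlive is_alive;
    is_alive.live.insert(&a);               // only 'a' is still considered live
    bool purged_entries = do_unloading(table, &is_alive);
    printf("purged=%d remaining=%zu\n", (int)purged_entries, table.size());
    return 0;
  }
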
2619 
2620 void ConcurrentMark::swapMarkBitMaps() {
2621   CMBitMapRO* temp = _prevMarkBitMap;
2622   _prevMarkBitMap  = (CMBitMapRO*)_nextMarkBitMap;
2623   _nextMarkBitMap  = (CMBitMap*)  temp;
2624 }
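
swapMarkBitMaps() is plain double buffering: once marking completes, the freshly built "next" bitmap becomes the read-only "prev" bitmap, and the previous one is recycled for the following cycle (it is cleared elsewhere). A standalone sketch of the same idea (not HotSpot code):

  #include <bitset>
  #include <cstdio>

  typedef std::bitset<64> MarkBitMap;      // stand-in for CMBitMap

  struct ConcurrentMarker {
    const MarkBitMap* prev_bitmap;         // read-only view, like CMBitMapRO*
    MarkBitMap*       next_bitmap;         // being built, like CMBitMap*

    // Same pointer swap as ConcurrentMark::swapMarkBitMaps(), including the
    // cast between the read-only and writable views.
    void swap_bitmaps() {
      const MarkBitMap* temp = prev_bitmap;
      prev_bitmap = next_bitmap;
      next_bitmap = const_cast<MarkBitMap*>(temp);
    }
  };

  int main() {
    MarkBitMap a, b;
    ConcurrentMarker cm = { &a, &b };
    cm.next_bitmap->set(5);                // mark an object during this cycle
    cm.swap_bitmaps();                     // completed marking becomes the "prev" view
    printf("bit 5 visible through prev after swap: %d\n", (int)cm.prev_bitmap->test(5));
    return 0;
  }
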
2625 
2626 class CMObjectClosure;
2627 
2628 // Closure for iterating over objects, currently only used for
2629 // processing SATB buffers.
2630 class CMObjectClosure : public ObjectClosure {
2631 private:
2632   CMTask* _task;
2633 
2634 public:
2635   void do_object(oop obj) {
2636     _task->deal_with_reference(obj);