< prev index next >

src/hotspot/share/gc/g1/g1CollectedHeap.cpp

Print this page
rev 47825 : 8189733: Cleanup Full GC setup and tear down
Reviewed-by:


  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/metadataOnStackMark.hpp"
  27 #include "classfile/stringTable.hpp"
  28 #include "classfile/symbolTable.hpp"
  29 #include "code/codeCache.hpp"
  30 #include "code/icBuffer.hpp"
  31 #include "gc/g1/bufferingOopClosure.hpp"
  32 #include "gc/g1/concurrentMarkThread.inline.hpp"
  33 #include "gc/g1/g1Allocator.inline.hpp"
  34 #include "gc/g1/g1CollectedHeap.inline.hpp"
  35 #include "gc/g1/g1CollectionSet.hpp"
  36 #include "gc/g1/g1CollectorPolicy.hpp"
  37 #include "gc/g1/g1CollectorState.hpp"
  38 #include "gc/g1/g1ConcurrentRefine.hpp"
  39 #include "gc/g1/g1ConcurrentRefineThread.hpp"
  40 #include "gc/g1/g1EvacStats.inline.hpp"
  41 #include "gc/g1/g1FullCollector.hpp"
  42 #include "gc/g1/g1FullGCScope.hpp"
  43 #include "gc/g1/g1GCPhaseTimes.hpp"
  44 #include "gc/g1/g1HeapSizingPolicy.hpp"
  45 #include "gc/g1/g1HeapTransition.hpp"
  46 #include "gc/g1/g1HeapVerifier.hpp"
  47 #include "gc/g1/g1HotCardCache.hpp"
  48 #include "gc/g1/g1OopClosures.inline.hpp"
  49 #include "gc/g1/g1ParScanThreadState.inline.hpp"
  50 #include "gc/g1/g1Policy.hpp"
  51 #include "gc/g1/g1RegionToSpaceMapper.hpp"
  52 #include "gc/g1/g1RemSet.hpp"
  53 #include "gc/g1/g1RootClosures.hpp"
  54 #include "gc/g1/g1RootProcessor.hpp"
  55 #include "gc/g1/g1StringDedup.hpp"
  56 #include "gc/g1/g1YCTypes.hpp"
  57 #include "gc/g1/g1YoungRemSetSamplingThread.hpp"
  58 #include "gc/g1/heapRegion.inline.hpp"
  59 #include "gc/g1/heapRegionRemSet.hpp"
  60 #include "gc/g1/heapRegionSet.inline.hpp"
  61 #include "gc/g1/vm_operations_g1.hpp"
  62 #include "gc/shared/gcHeapSummary.hpp"


1200   assert(!ref_processor_stw()->discovery_enabled(), "Postcondition");
1201   assert(!ref_processor_cm()->discovery_enabled(), "Postcondition");
1202   ref_processor_stw()->verify_no_references_recorded();
1203   ref_processor_cm()->verify_no_references_recorded();
1204 }
1205 
// Emits all post-Full-GC logging for this collection: the heap-region-manager
// state after compaction, the heap transition (printed via the passed-in
// G1HeapTransition), the generic post-GC heap summary, and per-region details.
// Must run after any heap resizing (see comment below).
1206 void G1CollectedHeap::print_heap_after_full_collection(G1HeapTransition* heap_transition) {
1207   // Post collection logging.
1208   // We should do this after we potentially resize the heap so
1209   // that all the COMMIT / UNCOMMIT events are generated before
1210   // the compaction events.
1211   print_hrm_post_compaction();
       // Prints how the heap changed across this collection — presumably
       // region counts/occupancy before vs. after; confirm in G1HeapTransition.
1212   heap_transition->print();
1213   print_heap_after_gc();
1214   print_heap_regions();
       // Diagnostic build option: dump work-termination spin statistics.
1215 #ifdef TRACESPINNING
1216   ParallelTaskTerminator::print_termination_counts();
1217 #endif
1218 }
1219 
// Runs one Full GC pause end-to-end, using the per-collection state carried
// by |scope|. The steps are strictly ordered and symmetric: timing/policy
// bookkeeping and pre-GC printing, shutting down the concurrent cycle,
// verification, prologue + heap preparation, the mark/compact phases via
// G1FullCollector, then mutator re-preparation, epilogue, verification and
// post-GC printing. Do not reorder these calls.
1220 void G1CollectedHeap::do_full_collection_inner(G1FullGCScope* scope) {
       // Scoped timer: logs the "Pause Full" message with the current GC cause.
1221   GCTraceTime(Info, gc) tm("Pause Full", NULL, gc_cause(), true);
1222   g1_policy()->record_full_collection_start();
1223 
1224   print_heap_before_gc();
1225   print_heap_regions();
1226 
       // Stop any in-progress concurrent (marking) cycle before compacting.
1227   abort_concurrent_cycle();
1228   verify_before_full_collection(scope->is_explicit_gc());
1229 
1230   gc_prologue(true);
1231   prepare_heap_for_full_collection();
1232 
       // The full collector reuses the STW reference processor and the next
       // marking bitmap, parallelized over the currently active workers.
1233   G1FullCollector collector(scope, ref_processor_stw(), concurrent_mark()->next_mark_bitmap(), workers()->active_workers());
1234   collector.prepare_collection();
1235   collector.collect();
1236   collector.complete_collection();
1237 
1238   prepare_heap_for_mutators();
1239 
1240   g1_policy()->record_full_collection_end();
1241   gc_epilogue(true);
1242 
1243   verify_after_full_collection();
1244 
1245   print_heap_after_full_collection(scope->heap_transition());
1246 }
1247 
// Attempts a Full GC. Must be called at a safepoint by the VM thread.
// Returns false if the GC locker is active and the collection was therefore
// not run; returns true once the collection has completed.
1248 bool G1CollectedHeap::do_full_collection(bool explicit_gc,
1249                                          bool clear_all_soft_refs) {
1250   assert_at_safepoint(true /* should_be_vm_thread */);
1251 
       // A held GC locker forbids collecting now; report failure to the caller.
1252   if (GCLocker::check_active_before_gc()) {
1253     // Full GC was not completed.
1254     return false;
1255   }
1256 
       // Clear soft refs if the caller asked for it OR policy demands it.
1257   const bool do_clear_all_soft_refs = clear_all_soft_refs ||
1258       collector_policy()->should_clear_all_soft_refs();
1259 
       // Scope object carrying per-Full-GC state (explicitness, soft-ref
       // clearing, heap transition) for the duration of the collection.
1260   G1FullGCScope scope(explicit_gc, do_clear_all_soft_refs);
1261   do_full_collection_inner(&scope);




1262 
1263   // Full collection was successfully completed.
1264   return true;
1265 }
1266 
1267 void G1CollectedHeap::do_full_collection(bool clear_all_soft_refs) {
1268   // Currently, there is no facility in the do_full_collection(bool) API to notify
1269   // the caller that the collection did not succeed (e.g., because it was locked
1270   // out by the GC locker). So, right now, we'll ignore the return value.
1271   bool dummy = do_full_collection(true,                /* explicit_gc */
1272                                   clear_all_soft_refs);
1273 }
1274 
1275 void G1CollectedHeap::resize_if_necessary_after_full_collection() {
1276   // Capacity, free and used after the GC counted as full regions to
1277   // include the waste in the following calculations.
1278   const size_t capacity_after_gc = capacity();
1279   const size_t used_after_gc = capacity_after_gc - unused_committed_regions_in_bytes();
1280 
1281   // This is enforced in arguments.cpp.




  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/metadataOnStackMark.hpp"
  27 #include "classfile/stringTable.hpp"
  28 #include "classfile/symbolTable.hpp"
  29 #include "code/codeCache.hpp"
  30 #include "code/icBuffer.hpp"
  31 #include "gc/g1/bufferingOopClosure.hpp"
  32 #include "gc/g1/concurrentMarkThread.inline.hpp"
  33 #include "gc/g1/g1Allocator.inline.hpp"
  34 #include "gc/g1/g1CollectedHeap.inline.hpp"
  35 #include "gc/g1/g1CollectionSet.hpp"
  36 #include "gc/g1/g1CollectorPolicy.hpp"
  37 #include "gc/g1/g1CollectorState.hpp"
  38 #include "gc/g1/g1ConcurrentRefine.hpp"
  39 #include "gc/g1/g1ConcurrentRefineThread.hpp"
  40 #include "gc/g1/g1EvacStats.inline.hpp"
  41 #include "gc/g1/g1FullCollector.hpp"

  42 #include "gc/g1/g1GCPhaseTimes.hpp"
  43 #include "gc/g1/g1HeapSizingPolicy.hpp"
  44 #include "gc/g1/g1HeapTransition.hpp"
  45 #include "gc/g1/g1HeapVerifier.hpp"
  46 #include "gc/g1/g1HotCardCache.hpp"
  47 #include "gc/g1/g1OopClosures.inline.hpp"
  48 #include "gc/g1/g1ParScanThreadState.inline.hpp"
  49 #include "gc/g1/g1Policy.hpp"
  50 #include "gc/g1/g1RegionToSpaceMapper.hpp"
  51 #include "gc/g1/g1RemSet.hpp"
  52 #include "gc/g1/g1RootClosures.hpp"
  53 #include "gc/g1/g1RootProcessor.hpp"
  54 #include "gc/g1/g1StringDedup.hpp"
  55 #include "gc/g1/g1YCTypes.hpp"
  56 #include "gc/g1/g1YoungRemSetSamplingThread.hpp"
  57 #include "gc/g1/heapRegion.inline.hpp"
  58 #include "gc/g1/heapRegionRemSet.hpp"
  59 #include "gc/g1/heapRegionSet.inline.hpp"
  60 #include "gc/g1/vm_operations_g1.hpp"
  61 #include "gc/shared/gcHeapSummary.hpp"


1199   assert(!ref_processor_stw()->discovery_enabled(), "Postcondition");
1200   assert(!ref_processor_cm()->discovery_enabled(), "Postcondition");
1201   ref_processor_stw()->verify_no_references_recorded();
1202   ref_processor_cm()->verify_no_references_recorded();
1203 }
1204 
// Post-Full-GC logging: region-manager state after compaction, the
// before/after heap transition, the standard post-GC heap summary, and
// per-region details. Intentionally runs after any resizing of the heap
// so commit/uncommit events are ordered before compaction events.
1205 void G1CollectedHeap::print_heap_after_full_collection(G1HeapTransition* heap_transition) {
1206   // Post collection logging.
1207   // We should do this after we potentially resize the heap so
1208   // that all the COMMIT / UNCOMMIT events are generated before
1209   // the compaction events.
1210   print_hrm_post_compaction();
1211   heap_transition->print();
1212   print_heap_after_gc();
1213   print_heap_regions();
       // TRACESPINNING builds additionally report termination spin counts.
1214 #ifdef TRACESPINNING
1215   ParallelTaskTerminator::print_termination_counts();
1216 #endif
1217 }
1218 




























// Attempts a Full GC at a safepoint (VM thread). Returns false when the
// GC locker prevents collecting; true after the collection completed.
// In this revision the G1FullCollector itself owns the setup/tear-down
// previously spread across helper calls.
1219 bool G1CollectedHeap::do_full_collection(bool explicit_gc,
1220                                          bool clear_all_soft_refs) {
1221   assert_at_safepoint(true /* should_be_vm_thread */);
1222 
       // A held GC locker forbids collecting now; report failure to the caller.
1223   if (GCLocker::check_active_before_gc()) {
1224     // Full GC was not completed.
1225     return false;
1226   }
1227 
       // Clear soft refs if the caller asked for it OR policy demands it.
1228   const bool do_clear_all_soft_refs = clear_all_soft_refs ||
1229       collector_policy()->should_clear_all_soft_refs();
1230 
       // NOTE(review): the collector is constructed before the GCTraceTime
       // timer, so any work done in the G1FullCollector constructor is not
       // included in the logged "Pause Full" time — confirm this is intended.
1231   G1FullCollector collector(this, explicit_gc, do_clear_all_soft_refs);
       // Scoped timer: logs the "Pause Full" message with the current GC cause.
1232   GCTraceTime(Info, gc) tm("Pause Full", NULL, gc_cause(), true);
1233 
1234   collector.prepare_collection();
1235   collector.collect();
1236   collector.complete_collection();
1237 
1238   // Full collection was successfully completed.
1239   return true;
1240 }
1241 
1242 void G1CollectedHeap::do_full_collection(bool clear_all_soft_refs) {
1243   // Currently, there is no facility in the do_full_collection(bool) API to notify
1244   // the caller that the collection did not succeed (e.g., because it was locked
1245   // out by the GC locker). So, right now, we'll ignore the return value.
1246   bool dummy = do_full_collection(true,                /* explicit_gc */
1247                                   clear_all_soft_refs);
1248 }
1249 
1250 void G1CollectedHeap::resize_if_necessary_after_full_collection() {
1251   // Capacity, free and used after the GC counted as full regions to
1252   // include the waste in the following calculations.
1253   const size_t capacity_after_gc = capacity();
1254   const size_t used_after_gc = capacity_after_gc - unused_committed_regions_in_bytes();
1255 
1256   // This is enforced in arguments.cpp.


< prev index next >