
src/hotspot/share/gc/g1/g1CollectedHeap.hpp

rev 53988 : imported patch 8219100-cleanup-young-collection-prologue

@@ -355,10 +355,12 @@
     assert_at_safepoint();                                                    \
     assert(Thread::current_or_null() != NULL, "no current thread");           \
     assert(Thread::current()->is_VM_thread(), "current thread is not VM thread"); \
   } while (0)
 
+  const char* young_gc_name() const;
+
   // The young region list.
   G1EdenRegions _eden;
   G1SurvivorRegions _survivor;
 
   STWGCTimer* _gc_timer_stw;
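
The new young_gc_name() accessor suggests a single place that maps collector state to the pause label used in logging. Below is a minimal standalone sketch of that idea; the GC-kind enum, the label strings, and the mapping are assumptions made for illustration and are not taken from this patch or from the G1 sources.

#include <cstdio>

// Hypothetical stand-in for the collector state that a helper like
// young_gc_name() would consult.
enum class YoungGCKind { Normal, ConcurrentStart, Mixed };

// Illustrative mapping from collector state to a stable log label,
// centralizing the lookup in one helper.
static const char* young_gc_name(YoungGCKind kind) {
  switch (kind) {
    case YoungGCKind::ConcurrentStart: return "Pause Young (Concurrent Start)";
    case YoungGCKind::Mixed:           return "Pause Young (Mixed)";
    case YoungGCKind::Normal:          // fall through
    default:                           return "Pause Young (Normal)";
  }
}

int main() {
  printf("%s\n", young_gc_name(YoungGCKind::Mixed));
  return 0;
}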

@@ -728,18 +730,25 @@
   // The guts of the incremental collection pause, executed by the vm
   // thread. It returns false if it is unable to do the collection due
   // to the GC locker being active, true otherwise
   bool do_collection_pause_at_safepoint(double target_pause_time_ms);
 
+  G1HeapVerifier::G1VerifyType young_collection_verify_type() const;
+  void verify_before_young_collection(G1HeapVerifier::G1VerifyType type);
+  void verify_after_young_collection(G1HeapVerifier::G1VerifyType type);
+
+  void calculate_collection_set(G1EvacuationInfo& evacuation_info, double target_pause_time_ms);
+
   // Actually do the work of evacuating the collection set.
   void evacuate_collection_set(G1ParScanThreadStateSet* per_thread_states);
   void evacuate_optional_collection_set(G1ParScanThreadStateSet* per_thread_states);
   void evacuate_optional_regions(G1ParScanThreadStateSet* per_thread_states, G1OptionalCSet* ocset);
 
-  void pre_evacuate_collection_set();
+  void pre_evacuate_collection_set(G1EvacuationInfo& evacuation_info);
   void post_evacuate_collection_set(G1EvacuationInfo& evacuation_info, G1ParScanThreadStateSet* pss);
 
+  void expand_heap_after_young_collection();
   // Update object copying statistics.
   void record_obj_copy_mem_stats();
 
   // The hot card cache for remembered set insertion optimization.
   G1HotCardCache* _hot_card_cache;
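
The second hunk splits the young collection pause into named prologue and epilogue steps (verification, collection-set calculation, heap expansion). The standalone sketch below illustrates how such helpers could be sequenced inside do_collection_pause_at_safepoint(); the class, the stub bodies, and the exact call order are assumptions for illustration, not the code introduced by this patch.

#include <cstdio>

// Hypothetical skeleton showing how prologue/epilogue helpers like the ones
// declared above could structure a young collection pause.
class YoungCollectionSketch {
 public:
  bool do_collection_pause_at_safepoint(double target_pause_time_ms) {
    verify_before_young_collection();
    calculate_collection_set(target_pause_time_ms);

    pre_evacuate_collection_set();
    evacuate_collection_set();
    post_evacuate_collection_set();

    expand_heap_after_young_collection();
    verify_after_young_collection();
    return true;  // In G1, false would indicate the GC locker prevented the pause.
  }

 private:
  // Stubs standing in for the real G1 members; bodies are placeholders only.
  void verify_before_young_collection()              { puts("verify (before)"); }
  void calculate_collection_set(double /*pause_ms*/) { puts("choose collection set"); }
  void pre_evacuate_collection_set()                 { puts("prologue"); }
  void evacuate_collection_set()                     { puts("evacuate"); }
  void post_evacuate_collection_set()                { puts("epilogue"); }
  void expand_heap_after_young_collection()          { puts("expand heap if needed"); }
  void verify_after_young_collection()               { puts("verify (after)"); }
};

int main() {
  YoungCollectionSketch g1;
  g1.do_collection_pause_at_safepoint(200.0);
  return 0;
}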