< prev index next >

src/hotspot/share/gc/g1/g1CollectedHeap.cpp

Print this page
rev 52611 : webrev.00
rev 52613 : webrev.01

@@ -1023,10 +1023,12 @@
   // incremental collection set and then start rebuilding it afresh
   // after this full GC.
   abandon_collection_set(collection_set());
 
   tear_down_region_sets(false /* free_list_only */);
+
+  hrm()->prepare_for_full_collection_start();
 }
 
 void G1CollectedHeap::verify_before_full_collection(bool explicit_gc) {
   assert(!GCCause::is_user_requested_gc(gc_cause()) || explicit_gc, "invariant");
   assert(used() == recalculate_used(), "Should be equal");

@@ -1034,10 +1036,12 @@
   _verifier->verify_before_gc(G1HeapVerifier::G1VerifyFull);
   _verifier->check_bitmaps("Full GC Start");
 }
 
 void G1CollectedHeap::prepare_heap_for_mutators() {
+  hrm()->prepare_for_full_collection_end();
+
   // Delete metaspaces for unloaded class loaders and clean up loader_data graph
   ClassLoaderDataGraph::purge();
   MetaspaceUtils::verify_metrics();
 
   // Prepare heap for normal collections.

@@ -1484,11 +1488,11 @@
   _archive_set("Archive Region Set", new ArchiveRegionSetChecker()),
   _humongous_set("Humongous Region Set", new HumongousRegionSetChecker()),
   _bot(NULL),
   _listener(),
   _hrm(NULL),
-  _is_hetero_heap(false),
+  _is_hetero_heap(AllocateOldGenAt != NULL),
   _allocator(NULL),
   _verifier(NULL),
   _summary_bytes_used(0),
   _archive_allocator(NULL),
   _survivor_evac_stats("Young", YoungPLABSize, PLABWeight),

@@ -1617,11 +1621,11 @@
   // cases incorrectly returns the size in wordSize units rather than
   // HeapWordSize).
   guarantee(HeapWordSize == wordSize, "HeapWordSize must equal wordSize");
 
   size_t init_byte_size = collector_policy()->initial_heap_byte_size();
-  size_t max_byte_size = collector_policy()->max_heap_byte_size();
+  size_t max_byte_size = g1_collector_policy()->heap_reservation_size_bytes();
   size_t heap_alignment = collector_policy()->heap_alignment();
 
   // Ensure that the sizes are properly aligned.
   Universe::check_alignment(init_byte_size, HeapRegion::GrainBytes, "g1 heap");
   Universe::check_alignment(max_byte_size, HeapRegion::GrainBytes, "g1 heap");

@@ -1637,14 +1641,10 @@
   // into the ReservedHeapSpace constructor) then the actual
   // base of the reserved heap may end up differing from the
   // address that was requested (i.e. the preferred heap base).
   // If this happens then we could end up using a non-optimal
   // compressed oops mode.
-  if (AllocateOldGenAt != NULL) {
-    _is_hetero_heap = true;
-    max_byte_size *= 2;
-  }
 
   ReservedSpace heap_rs = Universe::reserve_heap(max_byte_size,
                                                  heap_alignment);
 
   initialize_reserved_region((HeapWord*)heap_rs.base(), (HeapWord*)(heap_rs.base() + heap_rs.size()));

@@ -1691,10 +1691,15 @@
                                          g1_rs.size(),
                                          page_size,
                                          HeapRegion::GrainBytes,
                                          1,
                                          mtJavaHeap);
+  if(heap_storage == NULL) {
+    vm_shutdown_during_initialization("Could not initialize G1 heap");
+    return JNI_ERR;
+  }
+
   os::trace_page_sizes("Heap",
                        collector_policy()->min_heap_byte_size(),
                        max_byte_size,
                        page_size,
                        heap_rs.base(),

@@ -1721,16 +1726,12 @@
   G1RegionToSpaceMapper* prev_bitmap_storage =
     create_aux_memory_mapper("Prev Bitmap", bitmap_size, G1CMBitMap::heap_map_factor());
   G1RegionToSpaceMapper* next_bitmap_storage =
     create_aux_memory_mapper("Next Bitmap", bitmap_size, G1CMBitMap::heap_map_factor());
 
-  if (is_hetero_heap()) {
-    _hrm = new HeapRegionManagerForHeteroHeap((uint)((max_byte_size / 2) / HeapRegion::GrainBytes /*heap size as num of regions*/));
-  }
-  else {
-    _hrm = new HeapRegionManager();
-  }
+  _hrm = HeapRegionManager::create_manager(this, collector_policy());
+
   _hrm->initialize(heap_storage, prev_bitmap_storage, next_bitmap_storage, bot_storage, cardtable_storage, card_counts_storage);
   _card_table->initialize(cardtable_storage);
   // Do later initialization work for concurrent refinement.
   _hot_card_cache->initialize(card_counts_storage);
 

@@ -1780,10 +1781,15 @@
   }
 
   // Perform any initialization actions delegated to the policy.
   g1_policy()->init(this, &_collection_set);
 
+  // Now that we know the target length of the young list, adjust the heap to provision that many regions on DRAM.
+  if (is_hetero_heap()) {
+    static_cast<HeterogeneousHeapRegionManager*>(hrm())->adjust_dram_regions((uint)g1_policy()->young_list_target_length(), workers());
+  }
+
   jint ecode = initialize_concurrent_refinement();
   if (ecode != JNI_OK) {
     return ecode;
   }
 

@@ -1918,10 +1924,14 @@
 
 // Returns the heap's policy object through the generic CollectorPolicy
 // base type (the stored _collector_policy field).
 CollectorPolicy* G1CollectedHeap::collector_policy() const {
   return _collector_policy;
 }
 
+// Returns the same policy object as collector_policy(), but statically
+// typed as G1CollectorPolicy so callers can reach G1-specific accessors
+// (e.g. heap_reservation_size_bytes() used during initialize()) without
+// casting.
+G1CollectorPolicy* G1CollectedHeap::g1_collector_policy() const {
+  return _collector_policy;
+}
+
 // Returns a pointer to the heap-owned soft reference clearing policy.
 SoftRefPolicy* G1CollectedHeap::soft_ref_policy() {
   return &_soft_ref_policy;
 }
 
 size_t G1CollectedHeap::capacity() const {

@@ -2517,10 +2527,14 @@
 void G1CollectedHeap::gc_epilogue(bool full) {
   // Update common counters.
   if (full) {
     // Update the number of full collections that have been completed.
     increment_old_marking_cycles_completed(false /* concurrent */);
+    // Now that we know the target length of the young list, adjust the heap to provision that many regions on DRAM.
+    if (is_hetero_heap()) {
+      static_cast<HeterogeneousHeapRegionManager*>(hrm())->adjust_dram_regions((uint)g1_policy()->young_list_target_length(), workers());
+    }
   }
 
   // We are at the end of the GC. Total collections has already been increased.
   g1_rem_set()->print_periodic_summary_info("After GC RS summary", total_collections() - 1);
 
< prev index next >