< prev index next >

src/hotspot/share/gc/g1/g1AllocRegion.cpp

Print this page
rev 55208 : imported patch 8220089.webrev.0
rev 55211 : imported patch 8220089.webrev.3
rev 55212 : imported patch 8220089.webrev.4

@@ -1,7 +1,7 @@
 /*
- * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 only, as
  * published by the Free Software Foundation.

@@ -93,11 +93,11 @@
   assert(alloc_region->free() / HeapWordSize < min_word_size_to_fill,
          "post-condition");
   return result;
 }
 
-size_t G1AllocRegion::retire_internal(HeapRegion* alloc_region, bool fill_up) {
+size_t G1AllocRegion::retire_internal(HeapRegion* alloc_region, bool fill_up, bool forget_used_before) {
   // We never have to check whether the active region is empty or not,
   // and potentially free it if it is, given that it's guaranteed that
   // it will never be empty.
   size_t waste = 0;
   assert_alloc_region(!alloc_region->is_empty(),

@@ -108,11 +108,14 @@
   }
 
   assert_alloc_region(alloc_region->used() >= _used_bytes_before, "invariant");
   size_t allocated_bytes = alloc_region->used() - _used_bytes_before;
   retire_region(alloc_region, allocated_bytes);
+
+  if (forget_used_before) {
-  _used_bytes_before = 0;
+    _used_bytes_before = 0;
+  }
 
   return waste;
 }
 
 size_t G1AllocRegion::retire(bool fill_up) {

@@ -132,11 +135,10 @@
 }
 
 HeapWord* G1AllocRegion::new_alloc_region_and_allocate(size_t word_size,
                                                        bool force) {
   assert_alloc_region(_alloc_region == _dummy_region, "pre-condition");
-  assert_alloc_region(_used_bytes_before == 0, "pre-condition");
 
   trace("attempting region allocation");
   HeapRegion* new_alloc_region = allocate_new_region(word_size, force);
   if (new_alloc_region != NULL) {
     new_alloc_region->reset_pre_dummy_top();

@@ -263,11 +265,16 @@
   return _g1h->new_mutator_alloc_region(word_size, force);
 }
 
 void MutatorAllocRegion::retire_region(HeapRegion* alloc_region,
                                        size_t allocated_bytes) {
-  _g1h->retire_mutator_alloc_region(alloc_region, allocated_bytes);
+  size_t used_bytes = allocated_bytes;
+
+  if (alloc_region == _retained_alloc_region) {
+    used_bytes = alloc_region->used() - _retained_used_bytes_before;
+  }
+  _g1h->retire_mutator_alloc_region(alloc_region, used_bytes);
 }
 
 void MutatorAllocRegion::init() {
   assert(_retained_alloc_region == NULL, "Pre-condition");
   G1AllocRegion::init();

@@ -296,13 +303,14 @@
     // Retain the current region if it fits a TLAB and has more
     // free than the currently retained region.
     if (should_retain(current_region)) {
       trace("mutator retained");
       if (_retained_alloc_region != NULL) {
-        waste = retire_internal(_retained_alloc_region, true);
+        waste = retire_internal(_retained_alloc_region, true, false);
       }
       _retained_alloc_region = current_region;
+      _retained_used_bytes_before = used_bytes_before();
     } else {
       waste = retire_internal(current_region, fill_up);
     }
     reset_alloc_region();
   }

@@ -315,15 +323,17 @@
 size_t MutatorAllocRegion::used_in_alloc_regions() {
   size_t used = 0;
   HeapRegion* hr = get();
   if (hr != NULL) {
     used += hr->used();
+    used -= used_bytes_before();
   }
 
   hr = _retained_alloc_region;
   if (hr != NULL) {
     used += hr->used();
+    used -= _retained_used_bytes_before;
   }
   return used;
 }
 
 HeapRegion* MutatorAllocRegion::release() {

@@ -333,10 +343,11 @@
   // done after the above call to release the mutator alloc region,
   // since it might update the _retained_alloc_region member.
   if (_retained_alloc_region != NULL) {
     _wasted_bytes += retire_internal(_retained_alloc_region, false);
     _retained_alloc_region = NULL;
+    _retained_used_bytes_before = 0;
   }
   log_debug(gc, alloc, region)("Mutator Allocation stats, regions: %u, wasted size: " SIZE_FORMAT "%s (%4.1f%%)",
                                count(),
                                byte_size_in_proper_unit(_wasted_bytes),
                                proper_unit_for_byte_size(_wasted_bytes),

@@ -363,10 +374,14 @@
     _stats->add_region_end_waste(end_waste / HeapWordSize);
   }
   return end_waste;
 }
 
+bool SurvivorGCAllocRegion::should_retain(HeapRegion* region) {
+  return region != NULL && region->free() >= MinTLABSize;
+}
+
 HeapRegion* OldGCAllocRegion::release() {
   HeapRegion* cur = get();
   if (cur != NULL) {
     // Determine how far we are from the next card boundary. If it is smaller than
     // the minimum object size we can allocate into, expand into the next card.
< prev index next >