
src/hotspot/share/gc/g1/g1Allocator.cpp

rev 56323 : imported patch 8220310.mut.0
rev 56324 : imported patch 8220310.mut.1_thomas


  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "gc/g1/g1Allocator.inline.hpp"
  27 #include "gc/g1/g1AllocRegion.inline.hpp"
  28 #include "gc/g1/g1EvacStats.inline.hpp"
  29 #include "gc/g1/g1EvacuationInfo.hpp"
  30 #include "gc/g1/g1CollectedHeap.inline.hpp"
  31 #include "gc/g1/g1Policy.hpp"
  32 #include "gc/g1/heapRegion.inline.hpp"
  33 #include "gc/g1/heapRegionSet.inline.hpp"
  34 #include "gc/g1/heapRegionType.hpp"
  35 #include "utilities/align.hpp"
  36 
  37 G1Allocator::G1Allocator(G1CollectedHeap* heap) :
  38   _g1h(heap),
  39   _survivor_is_full(false),
  40   _old_is_full(false),
  41   _mutator_alloc_region(),
  42   _survivor_gc_alloc_region(heap->alloc_buffer_stats(G1HeapRegionAttr::Young)),
  43   _old_gc_alloc_region(heap->alloc_buffer_stats(G1HeapRegionAttr::Old)),
  44   _retained_old_gc_alloc_region(NULL) {
  45 }
  46 
  47 void G1Allocator::init_mutator_alloc_region() {
  48   assert(_mutator_alloc_region.get() == NULL, "pre-condition");
  49   _mutator_alloc_region.init();
  50 }
  51 
  52 void G1Allocator::release_mutator_alloc_region() {
  53   _mutator_alloc_region.release();
  54   assert(_mutator_alloc_region.get() == NULL, "post-condition");
  55 }
  56 
  57 bool G1Allocator::is_retained_old_region(HeapRegion* hr) {
  58   return _retained_old_gc_alloc_region == hr;
  59 }
  60 
  61 void G1Allocator::reuse_retained_old_region(G1EvacuationInfo& evacuation_info,
  62                                             OldGCAllocRegion* old,
  63                                             HeapRegion** retained_old) {
  64   HeapRegion* retained_region = *retained_old;
  65   *retained_old = NULL;
  66   assert(retained_region == NULL || !retained_region->is_archive(),
  67          "Archive region should not be alloc region (index %u)", retained_region->hrm_index());
  68 
  69   // We will discard the current GC alloc region if:
  70   // a) it's in the collection set (it can happen!),
  71   // b) it's already full (no point in using it),
  72   // c) it's empty (this means that it was emptied during
  73   // a cleanup and it should be on the free list now), or
  74   // d) it's humongous (this means that it was emptied


 129 bool G1Allocator::old_is_full() const {
 130   return _old_is_full;
 131 }
 132 
 133 void G1Allocator::set_survivor_full() {
 134   _survivor_is_full = true;
 135 }
 136 
 137 void G1Allocator::set_old_full() {
 138   _old_is_full = true;
 139 }
 140 
 141 size_t G1Allocator::unsafe_max_tlab_alloc() {
 142   // Return the remaining space in the cur alloc region, but not less than
 143   // the min TLAB size.
 144 
 145   // Also, this value can be at most the humongous object threshold,
 146   // since we can't allow tlabs to grow big enough to accommodate
 147   // humongous objects.
 148 
 149   HeapRegion* hr = mutator_alloc_region()->get();
 150   size_t max_tlab = _g1h->max_tlab_size() * wordSize;
 151   if (hr == NULL) {
 152     return max_tlab;
 153   } else {
 154     return MIN2(MAX2(hr->free(), (size_t) MinTLABSize), max_tlab);
 155   }
 156 }
 157 
 158 size_t G1Allocator::used_in_alloc_regions() {
 159   assert(Heap_lock->owner() != NULL, "Should be owned on this thread's behalf.");
 160   return mutator_alloc_region()->used_in_alloc_regions();
 161 }
 162 
 163 
 164 HeapWord* G1Allocator::par_allocate_during_gc(G1HeapRegionAttr dest,
 165                                               size_t word_size) {
 166   size_t temp = 0;
 167   HeapWord* result = par_allocate_during_gc(dest, word_size, word_size, &temp);
 168   assert(result == NULL || temp == word_size,
 169          "Requested " SIZE_FORMAT " words, but got " SIZE_FORMAT " at " PTR_FORMAT,
 170          word_size, temp, p2i(result));
 171   return result;
 172 }
 173 
 174 HeapWord* G1Allocator::par_allocate_during_gc(G1HeapRegionAttr dest,
 175                                               size_t min_word_size,
 176                                               size_t desired_word_size,
 177                                               size_t* actual_word_size) {
 178   switch (dest.type()) {
 179     case G1HeapRegionAttr::Young:
 180       return survivor_attempt_allocation(min_word_size, desired_word_size, actual_word_size);

--- New (patched) version of g1Allocator.cpp below; old version above ---

  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "gc/g1/g1Allocator.inline.hpp"
  27 #include "gc/g1/g1AllocRegion.inline.hpp"
  28 #include "gc/g1/g1EvacStats.inline.hpp"
  29 #include "gc/g1/g1EvacuationInfo.hpp"
  30 #include "gc/g1/g1CollectedHeap.inline.hpp"
  31 #include "gc/g1/g1Policy.hpp"
  32 #include "gc/g1/heapRegion.inline.hpp"
  33 #include "gc/g1/heapRegionSet.inline.hpp"
  34 #include "gc/g1/heapRegionType.hpp"
  35 #include "utilities/align.hpp"
  36 
  37 G1Allocator::G1Allocator(G1CollectedHeap* heap) :
  38   _g1h(heap),
  39   _survivor_is_full(false),
  40   _old_is_full(false),
  41   _num_alloc_region(heap->mem_node_mgr()->num_active_nodes()),
  42   _mutator_alloc_region(NULL),
  43   _survivor_gc_alloc_region(heap->alloc_buffer_stats(G1HeapRegionAttr::Young)),
  44   _old_gc_alloc_region(heap->alloc_buffer_stats(G1HeapRegionAttr::Old)),
  45   _retained_old_gc_alloc_region(NULL) {
  46 
  47   _mutator_alloc_region = NEW_C_HEAP_ARRAY(MutatorAllocRegion, _num_alloc_region, mtGC);
  48   for (uint i = 0; i < _num_alloc_region; i++) {
  49     ::new(_mutator_alloc_region + i) MutatorAllocRegion(i);
  50   }
  51 }
  52 
  53 G1Allocator::~G1Allocator() {
  54   for (uint i = 0; i < _num_alloc_region; i++) {
  55     _mutator_alloc_region[i].~MutatorAllocRegion();
  56   }
  57   FREE_C_HEAP_ARRAY(MutatorAllocRegion, _mutator_alloc_region);
  58 }
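
The constructor/destructor pair above allocates the per-node MutatorAllocRegion array from the C heap and constructs each element in place, because every element needs its node index passed to its constructor; teardown therefore has to run the destructors explicitly before freeing the raw storage. A minimal standalone sketch of that pattern, using plain malloc/free and a hypothetical Region type in place of the NEW_C_HEAP_ARRAY/FREE_C_HEAP_ARRAY macros and MutatorAllocRegion:

#include <cstdlib>
#include <new>

// Hypothetical stand-in for MutatorAllocRegion: each element is constructed
// with its own index, which is why plain new[] cannot be used.
struct Region {
  unsigned _index;
  explicit Region(unsigned index) : _index(index) {}
  ~Region() {}
};

// Allocate raw storage, then construct each element in place with its
// index -- mirroring NEW_C_HEAP_ARRAY plus placement new in the constructor.
Region* create_regions(unsigned count) {
  Region* arr = static_cast<Region*>(std::malloc(sizeof(Region) * count));
  for (unsigned i = 0; i < count; i++) {
    ::new (arr + i) Region(i);
  }
  return arr;
}

// Tear down in the same style as ~G1Allocator(): explicit destructor calls
// first, then release the raw storage.
void destroy_regions(Region* arr, unsigned count) {
  for (unsigned i = 0; i < count; i++) {
    arr[i].~Region();
  }
  std::free(arr);
}
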
  59 
  60 #ifdef ASSERT
  61 bool G1Allocator::has_mutator_alloc_region() {
  62   uint node_index = _g1h->mem_node_mgr()->index_of_current_thread();
  63   return mutator_alloc_region(node_index)->get() != NULL;
  64 }
  65 #endif
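
The ASSERT-only helper above shows the lookup the rest of the patch relies on: the current thread's node index selects one of the _num_alloc_region mutator regions. A rough sketch of that routing, with a hypothetical NodeManager standing in for whatever _g1h->mem_node_mgr() returns here (its methods and their return values are assumptions for illustration only):

#include <cstddef>

// Hypothetical node manager: reports how many NUMA nodes are active and
// which node the calling thread currently runs on.
struct NodeManager {
  unsigned num_active_nodes() const { return 2; }        // assumption: 2 nodes
  unsigned index_of_current_thread() const { return 0; } // assumption: node 0
};

struct AllocRegion { /* per-node mutator allocation state */ };

// One allocation region per active node; a mutator allocation is routed to
// the region of the caller's node, as mutator_alloc_region(node_index) does.
struct PerNodeAllocator {
  NodeManager* _mgr;
  AllocRegion* _regions; // array with _mgr->num_active_nodes() elements

  AllocRegion* region_for_current_thread() {
    return &_regions[_mgr->index_of_current_thread()];
  }
};
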
  66 
  67 void G1Allocator::init_mutator_alloc_region() {
  68   for (uint i = 0; i < _num_alloc_region; i++) {
  69     assert(mutator_alloc_region(i)->get() == NULL, "pre-condition");
  70     mutator_alloc_region(i)->init();
  71   }
  72 }
  73 
  74 void G1Allocator::release_mutator_alloc_region() {
  75   for (uint i = 0; i < _num_alloc_region; i++) {
  76     mutator_alloc_region(i)->release();
  77     assert(mutator_alloc_region(i)->get() == NULL, "post-condition");
  78   }
  79 }
  80 
  81 bool G1Allocator::is_retained_old_region(HeapRegion* hr) {
  82   return _retained_old_gc_alloc_region == hr;
  83 }
  84 
  85 void G1Allocator::reuse_retained_old_region(G1EvacuationInfo& evacuation_info,
  86                                             OldGCAllocRegion* old,
  87                                             HeapRegion** retained_old) {
  88   HeapRegion* retained_region = *retained_old;
  89   *retained_old = NULL;
  90   assert(retained_region == NULL || !retained_region->is_archive(),
  91          "Archive region should not be alloc region (index %u)", retained_region->hrm_index());
  92 
  93   // We will discard the current GC alloc region if:
  94   // a) it's in the collection set (it can happen!),
  95   // b) it's already full (no point in using it),
  96   // c) it's empty (this means that it was emptied during
  97   // a cleanup and it should be on the free list now), or
  98   // d) it's humongous (this means that it was emptied


 153 bool G1Allocator::old_is_full() const {
 154   return _old_is_full;
 155 }
 156 
 157 void G1Allocator::set_survivor_full() {
 158   _survivor_is_full = true;
 159 }
 160 
 161 void G1Allocator::set_old_full() {
 162   _old_is_full = true;
 163 }
 164 
 165 size_t G1Allocator::unsafe_max_tlab_alloc() {
 166   // Return the remaining space in the cur alloc region, but not less than
 167   // the min TLAB size.
 168 
 169   // Also, this value can be at most the humongous object threshold,
 170   // since we can't allow tlabs to grow big enough to accommodate
 171   // humongous objects.
 172 
 173   uint node_index = _g1h->mem_node_mgr()->index_of_current_thread();
 174   HeapRegion* hr = mutator_alloc_region(node_index)->get();
 175   size_t max_tlab = _g1h->max_tlab_size() * wordSize;
 176   if (hr == NULL) {
 177     return max_tlab;
 178   } else {
 179     return MIN2(MAX2(hr->free(), (size_t) MinTLABSize), max_tlab);
 180   }
 181 }
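
unsafe_max_tlab_alloc() clamps the free space of the current node's allocation region between MinTLABSize and the heap's maximum TLAB size (which, per the comment, is bounded by the humongous-object threshold). A small sketch of that clamp with made-up word counts; only the min/max structure mirrors the code above:

#include <algorithm>
#include <cstddef>
#include <cstdio>

// Illustrative only: clamp the free space of the current allocation region
// between a minimum TLAB size and the per-heap maximum.
size_t unsafe_max_tlab_alloc_sketch(size_t region_free_words,
                                    size_t min_tlab_words,
                                    size_t max_tlab_words) {
  return std::min(std::max(region_free_words, min_tlab_words), max_tlab_words);
}

int main() {
  // Hypothetical numbers: 1 KWord free, 64-word minimum, 64 KWord maximum.
  std::printf("%zu\n", unsafe_max_tlab_alloc_sketch(1024, 64, 65536)); // 1024
  // A nearly full region still reports at least the minimum TLAB size.
  std::printf("%zu\n", unsafe_max_tlab_alloc_sketch(8, 64, 65536));    // 64
  return 0;
}
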
 182 
 183 size_t G1Allocator::used_in_alloc_regions() {
 184   size_t used = 0;
 185   for (uint i = 0; i < _num_alloc_region; i++) {
 186     used += mutator_alloc_region(i)->used_in_alloc_regions();
 187   }
 188   return used;
 189 }
 190 
 191 
 192 HeapWord* G1Allocator::par_allocate_during_gc(G1HeapRegionAttr dest,
 193                                               size_t word_size) {
 194   size_t temp = 0;
 195   HeapWord* result = par_allocate_during_gc(dest, word_size, word_size, &temp);
 196   assert(result == NULL || temp == word_size,
 197          "Requested " SIZE_FORMAT " words, but got " SIZE_FORMAT " at " PTR_FORMAT,
 198          word_size, temp, p2i(result));
 199   return result;
 200 }
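
The two-argument overload above is an exact-size convenience wrapper over the min/desired-size overload: it passes word_size as both bounds and asserts that any non-NULL result is exactly that size. A self-contained sketch of the same wrapper shape, with a hypothetical allocate_range() standing in for the three-size overload:

#include <cassert>
#include <cstddef>
#include <cstdlib>

// Hypothetical min/desired-style allocator, mirroring the three-size overload
// above: it may hand out anywhere between min_words and desired_words,
// reporting the granted size through actual_words.
static char* allocate_range(size_t min_words, size_t desired_words,
                            size_t* actual_words) {
  (void)min_words;            // sketch: always grant the desired size
  *actual_words = desired_words;
  return static_cast<char*>(std::malloc(desired_words * sizeof(void*)));
}

// Exact-size wrapper, like the two-argument overload above: request word_size
// as both minimum and desired, then check that a non-NULL result is exact.
static char* allocate_exact(size_t word_size) {
  size_t actual = 0;
  char* result = allocate_range(word_size, word_size, &actual);
  assert(result == NULL || actual == word_size);
  return result;
}
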
 201 
 202 HeapWord* G1Allocator::par_allocate_during_gc(G1HeapRegionAttr dest,
 203                                               size_t min_word_size,
 204                                               size_t desired_word_size,
 205                                               size_t* actual_word_size) {
 206   switch (dest.type()) {
 207     case G1HeapRegionAttr::Young:
 208       return survivor_attempt_allocation(min_word_size, desired_word_size, actual_word_size);

