< prev index next >

src/share/vm/gc_implementation/shared/plab.hpp

Print this page
rev 8153 : PLAB moved to cpp


  28 #include "gc_implementation/shared/gcUtil.hpp"
  29 #include "memory/allocation.hpp"
  30 #include "runtime/atomic.hpp"
  31 #include "utilities/globalDefinitions.hpp"
  32 
  33 // Forward declarations.
  34 class PLABStats;
  35 
  36 // A per-thread allocation buffer used during GC.
  37 class PLAB: public CHeapObj<mtGC> {
  38 protected:
     // NOTE(review): head/tail are never referenced by the visible code; they
     // look like padding placed around the hot PLAB fields (possibly to avoid
     // false sharing) -- confirm intent before relying on or removing them.
  39   char      head[32];
  40   size_t    _word_sz;          // In HeapWord units
  41   HeapWord* _bottom;
  42   HeapWord* _top;
  43   HeapWord* _end;           // Last allocatable address + 1
  44   HeapWord* _hard_end;      // _end + AlignmentReserve
  45   // In support of ergonomic sizing of PLAB's
  46   size_t    _allocated;     // in HeapWord units
  47   size_t    _wasted;        // in HeapWord units

  48   char      tail[32];
  49   static size_t AlignmentReserve;
  50 
  51   // Force future allocations to fail and queries for contains()
  52   // to return false. Returns the amount of unused space in this PLAB.
  53   size_t invalidate() {
  54     _end    = _hard_end;
  55     size_t remaining = pointer_delta(_end, _top);  // Calculate remaining space.
  56     _top    = _end;      // Force future allocations to fail.
  57     _bottom = _end;      // Force future contains() queries to return false.
  58     return remaining;
  59   }
  60 
  61   // Fill in remaining space with a dummy object and invalidate the PLAB. Returns
  62   // the amount of remaining space.
  63   size_t retire_internal();
  64 




  65 public:
  66   // Initializes the buffer to be empty, but with the given "word_sz".
  67   // Must get initialized with "set_buf" for an allocation to succeed.
  68   PLAB(size_t word_sz);
  69   virtual ~PLAB() {}
  70 
  71   // Minimum PLAB size.
  72   static size_t min_size();
  73   // Maximum PLAB size.
  74   static size_t max_size();
  75 
  76   // If an allocation of the given "word_sz" can be satisfied within the
  77   // buffer, do the allocation, returning a pointer to the start of the
  78   // allocated block.  If the allocation request cannot be satisfied,
  79   // return NULL.
     // Plain bump-pointer allocation; no alignment handling here (see
     // allocate_aligned for the aligned variant).
  80   HeapWord* allocate(size_t word_sz) {
  81     HeapWord* res = _top;
  82     if (pointer_delta(_end, _top) >= word_sz) {
  83       _top = _top + word_sz;
  84       return res;
  85     } else {
  86       return NULL;
  87     }
  88   }
  89 
  90   // Allocate the object aligned to "alignment_in_bytes".
  91   HeapWord* allocate_aligned(size_t word_sz, unsigned short alignment_in_bytes);
  92 
  93   // Undo the last allocation in the buffer, which is required to be of the
  94   // "obj" of the given "word_sz".
  95   void undo_allocation(HeapWord* obj, size_t word_sz) {
  96     assert(pointer_delta(_top, _bottom) >= word_sz, "Bad undo");
  97     assert(pointer_delta(_top, obj)     == word_sz, "Bad undo");
  98     _top = obj;
  99   }
 100 
 101   // The total (word) size of the buffer, including both allocated and
 102   // unallocated space.
 103   size_t word_sz() { return _word_sz; }
 104 



 105   // Should only be done if we are about to reset with a new buffer of the
 106   // given size.
 107   void set_word_size(size_t new_word_sz) {
 108     assert(new_word_sz > AlignmentReserve, "Too small");
 109     _word_sz = new_word_sz;
 110   }
 111 
 112   // The number of words of unallocated space remaining in the buffer.
 113   size_t words_remaining() {
 114     assert(_end >= _top, "Negative buffer");
 115     return pointer_delta(_end, _top, HeapWordSize);
 116   }
 117 
     // Note: the upper bound is _hard_end, so addresses inside the alignment
     // reserve (between _end and _hard_end) also count as "contained".
 118   bool contains(void* addr) {
 119     return (void*)_bottom <= addr && addr < (void*)_hard_end;
 120   }
 121 
 122   // Sets the space of the buffer to be [buf, buf + word_sz()).
 123   virtual void set_buf(HeapWord* buf) {
 124     _bottom   = buf;
     // NOTE(review): file lines 125-128 (the _top/_end/_hard_end setup) are
     // elided in this diff view; verify against the full file.


 129     // In support of ergonomic sizing
 130     _allocated += word_sz();
 131   }
 132 
 133   // Flush allocation statistics into the given PLABStats supporting ergonomic
 134   // sizing of PLAB's and retire the current buffer. To be called at the end of
 135   // GC.
 136   void flush_and_retire_stats(PLABStats* stats);
 137 
 138   // Fills in the unallocated portion of the buffer with a garbage object and updates
 139   // statistics. To be called during GC.
 140   virtual void retire();
 141 
 142   void print() PRODUCT_RETURN;
 143 };
 144 
 145 // PLAB book-keeping.
 146 class PLABStats VALUE_OBJ_CLASS_SPEC {
 147   size_t _allocated;      // Total allocated
 148   size_t _wasted;         // of which wasted (internal fragmentation)

 149   size_t _unused;         // Unused in last buffer
 150   size_t _desired_plab_sz;// Output of filter (below), suitably trimmed and quantized
 151   AdaptiveWeightedAverage
 152          _filter;         // Integrator with decay
 153 
 154   void reset() {
 155     _allocated = 0;
 156     _wasted    = 0;

 157     _unused    = 0;
 158   }
 159  public:
 160   PLABStats(size_t desired_plab_sz_, unsigned wt) :
 161     _allocated(0),
 162     _wasted(0),

 163     _unused(0),
 164     _desired_plab_sz(desired_plab_sz_),
 165     _filter(wt)
 166   { }
 167 
 168   static const size_t min_size() {
 169     return PLAB::min_size();
 170   }
 171 
 172   static const size_t max_size() {
 173     return PLAB::max_size();
 174   }
 175 
 176   size_t desired_plab_sz() {
 177     return _desired_plab_sz;
 178   }
 179 
 180   // Updates the current desired PLAB size. Computes the new desired PLAB size,
 181   // updates _desired_plab_sz and clears sensor accumulators.
 182   void adjust_desired_plab_sz(uint no_of_gc_workers);
 183 
 184   void add_allocated(size_t v) {
 185     Atomic::add_ptr(v, &_allocated);
 186   }
 187 
 188   void add_unused(size_t v) {
 189     Atomic::add_ptr(v, &_unused);
 190   }
 191 
 192   void add_wasted(size_t v) {
 193     Atomic::add_ptr(v, &_wasted);




 194   }
 195 };
 196 
 197 #endif // SHARE_VM_GC_IMPLEMENTATION_SHARED_PLAB_HPP


  28 #include "gc_implementation/shared/gcUtil.hpp"
  29 #include "memory/allocation.hpp"
  30 #include "runtime/atomic.hpp"
  31 #include "utilities/globalDefinitions.hpp"
  32 
  33 // Forward declarations.
  34 class PLABStats;
  35 
  36 // A per-thread allocation buffer used during GC.
  37 class PLAB: public CHeapObj<mtGC> {
  38 protected:
     // NOTE(review): head/tail are never referenced by the visible code; they
     // look like padding placed around the hot PLAB fields (possibly to avoid
     // false sharing) -- confirm intent before relying on or removing them.
  39   char      head[32];
  40   size_t    _word_sz;          // In HeapWord units
  41   HeapWord* _bottom;
  42   HeapWord* _top;
  43   HeapWord* _end;           // Last allocatable address + 1
  44   HeapWord* _hard_end;      // _end + AlignmentReserve
  45   // In support of ergonomic sizing of PLAB's
  46   size_t    _allocated;     // in HeapWord units
  47   size_t    _wasted;        // in HeapWord units
  48   size_t    _undo_wasted;   // space given back via undo_allocation; presumably HeapWord units like _wasted -- confirm
  49   char      tail[32];
  50   static size_t AlignmentReserve;
  51 
  52   // Force future allocations to fail and queries for contains()
  53   // to return false. Returns the amount of unused space in this PLAB.
  54   size_t invalidate() {
  55     _end    = _hard_end;
  56     size_t remaining = pointer_delta(_end, _top);  // Calculate remaining space.
  57     _top    = _end;      // Force future allocations to fail.
  58     _bottom = _end;      // Force future contains() queries to return false.
  59     return remaining;
  60   }
  61 
  62   // Fill in remaining space with a dummy object and invalidate the PLAB. Returns
  63   // the amount of remaining space.
  64   size_t retire_internal();
  65 
     // Record "word_sz" words starting at "obj" as wasted-on-undo.
  66   void add_undo_waste(HeapWord* obj, size_t word_sz);
  67 
  68   // Undo the last allocation in the buffer, which is required to be of the
  69   // "obj" of the given "word_sz".
  70   void undo_last_allocation(HeapWord* obj, size_t word_sz);
  71 
  72 public:
  73   // Initializes the buffer to be empty, but with the given "word_sz".
  74   // Must get initialized with "set_buf" for an allocation to succeed.
  75   PLAB(size_t word_sz);
  76   virtual ~PLAB() {}
  77 
  78   // Minimum PLAB size.
  79   static size_t min_size();
  80   // Maximum PLAB size.
  81   static size_t max_size();
  82 
  83   // If an allocation of the given "word_sz" can be satisfied within the
  84   // buffer, do the allocation, returning a pointer to the start of the
  85   // allocated block.  If the allocation request cannot be satisfied,
  86   // return NULL.
     // Plain bump-pointer allocation; no alignment handling here (see
     // allocate_aligned for the aligned variant).
  87   HeapWord* allocate(size_t word_sz) {
  88     HeapWord* res = _top;
  89     if (pointer_delta(_end, _top) >= word_sz) {
  90       _top = _top + word_sz;
  91       return res;
  92     } else {
  93       return NULL;
  94     }
  95   }
  96 
  97   // Allocate the object aligned to "alignment_in_bytes".
  98   HeapWord* allocate_aligned(size_t word_sz, unsigned short alignment_in_bytes);
  99 
 100   // Undo any allocation in the buffer, which is required to be of the
 101   // "obj" of the given "word_sz".
     // Note: now declared out-of-line (was inline in the previous revision);
     // body not visible in this view.
 102   void undo_allocation(HeapWord* obj, size_t word_sz);




 103 
 104   // The total (word) size of the buffer, including both allocated and
 105   // unallocated space.
 106   size_t word_sz() { return _word_sz; }
 107 
     // Accessors for the waste counters accumulated so far.
 108   size_t waste() { return _wasted; }
 109   size_t undo_waste() { return _undo_wasted; }
 110 
 111   // Should only be done if we are about to reset with a new buffer of the
 112   // given size.
 113   void set_word_size(size_t new_word_sz) {
 114     assert(new_word_sz > AlignmentReserve, "Too small");
 115     _word_sz = new_word_sz;
 116   }
 117 
 118   // The number of words of unallocated space remaining in the buffer.
 119   size_t words_remaining() {
 120     assert(_end >= _top, "Negative buffer");
 121     return pointer_delta(_end, _top, HeapWordSize);
 122   }
 123 
     // Note: the upper bound is _hard_end, so addresses inside the alignment
     // reserve (between _end and _hard_end) also count as "contained".
 124   bool contains(void* addr) {
 125     return (void*)_bottom <= addr && addr < (void*)_hard_end;
 126   }
 127 
 128   // Sets the space of the buffer to be [buf, buf + word_sz()).
 129   virtual void set_buf(HeapWord* buf) {
 130     _bottom   = buf;
     // NOTE(review): file lines 131-134 (the _top/_end/_hard_end setup) are
     // elided in this diff view; verify against the full file.


 135     // In support of ergonomic sizing
 136     _allocated += word_sz();
 137   }
 138 
 139   // Flush allocation statistics into the given PLABStats supporting ergonomic
 140   // sizing of PLAB's and retire the current buffer. To be called at the end of
 141   // GC.
 142   void flush_and_retire_stats(PLABStats* stats);
 143 
 144   // Fills in the unallocated portion of the buffer with a garbage object and updates
 145   // statistics. To be called during GC.
 146   virtual void retire();
 147 
 148   void print() PRODUCT_RETURN;
 149 };
 150 
 151 // PLAB book-keeping.
 152 class PLABStats VALUE_OBJ_CLASS_SPEC {
 153   size_t _allocated;      // Total allocated
 154   size_t _wasted;         // of which wasted (internal fragmentation)
 155   size_t _undo_wasted;    // of which wasted on undo (is not used for calculation of PLAB size)
 156   size_t _unused;         // Unused in last buffer
 157   size_t _desired_plab_sz;// Output of filter (below), suitably trimmed and quantized
 158   AdaptiveWeightedAverage
 159          _filter;         // Integrator with decay
 160 
 161   void reset() {
 162     _allocated   = 0;
 163     _wasted      = 0;
 164     _undo_wasted = 0;
 165     _unused      = 0;
 166   }
 167  public:
 168   PLABStats(size_t desired_plab_sz_, unsigned wt) :
 169     _allocated(0),
 170     _wasted(0),
 171     _undo_wasted(0),
 172     _unused(0),
 173     _desired_plab_sz(desired_plab_sz_),
 174     _filter(wt)
 175   { }
 176 
 177   static const size_t min_size() {
 178     return PLAB::min_size();
 179   }
 180 
 181   static const size_t max_size() {
 182     return PLAB::max_size();
 183   }
 184 
 185   size_t desired_plab_sz() {
 186     return _desired_plab_sz;
 187   }
 188 
 189   // Updates the current desired PLAB size. Computes the new desired PLAB size,
 190   // updates _desired_plab_sz and clears sensor accumulators.
 191   void adjust_desired_plab_sz(uint no_of_gc_workers);
 192 
 193   void add_allocated(size_t v) {
 194     Atomic::add_ptr(v, &_allocated);
 195   }
 196 
 197   void add_unused(size_t v) {
 198     Atomic::add_ptr(v, &_unused);
 199   }
 200 
 201   void add_wasted(size_t v) {
 202     Atomic::add_ptr(v, &_wasted);
 203   }
 204 
 205   void add_undo_wasted(size_t v) {
 206     Atomic::add_ptr(v, &_undo_wasted);
 207   }
 208 };
 209 
 210 #endif // SHARE_VM_GC_IMPLEMENTATION_SHARED_PLAB_HPP
< prev index next >