
src/hotspot/share/gc/shared/threadLocalAllocBuffer.hpp

rev 49643 : [mq]: heap8
rev 49644 : [mq]: event_rebased
rev 49649 : [mq]: heap14


  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_VM_GC_SHARED_THREADLOCALALLOCBUFFER_HPP
  26 #define SHARE_VM_GC_SHARED_THREADLOCALALLOCBUFFER_HPP
  27 
  28 #include "gc/shared/gcUtil.hpp"
  29 #include "oops/typeArrayOop.hpp"
  30 #include "runtime/perfData.hpp"
  31 #include "runtime/vm_version.hpp"
  32 
  33 class GlobalTLABStats;
  34 
  35 // ThreadLocalAllocBuffer: a descriptor for thread-local storage used by
  36 // the threads for allocation.
  37 //            It is thread-private at any time, but may be multiplexed over
  38 //            time across multiple threads. The park()/unpark() pair is
  39 //            used to make it available for such multiplexing.
  40 //
  41 //            Heap sampling is performed via the current_end/allocation_end
  42 //            fields.
  43 //            allocation_end contains the real end of the tlab allocation,
  44 //            whereas current_end can be set to an arbitrary spot in the tlab to
  45 //            trip the return and sample the allocation.
  46 class ThreadLocalAllocBuffer: public CHeapObj<mtThread> {
  47   friend class VMStructs;
  48   friend class JVMCIVMStructs;
  49 private:
  50   HeapWord* _start;                              // address of TLAB
  51   HeapWord* _top;                                // address after last allocation
  52   HeapWord* _pf_top;                             // allocation prefetch watermark
  53   HeapWord* _current_end;                        // allocation end (can be the sampling end point or _allocation_end)
  54   HeapWord* _allocation_end;                     // end for allocations (actual TLAB end, excluding alignment_reserve)
  55 
  56   size_t    _desired_size;                       // desired size   (including alignment_reserve)
  57   size_t    _refill_waste_limit;                 // hold onto tlab if free() is larger than this
  58   size_t    _allocated_before_last_gc;           // total bytes allocated up until the last gc
  59   size_t    _bytes_since_last_sample_point;      // bytes since last sample point.
  60 
  61   static size_t   _max_size;                          // maximum size of any TLAB
  62   static int      _reserve_for_allocation_prefetch;   // Reserve at the end of the TLAB
  63   static unsigned _target_refills;                    // expected number of refills between GCs
  64 
  65   unsigned  _number_of_refills;
  66   unsigned  _fast_refill_waste;
  67   unsigned  _slow_refill_waste;
  68   unsigned  _gc_waste;
  69   unsigned  _slow_allocations;
  70 
  71   AdaptiveWeightedAverage _allocation_fraction;  // fraction of eden allocated in tlabs
  72 
  73   void accumulate_statistics();
  74   void initialize_statistics();
  75 
  76   void set_start(HeapWord* start)                { _start = start; }
  77   void set_current_end(HeapWord* current_end)    { _current_end = current_end; }
  78   void set_allocation_end(HeapWord* ptr)         { _allocation_end = ptr; }
  79   void set_top(HeapWord* top)                    { _top = top; }
  80   void set_pf_top(HeapWord* pf_top)              { _pf_top = pf_top; }
  81   void set_desired_size(size_t desired_size)     { _desired_size = desired_size; }
  82   void set_refill_waste_limit(size_t waste)      { _refill_waste_limit = waste;  }
  83 
  84   size_t initial_refill_waste_limit()            { return desired_size() / TLABRefillWasteFraction; }
  85 
  86   static int    target_refills()                 { return _target_refills; }
  87   size_t initial_desired_size();
  88 
  89   size_t remaining();
  90 
  91   // Make parsable and release it.
  92   void reset();
  93 
  94   // Resize based on amount of allocation, etc.
  95   void resize();
  96 
  97   void invariants() const { assert(top() >= start() && top() <= current_end(), "invalid tlab"); }
  98 
  99   void initialize(HeapWord* start, HeapWord* top, HeapWord* end);
 100 
 101   void print_stats(const char* tag);
 102 
 103   Thread* myThread();
 104 
 105   // statistics
 106 
 107   int number_of_refills() const { return _number_of_refills; }
 108   int fast_refill_waste() const { return _fast_refill_waste; }
 109   int slow_refill_waste() const { return _slow_refill_waste; }
 110   int gc_waste() const          { return _gc_waste; }
 111   int slow_allocations() const  { return _slow_allocations; }
 112 
 113   static GlobalTLABStats* _global_stats;
 114   static GlobalTLABStats* global_stats() { return _global_stats; }
 115 
 116 public:
 117   ThreadLocalAllocBuffer() : _allocation_fraction(TLABAllocationWeight), _allocated_before_last_gc(0) {
 118     // do nothing.  tlabs must be inited by initialize() calls
 119   }
 120 
 121   static size_t min_size()                       { return align_object_size(MinTLABSize / HeapWordSize) + alignment_reserve(); }
 122   static size_t max_size()                       { assert(_max_size != 0, "max_size not set up"); return _max_size; }
 123   static size_t max_size_in_bytes()              { return max_size() * BytesPerWord; }
 124   static void set_max_size(size_t max_size)      { _max_size = max_size; }
 125 
 126   HeapWord* start() const                        { return _start; }
 127   HeapWord* current_end() const                  { return _current_end; }
 128   HeapWord* top() const                          { return _top; }
 129   HeapWord* reserved_end();
 130   HeapWord* pf_top() const                       { return _pf_top; }
 131   size_t desired_size() const                    { return _desired_size; }
 132   size_t used() const                            { return pointer_delta(top(), start()); }
 133   size_t used_bytes() const                      { return pointer_delta(top(), start(), 1); }
 134   size_t free() const                            { return pointer_delta(current_end(), top()); }
 135   // Don't discard tlab if remaining space is larger than this.
 136   size_t refill_waste_limit() const              { return _refill_waste_limit; }
 137   size_t bytes_since_last_sample_point() const   { return _bytes_since_last_sample_point; }
 138 
 139   // Allocate size HeapWords. The memory is NOT initialized to zero.
 140   inline HeapWord* allocate(size_t size);
 141   HeapWord* allocate_sampled_object(size_t size);
 142 
 143   // Reserve space at the end of TLAB
 144   static size_t end_reserve() {
 145     int reserve_size = typeArrayOopDesc::header_size(T_INT);
 146     return MAX2(reserve_size, _reserve_for_allocation_prefetch);
 147   }
 148   static size_t alignment_reserve()              { return align_object_size(end_reserve()); }
 149   static size_t alignment_reserve_in_bytes()     { return alignment_reserve() * HeapWordSize; }
 150 
 151   // Return tlab size or remaining space in eden such that the
 152   // space is large enough to hold obj_size and necessary fill space.
 153   // Otherwise return 0;
 154   inline size_t compute_size(size_t obj_size);


 164 
 165   // Retire in-use tlab before allocation of a new tlab
 166   void clear_before_allocation();
 167 
 168   // Accumulate statistics across all tlabs before gc
 169   static void accumulate_statistics_before_gc();
 170 
 171   // Resize tlabs for all threads
 172   static void resize_all_tlabs();
 173 
 174   void fill(HeapWord* start, HeapWord* top, size_t new_size);
 175   void initialize();
 176 
 177   void set_back_allocation_end();
 178   void set_sample_end();
 179 
 180   static size_t refill_waste_limit_increment()   { return TLABWasteIncrement; }
 181 
 182   // Code generation support
 183   static ByteSize start_offset()                 { return byte_offset_of(ThreadLocalAllocBuffer, _start); }
 184   static ByteSize current_end_offset()           { return byte_offset_of(ThreadLocalAllocBuffer, _current_end  ); }
 185   static ByteSize top_offset()                   { return byte_offset_of(ThreadLocalAllocBuffer, _top  ); }
 186   static ByteSize pf_top_offset()                { return byte_offset_of(ThreadLocalAllocBuffer, _pf_top  ); }
 187   static ByteSize size_offset()                  { return byte_offset_of(ThreadLocalAllocBuffer, _desired_size ); }
 188   static ByteSize refill_waste_limit_offset()    { return byte_offset_of(ThreadLocalAllocBuffer, _refill_waste_limit ); }
 189 
 190   static ByteSize number_of_refills_offset()     { return byte_offset_of(ThreadLocalAllocBuffer, _number_of_refills ); }
 191   static ByteSize fast_refill_waste_offset()     { return byte_offset_of(ThreadLocalAllocBuffer, _fast_refill_waste ); }
 192   static ByteSize slow_allocations_offset()      { return byte_offset_of(ThreadLocalAllocBuffer, _slow_allocations ); }
 193 
 194   void verify();
 195 };
 196 
 197 class GlobalTLABStats: public CHeapObj<mtThread> {
 198 private:
 199 
 200   // Accumulate perfdata in private variables because
 201   // PerfData should be write-only for security reasons
 202   // (see perfData.hpp)
 203   unsigned _allocating_threads;
 204   unsigned _total_refills;




  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_VM_GC_SHARED_THREADLOCALALLOCBUFFER_HPP
  26 #define SHARE_VM_GC_SHARED_THREADLOCALALLOCBUFFER_HPP
  27 
  28 #include "gc/shared/gcUtil.hpp"
  29 #include "oops/typeArrayOop.hpp"
  30 #include "runtime/perfData.hpp"
  31 #include "runtime/vm_version.hpp"
  32 
  33 class GlobalTLABStats;
  34 
  35 // ThreadLocalAllocBuffer: a descriptor for thread-local storage used by
  36 // the threads for allocation.
  37 //            It is thread-private at any time, but may be multiplexed over
  38 //            time across multiple threads. The park()/unpark() pair is
  39 //            used to make it available for such multiplexing.
  40 //
  41 //            Heap sampling is performed via the fast_path_end/allocation_end
  42 //            fields.
  43 //            allocation_end contains the real end of the tlab allocation,
  44 //            whereas fast_path_end can be set to an arbitrary spot in the tlab to
  45 //            trip the return and sample the allocation.
  46 class ThreadLocalAllocBuffer: public CHeapObj<mtThread> {
  47   friend class VMStructs;
  48   friend class JVMCIVMStructs;
  49 private:
  50   HeapWord* _start;                              // address of TLAB
  51   HeapWord* _top;                                // address after last allocation
  52   HeapWord* _pf_top;                             // allocation prefetch watermark
  53   HeapWord* _fast_path_end;                      // allocation end (can be the sampling end point or _allocation_end)
  54   HeapWord* _allocation_end;                     // end for allocations (actual TLAB end, excluding alignment_reserve)
  55 
  56   size_t    _desired_size;                       // desired size   (including alignment_reserve)
  57   size_t    _refill_waste_limit;                 // hold onto tlab if free() is larger than this
  58   size_t    _allocated_before_last_gc;           // total bytes allocated up until the last gc
  59   size_t    _bytes_since_last_sample_point;      // bytes since last sample point.
  60 
  61   static size_t   _max_size;                          // maximum size of any TLAB
  62   static int      _reserve_for_allocation_prefetch;   // Reserve at the end of the TLAB
  63   static unsigned _target_refills;                    // expected number of refills between GCs
  64 
  65   unsigned  _number_of_refills;
  66   unsigned  _fast_refill_waste;
  67   unsigned  _slow_refill_waste;
  68   unsigned  _gc_waste;
  69   unsigned  _slow_allocations;
  70 
  71   AdaptiveWeightedAverage _allocation_fraction;  // fraction of eden allocated in tlabs
  72 
  73   void accumulate_statistics();
  74   void initialize_statistics();
  75 
  76   void set_start(HeapWord* start)                { _start = start; }
  77   void set_fast_path_end(HeapWord* fast_path_end){ _fast_path_end = fast_path_end; }
  78   void set_allocation_end(HeapWord* ptr)         { _allocation_end = ptr; }
  79   void set_top(HeapWord* top)                    { _top = top; }
  80   void set_pf_top(HeapWord* pf_top)              { _pf_top = pf_top; }
  81   void set_desired_size(size_t desired_size)     { _desired_size = desired_size; }
  82   void set_refill_waste_limit(size_t waste)      { _refill_waste_limit = waste;  }
  83 
  84   size_t initial_refill_waste_limit()            { return desired_size() / TLABRefillWasteFraction; }
  85 
  86   static int    target_refills()                 { return _target_refills; }
  87   size_t initial_desired_size();
  88 
  89   size_t remaining();
  90 
  91   // Make parsable and release it.
  92   void reset();
  93 
  94   // Resize based on amount of allocation, etc.
  95   void resize();
  96 
  97   void invariants() const { assert(top() >= start() && top() <= fast_path_end(), "invalid tlab"); }
  98 
  99   void initialize(HeapWord* start, HeapWord* top, HeapWord* end);
 100 
 101   void print_stats(const char* tag);
 102 
 103   Thread* myThread();
 104 
 105   // statistics
 106 
 107   int number_of_refills() const { return _number_of_refills; }
 108   int fast_refill_waste() const { return _fast_refill_waste; }
 109   int slow_refill_waste() const { return _slow_refill_waste; }
 110   int gc_waste() const          { return _gc_waste; }
 111   int slow_allocations() const  { return _slow_allocations; }
 112 
 113   static GlobalTLABStats* _global_stats;
 114   static GlobalTLABStats* global_stats() { return _global_stats; }
 115 
 116 public:
 117   ThreadLocalAllocBuffer() : _allocation_fraction(TLABAllocationWeight), _allocated_before_last_gc(0) {
 118     // do nothing.  tlabs must be inited by initialize() calls
 119   }
 120 
 121   static size_t min_size()                       { return align_object_size(MinTLABSize / HeapWordSize) + alignment_reserve(); }
 122   static size_t max_size()                       { assert(_max_size != 0, "max_size not set up"); return _max_size; }
 123   static size_t max_size_in_bytes()              { return max_size() * BytesPerWord; }
 124   static void set_max_size(size_t max_size)      { _max_size = max_size; }
 125 
 126   HeapWord* start() const                        { return _start; }
 127   HeapWord* fast_path_end() const                { return _fast_path_end; }
 128   HeapWord* top() const                          { return _top; }
 129   HeapWord* hard_end();
 130   HeapWord* pf_top() const                       { return _pf_top; }
 131   size_t desired_size() const                    { return _desired_size; }
 132   size_t used() const                            { return pointer_delta(top(), start()); }
 133   size_t used_bytes() const                      { return pointer_delta(top(), start(), 1); }
 134   size_t free() const                            { return pointer_delta(fast_path_end(), top()); }
 135   // Don't discard tlab if remaining space is larger than this.
 136   size_t refill_waste_limit() const              { return _refill_waste_limit; }
 137   size_t bytes_since_last_sample_point() const   { return _bytes_since_last_sample_point; }
 138 
 139   // Allocate size HeapWords. The memory is NOT initialized to zero.
 140   inline HeapWord* allocate(size_t size);
 141   HeapWord* allocate_sampled_object(size_t size);
 142 
 143   // Reserve space at the end of TLAB
 144   static size_t end_reserve() {
 145     int reserve_size = typeArrayOopDesc::header_size(T_INT);
 146     return MAX2(reserve_size, _reserve_for_allocation_prefetch);
 147   }
 148   static size_t alignment_reserve()              { return align_object_size(end_reserve()); }
 149   static size_t alignment_reserve_in_bytes()     { return alignment_reserve() * HeapWordSize; }
 150 
 151   // Return tlab size or remaining space in eden such that the
 152   // space is large enough to hold obj_size and necessary fill space.
 153   // Otherwise return 0;
 154   inline size_t compute_size(size_t obj_size);


 164 
 165   // Retire in-use tlab before allocation of a new tlab
 166   void clear_before_allocation();
 167 
 168   // Accumulate statistics across all tlabs before gc
 169   static void accumulate_statistics_before_gc();
 170 
 171   // Resize tlabs for all threads
 172   static void resize_all_tlabs();
 173 
 174   void fill(HeapWord* start, HeapWord* top, size_t new_size);
 175   void initialize();
 176 
 177   void set_back_allocation_end();
 178   void set_sample_end();
 179 
 180   static size_t refill_waste_limit_increment()   { return TLABWasteIncrement; }
 181 
 182   // Code generation support
 183   static ByteSize start_offset()                 { return byte_offset_of(ThreadLocalAllocBuffer, _start); }
 184   static ByteSize fast_path_end_offset()         { return byte_offset_of(ThreadLocalAllocBuffer, _fast_path_end  ); }
 185   static ByteSize top_offset()                   { return byte_offset_of(ThreadLocalAllocBuffer, _top  ); }
 186   static ByteSize pf_top_offset()                { return byte_offset_of(ThreadLocalAllocBuffer, _pf_top  ); }
 187   static ByteSize size_offset()                  { return byte_offset_of(ThreadLocalAllocBuffer, _desired_size ); }
 188   static ByteSize refill_waste_limit_offset()    { return byte_offset_of(ThreadLocalAllocBuffer, _refill_waste_limit ); }
 189 
 190   static ByteSize number_of_refills_offset()     { return byte_offset_of(ThreadLocalAllocBuffer, _number_of_refills ); }
 191   static ByteSize fast_refill_waste_offset()     { return byte_offset_of(ThreadLocalAllocBuffer, _fast_refill_waste ); }
 192   static ByteSize slow_allocations_offset()      { return byte_offset_of(ThreadLocalAllocBuffer, _slow_allocations ); }
 193 
 194   void verify();
 195 };
 196 
 197 class GlobalTLABStats: public CHeapObj<mtThread> {
 198 private:
 199 
 200   // Accumulate perfdata in private variables because
 201   // PerfData should be write-only for security reasons
 202   // (see perfData.hpp)
 203   unsigned _allocating_threads;
 204   unsigned _total_refills;
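
The rename from _current_end to _fast_path_end in this revision makes the sampling scheme in the class comment easier to follow: the inlined allocation path only ever compares against _fast_path_end, while _allocation_end keeps the real limit, so moving _fast_path_end forward forces the allocation that crosses it out of the fast path, where a sample can be taken, and set_back_allocation_end() restores the real end afterwards. The method bodies are not part of this header, so the following is only a self-contained model of the idea; TlabModel and its members are illustrative names, not VM code.

#include <cstddef>
#include <cstdio>

// Standalone model of the two-end-pointer scheme described above.
// "Words" are modeled as size_t slots; none of this is HotSpot code.
struct TlabModel {
  size_t* top;             // next free word, like _top
  size_t* fast_path_end;   // what the inlined fast path checks, like _fast_path_end
  size_t* allocation_end;  // real end of usable space, like _allocation_end

  // Fast path: bump-pointer allocate, or return nullptr to send the
  // caller to the slow path (refill, outside-TLAB allocation, sampling).
  size_t* allocate(size_t words) {
    if (top + words <= fast_path_end) {
      size_t* obj = top;
      top += words;
      return obj;
    }
    return nullptr;  // tripped: either genuinely full or a sample point
  }

  // Pull the fast-path end in so the allocation crossing the chosen
  // point drops out of the fast path and can be reported.
  void set_sample_point(size_t words_until_sample) {
    if (top + words_until_sample < allocation_end) {
      fast_path_end = top + words_until_sample;
    }
  }

  // After the sample has been taken, expose the real end again
  // (this is what set_back_allocation_end() does for the real TLAB).
  void clear_sample_point() { fast_path_end = allocation_end; }
};

int main() {
  size_t storage[1024];
  TlabModel tlab{storage, storage + 1024, storage + 1024};

  tlab.set_sample_point(16);
  printf("fast path:   %p\n", (void*)tlab.allocate(8));   // succeeds
  printf("sample trip: %p\n", (void*)tlab.allocate(16));  // nullptr, crosses the sample point
  tlab.clear_sample_point();
  printf("fast again:  %p\n", (void*)tlab.allocate(16));  // succeeds
  return 0;
}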

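compute_size() is only declared here; its comment says it returns a TLAB size, clamped to what eden can still provide, that is large enough for obj_size plus the necessary fill space, and 0 otherwise. A hedged sketch of that decision in standalone form (the parameters are example inputs, not the real heap queries):

#include <algorithm>
#include <cstddef>

// Model of the decision described in compute_size()'s comment: start
// from the desired TLAB size, clamp it to the space eden can still hand
// out, and give up (return 0) if the result cannot hold the object plus
// the alignment reserve kept at the end of every TLAB.
size_t compute_new_tlab_size(size_t obj_words,
                             size_t desired_words,
                             size_t eden_available_words,
                             size_t alignment_reserve_words) {
  size_t needed = obj_words + alignment_reserve_words;
  size_t size   = std::min(desired_words + alignment_reserve_words,
                           eden_available_words);
  return (size >= needed) ? size : 0;
}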

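_refill_waste_limit, initial_refill_waste_limit() and refill_waste_limit_increment() together govern whether a slow-path allocation retires the TLAB or leaves it in place, but the policy itself lives outside this header. The model below is only an illustration of the comments above; in particular, raising the limit each time the TLAB is kept is an assumption about the real code.

#include <cstddef>

// Illustrative model of the retire-or-keep decision hinted at by the
// _refill_waste_limit comment ("hold onto tlab if free() is larger than
// this"): a TLAB with more free space than the limit is worth keeping,
// and each time it is kept the limit grows so it cannot be kept forever.
struct RefillPolicyModel {
  size_t free_words;          // corresponds to free()
  size_t refill_waste_limit;  // corresponds to refill_waste_limit()
  size_t waste_increment;     // corresponds to refill_waste_limit_increment()

  bool should_retire_on_slow_alloc() {
    if (free_words > refill_waste_limit) {
      // Too much space left to throw away: allocate this object outside
      // the TLAB, keep the TLAB, and make retiring easier next time.
      refill_waste_limit += waste_increment;
      return false;
    }
    return true;  // little space left: retire and request a fresh TLAB
  }
};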