
src/hotspot/share/gc/shared/collectedHeap.hpp

rev 49945 : imported patch 8191471-g1-varying-tlab-allocation
rev 49947 : imported patch 8191471-tschatzl-comments
rev 49949 : imported patch 8191471-tschatzl-comments-open
rev 49950 : [mq]: 8191471-pliden-comments

old:
 109 
 110   // Used for filler objects (static, but initialized in ctor).
 111   static size_t _filler_array_max_size;
 112 
 113   unsigned int _total_collections;          // ... started
 114   unsigned int _total_full_collections;     // ... started
 115   NOT_PRODUCT(volatile size_t _promotion_failure_alot_count;)
 116   NOT_PRODUCT(volatile size_t _promotion_failure_alot_gc_number;)
 117 
 118   // Reason for current garbage collection.  Should be set to
 119   // a value reflecting no collection between collections.
 120   GCCause::Cause _gc_cause;
 121   GCCause::Cause _gc_lastcause;
 122   PerfStringVariable* _perf_gc_cause;
 123   PerfStringVariable* _perf_gc_lastcause;
 124 
 125   // Constructor
 126   CollectedHeap();
 127 
 128   // Create a new tlab. All TLAB allocations must go through this.
 129   // To allow more flexible TLAB allocations, min_word_size specifies
 130   // the minimum size needed, while desired_word_size is the requested
 131   // size based on ergonomics. The actually allocated size will be
 132   // returned in actual_word_size.
 133   virtual HeapWord* allocate_new_tlab(size_t min_word_size,
 134                                       size_t desired_word_size,
 135                                       size_t* actual_word_size);
 136 
 137   // Accumulate statistics on all tlabs.
 138   virtual void accumulate_statistics_all_tlabs();
 139 
 140   // Reinitialize tlabs before resuming mutators.
 141   virtual void resize_all_tlabs();
 142 
 143   // Allocate from the current thread's TLAB, with broken-out slow path.
 144   inline static HeapWord* allocate_from_tlab(Klass* klass, Thread* thread, size_t size);
 145   static HeapWord* allocate_from_tlab_slow(Klass* klass, Thread* thread, size_t size);
 146 
 147   // Allocates an uninitialized block of the given size, or returns NULL if
 148   // this is impossible.
 149   inline static HeapWord* common_mem_allocate_noinit(Klass* klass, size_t size, TRAPS);
 150 
 151   // Like common_mem_allocate_noinit, but the block returned by a successful
 152   // allocation is guaranteed initialized to zeros.
 153   inline static HeapWord* common_mem_allocate_init(Klass* klass, size_t size, TRAPS);
 154 
 155   // Helper functions for (VM) allocation.


new:

 109 
 110   // Used for filler objects (static, but initialized in ctor).
 111   static size_t _filler_array_max_size;
 112 
 113   unsigned int _total_collections;          // ... started
 114   unsigned int _total_full_collections;     // ... started
 115   NOT_PRODUCT(volatile size_t _promotion_failure_alot_count;)
 116   NOT_PRODUCT(volatile size_t _promotion_failure_alot_gc_number;)
 117 
 118   // Reason for current garbage collection.  Should be set to
 119   // a value reflecting no collection between collections.
 120   GCCause::Cause _gc_cause;
 121   GCCause::Cause _gc_lastcause;
 122   PerfStringVariable* _perf_gc_cause;
 123   PerfStringVariable* _perf_gc_lastcause;
 124 
 125   // Constructor
 126   CollectedHeap();
 127 
 128   // Create a new tlab. All TLAB allocations must go through this.
 129   // To allow more flexible TLAB allocations, min_size specifies
 130   // the minimum size needed, while requested_size is the requested
 131   // size based on ergonomics. The actually allocated size will be
 132   // returned in actual_size.
 133   virtual HeapWord* allocate_new_tlab(size_t min_size,
 134                                       size_t requested_size,
 135                                       size_t* actual_size);
 136 
 137   // Accumulate statistics on all tlabs.
 138   virtual void accumulate_statistics_all_tlabs();
 139 
 140   // Reinitialize tlabs before resuming mutators.
 141   virtual void resize_all_tlabs();
 142 
 143   // Allocate from the current thread's TLAB, with broken-out slow path.
 144   inline static HeapWord* allocate_from_tlab(Klass* klass, Thread* thread, size_t size);
 145   static HeapWord* allocate_from_tlab_slow(Klass* klass, Thread* thread, size_t size);
 146 
 147   // Allocates an uninitialized block of the given size, or returns NULL if
 148   // this is impossible.
 149   inline static HeapWord* common_mem_allocate_noinit(Klass* klass, size_t size, TRAPS);
 150 
 151   // Like common_mem_allocate_noinit, but the block returned by a successful
 152   // allocation is guaranteed initialized to zeros.
 153   inline static HeapWord* common_mem_allocate_init(Klass* klass, size_t size, TRAPS);
 154 
 155   // Helper functions for (VM) allocation.
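
For reviewers who want the contract spelled out: below is a minimal sketch of what the new allocate_new_tlab signature promises, written outside HotSpot with stand-in types (Eden, try_allocate and the fallback policy are hypothetical, not HotSpot code). The idea is to try the ergonomically requested size first, accept anything that still covers min_size, and report the size actually handed out through actual_size.

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>

    using HeapWord = uintptr_t;  // stand-in for HotSpot's HeapWord

    // Hypothetical region standing in for a collector's allocation area.
    struct Eden {
      size_t free_words;

      HeapWord* try_allocate(size_t words) {
        if (words == 0 || words > free_words) return nullptr;
        HeapWord* mem = static_cast<HeapWord*>(std::malloc(words * sizeof(HeapWord)));
        if (mem != nullptr) free_words -= words;
        return mem;
      }
    };

    // Sketch of the documented contract: try requested_size, fall back to
    // whatever is left as long as it covers min_size, and report the size
    // actually allocated through actual_size.
    HeapWord* allocate_new_tlab(Eden& eden,
                                size_t min_size,
                                size_t requested_size,
                                size_t* actual_size) {
      HeapWord* mem = eden.try_allocate(requested_size);
      if (mem != nullptr) {
        *actual_size = requested_size;
        return mem;
      }
      size_t fallback = eden.free_words;  // what is left over
      if (fallback >= min_size) {
        mem = eden.try_allocate(fallback);
        if (mem != nullptr) {
          *actual_size = fallback;
          return mem;
        }
      }
      *actual_size = 0;
      return nullptr;  // caller must cope, e.g. by triggering a GC
    }

On success actual_size always lands in [min_size, requested_size], which is presumably what lets a collector hand out a smaller-than-requested TLAB (for example, one that does not cross a G1 region boundary) instead of failing the allocation outright.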
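
In the same hedged spirit, a sketch of the fast-path/slow-path split that allocate_from_tlab and allocate_from_tlab_slow describe: the inlinable fast path is a bump-pointer allocation in the current TLAB, and only on exhaustion does control reach the out-of-line slow path, which in the real code retires the TLAB and requests a fresh one. The Tlab type and the refill policy below are stand-ins, not HotSpot's.

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>

    using HeapWord = uintptr_t;  // stand-in for HotSpot's HeapWord

    // Minimal stand-in for ThreadLocalAllocBuffer.
    struct Tlab {
      HeapWord* top = nullptr;
      HeapWord* end = nullptr;

      // Fast path: bump-pointer allocation, just a compare and an add.
      HeapWord* allocate(size_t words) {
        if (top != nullptr && words <= static_cast<size_t>(end - top)) {
          HeapWord* obj = top;
          top += words;
          return obj;
        }
        return nullptr;
      }
    };

    // Out-of-line slow path: install a fresh buffer and retry. A real
    // collector would retire the old TLAB (filling the unused tail with a
    // dummy object) and size the new one via allocate_new_tlab; this
    // sketch just mallocs.
    HeapWord* allocate_from_tlab_slow(Tlab& tlab, size_t words) {
      const size_t refill_words = words > 1024 ? words : 1024;  // arbitrary
      HeapWord* buf = static_cast<HeapWord*>(std::malloc(refill_words * sizeof(HeapWord)));
      if (buf == nullptr) return nullptr;
      tlab.top = buf;  // note: leaks the old buffer; fine for a sketch
      tlab.end = buf + refill_words;
      return tlab.allocate(words);
    }

    // Inlinable fast path mirroring allocate_from_tlab: try the current
    // TLAB first, fall into the slow path only when it is exhausted.
    inline HeapWord* allocate_from_tlab(Tlab& tlab, size_t words) {
      HeapWord* obj = tlab.allocate(words);
      return obj != nullptr ? obj : allocate_from_tlab_slow(tlab, words);
    }

Keeping the slow path out of line keeps the fast path small enough to inline at every allocation site, which is the point of the broken-out declaration above.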
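
And for completeness, the relationship between the two common_mem_allocate variants as the comments describe it: the _init flavor is the _noinit flavor plus zero-filling. Signatures are simplified to the stand-in types above; the real functions also take a Klass* and TRAPS.

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>
    #include <cstring>

    using HeapWord = uintptr_t;  // stand-in, as above

    // Stub for the uninitialized-allocation path (the real code allocates
    // from the TLAB or the shared heap and may return NULL on failure).
    HeapWord* common_mem_allocate_noinit(size_t words) {
      return static_cast<HeapWord*>(std::malloc(words * sizeof(HeapWord)));
    }

    // The _init variant: same allocation, but a successfully returned
    // block is guaranteed zero-filled.
    HeapWord* common_mem_allocate_init(size_t words) {
      HeapWord* mem = common_mem_allocate_noinit(words);
      if (mem != nullptr) std::memset(mem, 0, words * sizeof(HeapWord));
      return mem;
    }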

