109
// NOTE(review): declarations-only fragment of a C++ class body; the
// enclosing class, access specifiers, and method bodies are outside this
// view. The leading numerals on each line appear to be artifacts of a
// numbered listing and are deliberately left untouched here.
110 // Used for filler objects (static, but initialized in ctor).
111 static size_t _filler_array_max_size;
112
113 unsigned int _total_collections; // ... started
114 unsigned int _total_full_collections; // ... started
// NOT_PRODUCT(...) presumably compiles these fields only into non-product
// (debug) builds -- TODO confirm against the macro's definition.
115 NOT_PRODUCT(volatile size_t _promotion_failure_alot_count;)
116 NOT_PRODUCT(volatile size_t _promotion_failure_alot_gc_number;)
117
118 // Reason for current garbage collection. Should be set to
119 // a value reflecting no collection between collections.
120 GCCause::Cause _gc_cause;
121 GCCause::Cause _gc_lastcause;
// Perf-counter string variables mirroring the two cause fields above,
// presumably for external monitoring -- confirm at their update sites.
122 PerfStringVariable* _perf_gc_cause;
123 PerfStringVariable* _perf_gc_lastcause;
124
125 // Constructor
126 CollectedHeap();
127
128 // Create a new tlab. All TLAB allocations must go through this.
// NOTE(review): presumably returns NULL when no TLAB of the requested size
// can be allocated -- confirm in the overriding implementations.
129 virtual HeapWord* allocate_new_tlab(size_t size);
130
131 // Accumulate statistics on all tlabs.
132 virtual void accumulate_statistics_all_tlabs();
133
134 // Reinitialize tlabs before resuming mutators.
135 virtual void resize_all_tlabs();
136
137 // Allocate from the current thread's TLAB, with broken-out slow path.
138 inline static HeapWord* allocate_from_tlab(Klass* klass, Thread* thread, size_t size);
139 static HeapWord* allocate_from_tlab_slow(Klass* klass, Thread* thread, size_t size);
140
141 // Allocate an uninitialized block of the given size, or return NULL if
142 // this is impossible.
143 inline static HeapWord* common_mem_allocate_noinit(Klass* klass, size_t size, TRAPS);
144
145 // Like common_mem_allocate_noinit, but the block returned by a
146 // successful allocation is guaranteed initialized to zeros.
147 inline static HeapWord* common_mem_allocate_init(Klass* klass, size_t size, TRAPS);
148
149 // Helper functions for (VM) allocation.
|
109
// NOTE(review): declarations-only fragment of a C++ class body; the
// enclosing class, access specifiers, and method bodies are outside this
// view. The leading numerals on each line appear to be artifacts of a
// numbered listing and are deliberately left untouched here.
110 // Used for filler objects (static, but initialized in ctor).
111 static size_t _filler_array_max_size;
112
113 unsigned int _total_collections; // ... started
114 unsigned int _total_full_collections; // ... started
// NOT_PRODUCT(...) presumably compiles these fields only into non-product
// (debug) builds -- TODO confirm against the macro's definition.
115 NOT_PRODUCT(volatile size_t _promotion_failure_alot_count;)
116 NOT_PRODUCT(volatile size_t _promotion_failure_alot_gc_number;)
117
118 // Reason for current garbage collection. Should be set to
119 // a value reflecting no collection between collections.
120 GCCause::Cause _gc_cause;
121 GCCause::Cause _gc_lastcause;
// Perf-counter string variables mirroring the two cause fields above,
// presumably for external monitoring -- confirm at their update sites.
122 PerfStringVariable* _perf_gc_cause;
123 PerfStringVariable* _perf_gc_lastcause;
124
125 // Constructor
126 CollectedHeap();
127
128 // Create a new tlab. All TLAB allocations must go through this.
129 // To allow more flexible TLAB allocations min_word_size specifies
130 // the minimum size needed, while desired_word_size is the requested
131 // size based on ergonomics. The actually allocated size will be
132 // filled out in actual_word_size.
// NOTE(review): presumably returns NULL when not even min_word_size can be
// satisfied, leaving actual_word_size untouched -- confirm in overriders.
133 virtual HeapWord* allocate_new_tlab(size_t min_word_size,
134 size_t desired_word_size,
135 size_t* actual_word_size);
136
137 // Accumulate statistics on all tlabs.
138 virtual void accumulate_statistics_all_tlabs();
139
140 // Reinitialize tlabs before resuming mutators.
141 virtual void resize_all_tlabs();
142
143 // Allocate from the current thread's TLAB, with broken-out slow path.
144 inline static HeapWord* allocate_from_tlab(Klass* klass, Thread* thread, size_t size);
145 static HeapWord* allocate_from_tlab_slow(Klass* klass, Thread* thread, size_t size);
146
147 // Allocate an uninitialized block of the given size, or return NULL if
148 // this is impossible.
149 inline static HeapWord* common_mem_allocate_noinit(Klass* klass, size_t size, TRAPS);
150
151 // Like common_mem_allocate_noinit, but the block returned by a
152 // successful allocation is guaranteed initialized to zeros.
153 inline static HeapWord* common_mem_allocate_init(Klass* klass, size_t size, TRAPS);
154
155 // Helper functions for (VM) allocation.
|