29 #include "memory/resourceArea.hpp"
30 #include "memory/universe.hpp"
31 #include "oops/oop.inline.hpp"
32 #include "runtime/thread.inline.hpp"
33 #include "runtime/threadSMR.hpp"
34 #include "utilities/copy.hpp"
35
36 // Thread-Local Edens support
37
// static member initialization

// Upper bound on a TLAB size in words; 0 until set up during VM startup.
size_t ThreadLocalAllocBuffer::_max_size = 0;
// Words reserved at the end of a TLAB for allocation prefetching —
// presumably sized from the platform prefetch distance; confirm at the
// site that sets it.
int ThreadLocalAllocBuffer::_reserve_for_allocation_prefetch = 0;
// Desired number of refills per thread per statistics epoch; used by
// resize() to derive the next TLAB size from expected allocation.
unsigned ThreadLocalAllocBuffer::_target_refills = 0;
// Process-wide TLAB statistics, shared by all threads.
GlobalTLABStats* ThreadLocalAllocBuffer::_global_stats = NULL;
43
// Abandon the current TLAB on the allocation slow path: account the
// unused tail as slow-refill waste, then fill it and retire the buffer.
void ThreadLocalAllocBuffer::clear_before_allocation() {
  // Record the waste first — make_parsable(true) resets the TLAB, after
  // which remaining() would no longer describe this buffer.
  _slow_refill_waste += (unsigned)remaining();
  make_parsable(true);   // also retire the TLAB
}
48
49 void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() {
50 global_stats()->initialize();
51
52 for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
53 thread->tlab().accumulate_statistics();
54 thread->tlab().initialize_statistics();
55 }
56
57 // Publish new stats if some allocation occurred.
58 if (global_stats()->allocation() != 0) {
59 global_stats()->publish();
60 global_stats()->print();
61 }
62 }
63
64 void ThreadLocalAllocBuffer::accumulate_statistics() {
65 Thread* thread = myThread();
66 size_t capacity = Universe::heap()->tlab_capacity(thread);
67 size_t used = Universe::heap()->tlab_used(thread);
68
91 global_stats()->update_allocating_threads();
92 global_stats()->update_number_of_refills(_number_of_refills);
93 global_stats()->update_allocation(_number_of_refills * desired_size());
94 global_stats()->update_gc_waste(_gc_waste);
95 global_stats()->update_slow_refill_waste(_slow_refill_waste);
96 global_stats()->update_fast_refill_waste(_fast_refill_waste);
97
98 } else {
99 assert(_number_of_refills == 0 && _fast_refill_waste == 0 &&
100 _slow_refill_waste == 0 && _gc_waste == 0,
101 "tlab stats == 0");
102 }
103 global_stats()->update_slow_allocations(_slow_allocations);
104 }
105
// Fills the current tlab with a dummy filler array to create
// an illusion of a contiguous Eden and optionally retires the tlab.
// Waste accounting should be done in caller as appropriate; see,
// for example, clear_before_allocation().
void ThreadLocalAllocBuffer::make_parsable(bool retire, bool zap) {
  if (end() != NULL) {
    invariants();

    if (retire) {
      // Credit this TLAB's used bytes to the owning thread before the
      // buffer is dropped.
      myThread()->incr_allocated_bytes(used_bytes());
    }

    // Plug [top, hard_end) with a filler object so heap walkers see a
    // contiguous run of objects; zapping is requested only when retiring.
    CollectedHeap::fill_with_object(top(), hard_end(), retire && zap);

    if (retire || ZeroTLAB) {  // "Reset" the TLAB
      set_start(NULL);
      set_top(NULL);
      set_pf_top(NULL);
      set_end(NULL);
    }
  }
  // When retiring (or under ZeroTLAB) the buffer must end up fully reset.
  assert(!(retire || ZeroTLAB) ||
         (start() == NULL && end() == NULL && top() == NULL),
         "TLAB must be reset");
}
131
132 void ThreadLocalAllocBuffer::resize_all_tlabs() {
133 if (ResizeTLAB) {
134 for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
135 thread->tlab().resize();
136 }
137 }
138 }
139
140 void ThreadLocalAllocBuffer::resize() {
141 // Compute the next tlab size using expected allocation amount
142 assert(ResizeTLAB, "Should not call this otherwise");
143 size_t alloc = (size_t)(_allocation_fraction.average() *
144 (Universe::heap()->tlab_capacity(myThread()) / HeapWordSize));
145 size_t new_size = alloc / _target_refills;
146
147 new_size = MIN2(MAX2(new_size, min_size()), max_size());
148
154 _target_refills, _allocation_fraction.average(), desired_size(), aligned_new_size);
155
156 set_desired_size(aligned_new_size);
157 set_refill_waste_limit(initial_refill_waste_limit());
158 }
159
160 void ThreadLocalAllocBuffer::initialize_statistics() {
161 _number_of_refills = 0;
162 _fast_refill_waste = 0;
163 _slow_refill_waste = 0;
164 _gc_waste = 0;
165 _slow_allocations = 0;
166 }
167
168 void ThreadLocalAllocBuffer::fill(HeapWord* start,
169 HeapWord* top,
170 size_t new_size) {
171 _number_of_refills++;
172 print_stats("fill");
173 assert(top <= start + new_size - alignment_reserve(), "size too small");
174 initialize(start, top, start + new_size - alignment_reserve());
175
176 // Reset amount of internal fragmentation
177 set_refill_waste_limit(initial_refill_waste_limit());
178 }
179
// Set the TLAB bounds to [start, end) with the next allocation at 'top'.
// pf_top (presumably the prefetch watermark — confirm in the header) is
// reset to 'top' as well.
void ThreadLocalAllocBuffer::initialize(HeapWord* start,
                                        HeapWord* top,
                                        HeapWord* end) {
  set_start(start);
  set_top(top);
  set_pf_top(top);
  set_end(end);
  invariants();
}
189
190 void ThreadLocalAllocBuffer::initialize() {
191 initialize(NULL, // start
192 NULL, // top
193 NULL); // end
194
195 set_desired_size(initial_desired_size());
196
197 // Following check is needed because at startup the main
198 // thread is initialized before the heap is. The initialization for
199 // this thread is redone in startup_initialization below.
200 if (Universe::heap() != NULL) {
201 size_t capacity = Universe::heap()->tlab_capacity(myThread()) / HeapWordSize;
202 double alloc_frac = desired_size() * target_refills() / (double) capacity;
203 _allocation_fraction.sample(alloc_frac);
204 }
205
206 set_refill_waste_limit(initial_refill_waste_limit());
289 _allocation_fraction.average(),
290 _allocation_fraction.average() * tlab_used / K,
291 _number_of_refills, waste_percent,
292 _gc_waste * HeapWordSize,
293 _slow_refill_waste * HeapWordSize,
294 _fast_refill_waste * HeapWordSize);
295 }
296
297 void ThreadLocalAllocBuffer::verify() {
298 HeapWord* p = start();
299 HeapWord* t = top();
300 HeapWord* prev_p = NULL;
301 while (p < t) {
302 oop(p)->verify();
303 prev_p = p;
304 p += oop(p)->size();
305 }
306 guarantee(p == top(), "end of last object must match end of space");
307 }
308
// Recover the owning Thread from this TLAB: the TLAB is embedded in
// Thread at a fixed offset, so subtract that offset from 'this'.
Thread* ThreadLocalAllocBuffer::myThread() {
  return (Thread*)(((char *)this) +
                   in_bytes(start_offset()) -
                   in_bytes(Thread::tlab_start_offset()));
}
314
315
316 GlobalTLABStats::GlobalTLABStats() :
317 _allocating_threads_avg(TLABAllocationWeight) {
318
319 initialize();
320
321 _allocating_threads_avg.sample(1); // One allocating thread at startup
322
323 if (UsePerfData) {
324
325 EXCEPTION_MARK;
326 ResourceMark rm;
327
328 char* cname = PerfDataManager::counter_name("tlab", "allocThreads");
329 _perf_allocating_threads =
330 PerfDataManager::create_variable(SUN_GC, cname, PerfData::U_None, CHECK);
331
332 cname = PerfDataManager::counter_name("tlab", "fills");
333 _perf_total_refills =
334 PerfDataManager::create_variable(SUN_GC, cname, PerfData::U_None, CHECK);
|
29 #include "memory/resourceArea.hpp"
30 #include "memory/universe.hpp"
31 #include "oops/oop.inline.hpp"
32 #include "runtime/thread.inline.hpp"
33 #include "runtime/threadSMR.hpp"
34 #include "utilities/copy.hpp"
35
36 // Thread-Local Edens support
37
// static member initialization

// Upper bound on a TLAB size in words; 0 until set up during VM startup.
size_t ThreadLocalAllocBuffer::_max_size = 0;
// Words reserved at the end of a TLAB for allocation prefetching —
// presumably sized from the platform prefetch distance; confirm at the
// site that sets it.
int ThreadLocalAllocBuffer::_reserve_for_allocation_prefetch = 0;
// Desired number of refills per thread per statistics epoch; used by
// resize() to derive the next TLAB size from expected allocation.
unsigned ThreadLocalAllocBuffer::_target_refills = 0;
// Process-wide TLAB statistics, shared by all threads.
GlobalTLABStats* ThreadLocalAllocBuffer::_global_stats = NULL;
43
// Abandon the current TLAB on the allocation slow path: account the
// unused tail as slow-refill waste, then fill it and retire the buffer.
void ThreadLocalAllocBuffer::clear_before_allocation() {
  // Record the waste first — make_parsable(true) resets the TLAB, after
  // which remaining() would no longer describe this buffer.
  _slow_refill_waste += (unsigned)remaining();
  make_parsable(true);   // also retire the TLAB
}
48
49 size_t ThreadLocalAllocBuffer::remaining() {
50 if (current_end() == NULL) {
51 return 0;
52 }
53
54 // TODO: To be deprecated when FastTLABRefill is deprecated.
55 update_end_pointers();
56 return pointer_delta(reserved_end(), top());
57 }
58
59 void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() {
60 global_stats()->initialize();
61
62 for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
63 thread->tlab().accumulate_statistics();
64 thread->tlab().initialize_statistics();
65 }
66
67 // Publish new stats if some allocation occurred.
68 if (global_stats()->allocation() != 0) {
69 global_stats()->publish();
70 global_stats()->print();
71 }
72 }
73
74 void ThreadLocalAllocBuffer::accumulate_statistics() {
75 Thread* thread = myThread();
76 size_t capacity = Universe::heap()->tlab_capacity(thread);
77 size_t used = Universe::heap()->tlab_used(thread);
78
101 global_stats()->update_allocating_threads();
102 global_stats()->update_number_of_refills(_number_of_refills);
103 global_stats()->update_allocation(_number_of_refills * desired_size());
104 global_stats()->update_gc_waste(_gc_waste);
105 global_stats()->update_slow_refill_waste(_slow_refill_waste);
106 global_stats()->update_fast_refill_waste(_fast_refill_waste);
107
108 } else {
109 assert(_number_of_refills == 0 && _fast_refill_waste == 0 &&
110 _slow_refill_waste == 0 && _gc_waste == 0,
111 "tlab stats == 0");
112 }
113 global_stats()->update_slow_allocations(_slow_allocations);
114 }
115
// Fills the current tlab with a dummy filler array to create
// an illusion of a contiguous Eden and optionally retires the tlab.
// Waste accounting should be done in caller as appropriate; see,
// for example, clear_before_allocation().
void ThreadLocalAllocBuffer::make_parsable(bool retire, bool zap) {
  if (current_end() != NULL) {
    invariants();

    if (retire) {
      // Credit this TLAB's used bytes to the owning thread before the
      // buffer is dropped.
      myThread()->incr_allocated_bytes(used_bytes());
    }

    // TODO: To be deprecated when FastTLABRefill is deprecated.
    update_end_pointers();
    // Plug [top, reserved_end) with a filler object so heap walkers see
    // a contiguous run of objects; zapping is requested only when retiring.
    CollectedHeap::fill_with_object(top(), reserved_end(), retire && zap);

    if (retire || ZeroTLAB) {  // "Reset" the TLAB
      set_start(NULL);
      set_top(NULL);
      set_pf_top(NULL);
      // All three end pointers (current, allocation, last-slow-path) must
      // be cleared together.
      set_current_end(NULL);
      set_allocation_end(NULL);
      set_last_slow_path_end(NULL);
    }
  }
  // When retiring (or under ZeroTLAB) the buffer must end up fully reset.
  assert(!(retire || ZeroTLAB) ||
         (start() == NULL && current_end() == NULL && top() == NULL &&
          _allocation_end == NULL && _last_slow_path_end == NULL),
         "TLAB must be reset");
}
146
147 void ThreadLocalAllocBuffer::resize_all_tlabs() {
148 if (ResizeTLAB) {
149 for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
150 thread->tlab().resize();
151 }
152 }
153 }
154
155 void ThreadLocalAllocBuffer::resize() {
156 // Compute the next tlab size using expected allocation amount
157 assert(ResizeTLAB, "Should not call this otherwise");
158 size_t alloc = (size_t)(_allocation_fraction.average() *
159 (Universe::heap()->tlab_capacity(myThread()) / HeapWordSize));
160 size_t new_size = alloc / _target_refills;
161
162 new_size = MIN2(MAX2(new_size, min_size()), max_size());
163
169 _target_refills, _allocation_fraction.average(), desired_size(), aligned_new_size);
170
171 set_desired_size(aligned_new_size);
172 set_refill_waste_limit(initial_refill_waste_limit());
173 }
174
175 void ThreadLocalAllocBuffer::initialize_statistics() {
176 _number_of_refills = 0;
177 _fast_refill_waste = 0;
178 _slow_refill_waste = 0;
179 _gc_waste = 0;
180 _slow_allocations = 0;
181 }
182
183 void ThreadLocalAllocBuffer::fill(HeapWord* start,
184 HeapWord* top,
185 size_t new_size) {
186 _number_of_refills++;
187 print_stats("fill");
188 assert(top <= start + new_size - alignment_reserve(), "size too small");
189
190 initialize(start, top, start + new_size - alignment_reserve());
191
192 if (ThreadHeapSampler::enabled()) {
193 set_sample_end();
194 }
195
196 // Reset amount of internal fragmentation
197 set_refill_waste_limit(initial_refill_waste_limit());
198 }
199
// Set the TLAB bounds to [start, end) with the next allocation at 'top'.
// pf_top (presumably the prefetch watermark — confirm in the header) is
// reset to 'top'; all three end pointers start out identical.
void ThreadLocalAllocBuffer::initialize(HeapWord* start,
                                        HeapWord* top,
                                        HeapWord* end) {
  set_start(start);
  set_top(top);
  set_pf_top(top);
  set_current_end(end);
  set_allocation_end(end);
  set_last_slow_path_end(end);
  invariants();
}
211
212 void ThreadLocalAllocBuffer::initialize() {
213 initialize(NULL, // start
214 NULL, // top
215 NULL); // end
216
217 set_desired_size(initial_desired_size());
218
219 // Following check is needed because at startup the main
220 // thread is initialized before the heap is. The initialization for
221 // this thread is redone in startup_initialization below.
222 if (Universe::heap() != NULL) {
223 size_t capacity = Universe::heap()->tlab_capacity(myThread()) / HeapWordSize;
224 double alloc_frac = desired_size() * target_refills() / (double) capacity;
225 _allocation_fraction.sample(alloc_frac);
226 }
227
228 set_refill_waste_limit(initial_refill_waste_limit());
311 _allocation_fraction.average(),
312 _allocation_fraction.average() * tlab_used / K,
313 _number_of_refills, waste_percent,
314 _gc_waste * HeapWordSize,
315 _slow_refill_waste * HeapWordSize,
316 _fast_refill_waste * HeapWordSize);
317 }
318
319 void ThreadLocalAllocBuffer::verify() {
320 HeapWord* p = start();
321 HeapWord* t = top();
322 HeapWord* prev_p = NULL;
323 while (p < t) {
324 oop(p)->verify();
325 prev_p = p;
326 p += oop(p)->size();
327 }
328 guarantee(p == top(), "end of last object must match end of space");
329 }
330
331 void ThreadLocalAllocBuffer::set_sample_end() {
332 size_t heap_words_remaining = pointer_delta(_current_end, _top);
333 size_t bytes_until_sample = myThread()->heap_sampler().bytes_until_sample();
334 size_t words_until_sample = bytes_until_sample / HeapWordSize;;
335
336 if (heap_words_remaining > words_until_sample) {
337 HeapWord* new_end = _top + words_until_sample;
338 set_current_end(new_end);
339 set_last_slow_path_end(new_end);
340 _bytes_since_last_sample_point = bytes_until_sample;
341 } else {
342 _bytes_since_last_sample_point = heap_words_remaining * HeapWordSize;;
343 }
344 }
345
// Recover the owning Thread from this TLAB: the TLAB is embedded in
// Thread at a fixed offset, so subtract that offset from 'this'.
Thread* ThreadLocalAllocBuffer::myThread() {
  return (Thread*)(((char *)this) +
                   in_bytes(start_offset()) -
                   in_bytes(Thread::tlab_start_offset()));
}
351
// Restore the true allocation end as the current end, undoing any
// shortening of the TLAB done for heap sampling (see set_sample_end).
void ThreadLocalAllocBuffer::set_back_allocation_end() {
  update_end_pointers();
  _current_end = _allocation_end;
}
356
// Re-synchronize _last_slow_path_end and _allocation_end with
// _current_end after a refill that may have bypassed the slow path
// (FastTLABRefill); a mismatch between the two end pointers is the
// tell-tale that such a refill happened.
void ThreadLocalAllocBuffer::update_end_pointers() {
  // Did a fast TLAB refill occur? (This will be deprecated when fast TLAB
  // refill disappears).
  if (_last_slow_path_end != _current_end) {
    // Fix up the last slow path end to be now the end of this TLAB.
    _last_slow_path_end = _current_end;
    _allocation_end = _current_end;
  }
}
366
367 HeapWord* ThreadLocalAllocBuffer::allocate_sampled_object(size_t size) {
368 Thread* thread = myThread();
369 thread->tlab().set_back_allocation_end();
370 HeapWord* result = thread->tlab().allocate(size);
371
372 if (result) {
373 thread->heap_sampler().check_for_sampling(result, size * HeapWordSize, _bytes_since_last_sample_point);
374 thread->tlab().set_sample_end();
375 }
376
377 return result;
378 }
379
// End of the TLAB's reserved memory: the true allocation end plus the
// alignment reserve. Only valid once the end pointers are in sync, hence
// the precondition on update_end_pointers().
HeapWord* ThreadLocalAllocBuffer::reserved_end() {
  assert (_last_slow_path_end == _current_end,
          "Have to call update_end_pointers before reserved_end.");
  return _allocation_end + alignment_reserve();
}
385
386 GlobalTLABStats::GlobalTLABStats() :
387 _allocating_threads_avg(TLABAllocationWeight) {
388
389 initialize();
390
391 _allocating_threads_avg.sample(1); // One allocating thread at startup
392
393 if (UsePerfData) {
394
395 EXCEPTION_MARK;
396 ResourceMark rm;
397
398 char* cname = PerfDataManager::counter_name("tlab", "allocThreads");
399 _perf_allocating_threads =
400 PerfDataManager::create_variable(SUN_GC, cname, PerfData::U_None, CHECK);
401
402 cname = PerfDataManager::counter_name("tlab", "fills");
403 _perf_total_refills =
404 PerfDataManager::create_variable(SUN_GC, cname, PerfData::U_None, CHECK);
|