34 #include "utilities/copy.hpp"
35
36 // Thread-Local Edens support
37
38 // static member initialization
39 size_t ThreadLocalAllocBuffer::_max_size = 0;                   // upper bound on TLAB size; real value set during startup (not visible in this excerpt)
40 int ThreadLocalAllocBuffer::_reserve_for_allocation_prefetch = 0; // words kept free at the TLAB end -- presumably for allocation prefetch; confirm in header
41 unsigned ThreadLocalAllocBuffer::_target_refills = 0;           // desired number of refills per epoch, used by resize() to size new TLABs
42 GlobalTLABStats* ThreadLocalAllocBuffer::_global_stats = NULL;  // shared aggregate statistics object; allocated elsewhere
43
44 void ThreadLocalAllocBuffer::clear_before_allocation() { // retire the current TLAB, counting its unused tail as slow-refill waste
45 _slow_refill_waste += (unsigned)remaining(); // words left unused in this TLAB are waste attributed to the slow path
46 make_parsable(true); // also retire the TLAB
47 }
48
49 size_t ThreadLocalAllocBuffer::remaining() { // free words left in this TLAB, 0 if none is installed
50 if (current_end() == NULL) { // no TLAB currently installed for this thread
51 return 0;
52 }
53
54 // TODO: To be deprecated when FastTLABRefill is deprecated.
55 update_end_pointers(); // reconcile end pointers in case a fast refill moved _current_end
56 return pointer_delta(reserved_end(), top()); // words between the allocation frontier and the reserved end
57 }
58
59 void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() { // fold every Java thread's TLAB stats into the global aggregate
60 global_stats()->initialize(); // reset the global aggregate before folding in per-thread stats
61
62 for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
63 thread->tlab().accumulate_statistics(); // fold this thread's counters into the global stats
64 thread->tlab().initialize_statistics(); // NOTE(review): presumably resets the per-thread counters for the next epoch -- confirm
65 }
66
67 // Publish new stats if some allocation occurred.
68 if (global_stats()->allocation() != 0) {
69 global_stats()->publish();
70 global_stats()->print();
71 }
72 }
73
74 void ThreadLocalAllocBuffer::accumulate_statistics() { // push this TLAB's counters into the global stats
75 Thread* thread = myThread();
108 } else { // NOTE(review): original lines 76-107 are elided in this excerpt
109 assert(_number_of_refills == 0 && _fast_refill_waste == 0 &&
110 _slow_refill_waste == 0 && _gc_waste == 0,
111 "tlab stats == 0"); // with no refills, every waste counter must still be zero
112 }
113 global_stats()->update_slow_allocations(_slow_allocations); // slow allocations are reported even when there were no refills
114 }
115
116 // Fills the current tlab with a dummy filler array to create
117 // an illusion of a contiguous Eden and optionally retires the tlab.
118 // Waste accounting should be done in caller as appropriate; see,
119 // for example, clear_before_allocation().
120 void ThreadLocalAllocBuffer::make_parsable(bool retire, bool zap) { // fill the unused tail so the heap stays walkable; optionally retire
121 if (current_end() != NULL) { // only act if a TLAB is currently installed
122 invariants();
123
124 if (retire) {
125 myThread()->incr_allocated_bytes(used_bytes()); // credit the consumed portion to the owning thread
126 }
127
128 // TODO: To be deprecated when FastTLABRefill is deprecated.
129 update_end_pointers(); // reconcile end pointers before computing reserved_end()
130 CollectedHeap::fill_with_object(top(), reserved_end(), retire && zap); // plug [top, reserved_end) with a dummy filler object
131
132 if (retire || ZeroTLAB) { // "Reset" the TLAB
133 set_start(NULL);
134 set_top(NULL);
135 set_pf_top(NULL);
136 set_current_end(NULL);
137 set_allocation_end(NULL);
138 set_last_slow_path_end(NULL);
139 }
140 }
141 assert(!(retire || ZeroTLAB) ||
142 (start() == NULL && current_end() == NULL && top() == NULL &&
143 _allocation_end == NULL && _last_slow_path_end == NULL),
144 "TLAB must be reset");
145 }
146
147 void ThreadLocalAllocBuffer::resize_all_tlabs() { // resize every Java thread's TLAB, if resizing is enabled
148 if (ResizeTLAB) { // global flag; resizing is optional
149 for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
150 thread->tlab().resize();
151 }
152 }
153 }
154
155 void ThreadLocalAllocBuffer::resize() { // choose the next TLAB size from this thread's expected allocation
156 // Compute the next tlab size using expected allocation amount
157 assert(ResizeTLAB, "Should not call this otherwise");
158 size_t alloc = (size_t)(_allocation_fraction.average() *
159 (Universe::heap()->tlab_capacity(myThread()) / HeapWordSize)); // expected words this thread will allocate
160 size_t new_size = alloc / _target_refills; // spread the expected allocation over the target refill count
161
162 new_size = MIN2(MAX2(new_size, min_size()), max_size()); // clamp to [min_size, max_size]
163
188 assert(top <= start + new_size - alignment_reserve(), "size too small"); // NOTE(review): original lines 163-187 are elided in this excerpt
189
190 initialize(start, top, start + new_size - alignment_reserve()); // reinstall the TLAB with the new usable end
191
192 if (ThreadHeapSampler::enabled()) {
193 set_sample_end(); // re-arm the sampling end within the new TLAB bounds
194 }
195
196 // Reset amount of internal fragmentation
197 set_refill_waste_limit(initial_refill_waste_limit());
198 }
199
200 void ThreadLocalAllocBuffer::initialize(HeapWord* start, // first word of the new TLAB
201 HeapWord* top, // current allocation frontier within it
202 HeapWord* end) { // usable end (excludes alignment reserve)
203 set_start(start);
204 set_top(top);
205 set_pf_top(top); // prefetch top starts at the allocation frontier
206 set_current_end(end);
207 set_allocation_end(end); // current and allocation end coincide until sampling shortens current_end
208 set_last_slow_path_end(end);
209 invariants();
210 }
211
212 void ThreadLocalAllocBuffer::initialize() { // one-time setup for a fresh thread: no TLAB installed yet
213 initialize(NULL, // start
214 NULL, // top
215 NULL); // end
216
217 set_desired_size(initial_desired_size());
218
219 // Following check is needed because at startup the main
220 // thread is initialized before the heap is. The initialization for
221 // this thread is redone in startup_initialization below.
222 if (Universe::heap() != NULL) {
223 size_t capacity = Universe::heap()->tlab_capacity(myThread()) / HeapWordSize;
224 double alloc_frac = desired_size() * target_refills() / (double) capacity; // expected fraction of TLAB capacity this thread consumes
225 _allocation_fraction.sample(alloc_frac);
226 }
227
228 set_refill_waste_limit(initial_refill_waste_limit());
319 void ThreadLocalAllocBuffer::verify() { // walk the TLAB and verify every object in [start, top)
320 HeapWord* p = start();
321 HeapWord* t = top();
322 HeapWord* prev_p = NULL; // NOTE(review): written but never read -- dead variable, apparently kept for debugger inspection
323 while (p < t) {
324 oop(p)->verify(); // verify each allocated object
325 prev_p = p;
326 p += oop(p)->size(); // advance by the object's size in words
327 }
328 guarantee(p == top(), "end of last object must match end of space");
329 }
330
331 void ThreadLocalAllocBuffer::set_sample_end() { // shorten the usable TLAB end so allocation takes the slow path at the next sample point
332 size_t heap_words_remaining = pointer_delta(_current_end, _top); // free words left in the current TLAB
333 size_t bytes_until_sample = myThread()->heap_sampler().bytes_until_sample();
334 size_t words_until_sample = bytes_until_sample / HeapWordSize;
335
336 if (heap_words_remaining > words_until_sample) { // sample point falls inside this TLAB
337 HeapWord* new_end = _top + words_until_sample;
338 set_current_end(new_end);
339 set_last_slow_path_end(new_end); // keep the fast-refill bookkeeping consistent with the shortened end
340 _bytes_since_last_sample_point = bytes_until_sample;
341 } else { // TLAB is exhausted before the sample point; account for what it can hold
342 _bytes_since_last_sample_point = heap_words_remaining * HeapWordSize;
343 }
344 }
345
346 Thread* ThreadLocalAllocBuffer::myThread() { // recover the owning Thread from this embedded TLAB's address
347 return (Thread*)(((char *)this) +
348 in_bytes(start_offset()) -
349 in_bytes(Thread::tlab_start_offset())); // subtract the TLAB's offset within Thread
350 }
351
352 void ThreadLocalAllocBuffer::set_back_allocation_end() { // undo any sampling-induced shortening of the usable end
353 update_end_pointers(); // reconcile pointers first in case a fast refill occurred
354 _current_end = _allocation_end;
355 }
356
357 void ThreadLocalAllocBuffer::update_end_pointers() { // reconcile end pointers after a possible fast (assembly) TLAB refill
358 // Did a fast TLAB refill occur? (This will be deprecated when fast TLAB
359 // refill disappears).
360 if (_last_slow_path_end != _current_end) { // a fast refill moved _current_end without going through the slow path
361 // Fix up the last slow path end to be now the end of this TLAB.
362 _last_slow_path_end = _current_end;
363 _allocation_end = _current_end;
364 }
365 }
366
367 HeapWord* ThreadLocalAllocBuffer::allocate_sampled_object(size_t size) { // allocate `size` words and notify the heap sampler
368 Thread* thread = myThread();
369 thread->tlab().set_back_allocation_end(); // restore the full TLAB so the allocation can use all remaining space
370 HeapWord* result = thread->tlab().allocate(size);
371
372 if (result) {
373 thread->heap_sampler().check_for_sampling(result, size * HeapWordSize, _bytes_since_last_sample_point);
374 thread->tlab().set_sample_end(); // re-arm the next sample point
375 }
376
377 return result; // NULL if the TLAB could not satisfy the request
378 }
379
380 HeapWord* ThreadLocalAllocBuffer::reserved_end() { // true end of the TLAB including the alignment reserve
381 assert (_last_slow_path_end == _current_end,
382 "Have to call update_end_pointers before reserved_end."); // guard against a stale end after a fast refill
383 return _allocation_end + alignment_reserve();
384 }
385
386 GlobalTLABStats::GlobalTLABStats() : // NOTE(review): constructor is truncated in this excerpt
387 _allocating_threads_avg(TLABAllocationWeight) { // exponential average weighted by TLABAllocationWeight
388
389 initialize();
390
391 _allocating_threads_avg.sample(1); // One allocating thread at startup
392
393 if (UsePerfData) { // create jvmstat perf counters only when perf data is enabled
394
395 EXCEPTION_MARK;
396 ResourceMark rm;
397
398 char* cname = PerfDataManager::counter_name("tlab", "allocThreads");
399 _perf_allocating_threads =
400 PerfDataManager::create_variable(SUN_GC, cname, PerfData::U_None, CHECK);
401
402 cname = PerfDataManager::counter_name("tlab", "fills");
|
34 #include "utilities/copy.hpp"
35
36 // Thread-Local Edens support
37
38 // static member initialization
39 size_t ThreadLocalAllocBuffer::_max_size = 0;                   // upper bound on TLAB size; real value set during startup (not visible in this excerpt)
40 int ThreadLocalAllocBuffer::_reserve_for_allocation_prefetch = 0; // words kept free at the TLAB end -- presumably for allocation prefetch; confirm in header
41 unsigned ThreadLocalAllocBuffer::_target_refills = 0;           // desired number of refills per epoch, used by resize() to size new TLABs
42 GlobalTLABStats* ThreadLocalAllocBuffer::_global_stats = NULL;  // shared aggregate statistics object; allocated elsewhere
43
44 void ThreadLocalAllocBuffer::clear_before_allocation() { // retire the current TLAB, counting its unused tail as slow-refill waste
45 _slow_refill_waste += (unsigned)remaining(); // words left unused in this TLAB are waste attributed to the slow path
46 make_parsable(true); // also retire the TLAB
47 }
48
49 size_t ThreadLocalAllocBuffer::remaining() { // free words left in this TLAB, 0 if none is installed
50 if (current_end() == NULL) { // no TLAB currently installed for this thread
51 return 0;
52 }
53
54 return pointer_delta(reserved_end(), top()); // words between the allocation frontier and the reserved end
55 }
56
57 void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() { // fold every Java thread's TLAB stats into the global aggregate
58 global_stats()->initialize(); // reset the global aggregate before folding in per-thread stats
59
60 for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
61 thread->tlab().accumulate_statistics(); // fold this thread's counters into the global stats
62 thread->tlab().initialize_statistics(); // NOTE(review): presumably resets the per-thread counters for the next epoch -- confirm
63 }
64
65 // Publish new stats if some allocation occurred.
66 if (global_stats()->allocation() != 0) {
67 global_stats()->publish();
68 global_stats()->print();
69 }
70 }
71
72 void ThreadLocalAllocBuffer::accumulate_statistics() { // push this TLAB's counters into the global stats
73 Thread* thread = myThread();
106 } else { // NOTE(review): original lines 74-105 are elided in this excerpt
107 assert(_number_of_refills == 0 && _fast_refill_waste == 0 &&
108 _slow_refill_waste == 0 && _gc_waste == 0,
109 "tlab stats == 0"); // with no refills, every waste counter must still be zero
110 }
111 global_stats()->update_slow_allocations(_slow_allocations); // slow allocations are reported even when there were no refills
112 }
113
114 // Fills the current tlab with a dummy filler array to create
115 // an illusion of a contiguous Eden and optionally retires the tlab.
116 // Waste accounting should be done in caller as appropriate; see,
117 // for example, clear_before_allocation().
118 void ThreadLocalAllocBuffer::make_parsable(bool retire, bool zap) { // fill the unused tail so the heap stays walkable; optionally retire
119 if (current_end() != NULL) { // only act if a TLAB is currently installed
120 invariants();
121
122 if (retire) {
123 myThread()->incr_allocated_bytes(used_bytes()); // credit the consumed portion to the owning thread
124 }
125
126 CollectedHeap::fill_with_object(top(), reserved_end(), retire && zap); // plug [top, reserved_end) with a dummy filler object
127
128 if (retire || ZeroTLAB) { // "Reset" the TLAB
129 set_start(NULL);
130 set_top(NULL);
131 set_pf_top(NULL);
132 set_current_end(NULL);
133 set_allocation_end(NULL);
134 }
135 }
136 assert(!(retire || ZeroTLAB) ||
137 (start() == NULL && current_end() == NULL && top() == NULL &&
138 _allocation_end == NULL),
139 "TLAB must be reset");
140 }
141
142 void ThreadLocalAllocBuffer::resize_all_tlabs() { // resize every Java thread's TLAB, if resizing is enabled
143 if (ResizeTLAB) { // global flag; resizing is optional
144 for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
145 thread->tlab().resize();
146 }
147 }
148 }
149
150 void ThreadLocalAllocBuffer::resize() { // choose the next TLAB size from this thread's expected allocation
151 // Compute the next tlab size using expected allocation amount
152 assert(ResizeTLAB, "Should not call this otherwise");
153 size_t alloc = (size_t)(_allocation_fraction.average() *
154 (Universe::heap()->tlab_capacity(myThread()) / HeapWordSize)); // expected words this thread will allocate
155 size_t new_size = alloc / _target_refills; // spread the expected allocation over the target refill count
156
157 new_size = MIN2(MAX2(new_size, min_size()), max_size()); // clamp to [min_size, max_size]
158
183 assert(top <= start + new_size - alignment_reserve(), "size too small"); // NOTE(review): original lines 158-182 are elided in this excerpt
184
185 initialize(start, top, start + new_size - alignment_reserve()); // reinstall the TLAB with the new usable end
186
187 if (ThreadHeapSampler::enabled()) {
188 set_sample_end(); // re-arm the sampling end within the new TLAB bounds
189 }
190
191 // Reset amount of internal fragmentation
192 set_refill_waste_limit(initial_refill_waste_limit());
193 }
194
195 void ThreadLocalAllocBuffer::initialize(HeapWord* start, // first word of the new TLAB
196 HeapWord* top, // current allocation frontier within it
197 HeapWord* end) { // usable end (excludes alignment reserve)
198 set_start(start);
199 set_top(top);
200 set_pf_top(top); // prefetch top starts at the allocation frontier
201 set_current_end(end);
202 set_allocation_end(end); // current and allocation end coincide until sampling shortens current_end
203 invariants();
204 }
205
206 void ThreadLocalAllocBuffer::initialize() { // one-time setup for a fresh thread: no TLAB installed yet
207 initialize(NULL, // start
208 NULL, // top
209 NULL); // end
210
211 set_desired_size(initial_desired_size());
212
213 // Following check is needed because at startup the main
214 // thread is initialized before the heap is. The initialization for
215 // this thread is redone in startup_initialization below.
216 if (Universe::heap() != NULL) {
217 size_t capacity = Universe::heap()->tlab_capacity(myThread()) / HeapWordSize;
218 double alloc_frac = desired_size() * target_refills() / (double) capacity; // expected fraction of TLAB capacity this thread consumes
219 _allocation_fraction.sample(alloc_frac);
220 }
221
222 set_refill_waste_limit(initial_refill_waste_limit());
313 void ThreadLocalAllocBuffer::verify() { // walk the TLAB and verify every object in [start, top)
314 HeapWord* p = start();
315 HeapWord* t = top();
316 HeapWord* prev_p = NULL; // NOTE(review): written but never read -- dead variable, apparently kept for debugger inspection
317 while (p < t) {
318 oop(p)->verify(); // verify each allocated object
319 prev_p = p;
320 p += oop(p)->size(); // advance by the object's size in words
321 }
322 guarantee(p == top(), "end of last object must match end of space");
323 }
324
325 void ThreadLocalAllocBuffer::set_sample_end() { // shorten the usable TLAB end so allocation takes the slow path at the next sample point
326 size_t heap_words_remaining = pointer_delta(_current_end, _top); // free words left in the current TLAB
327 size_t bytes_until_sample = myThread()->heap_sampler().bytes_until_sample();
328 size_t words_until_sample = bytes_until_sample / HeapWordSize;
329
330 if (heap_words_remaining > words_until_sample) { // sample point falls inside this TLAB
331 HeapWord* new_end = _top + words_until_sample;
332 set_current_end(new_end);
333 _bytes_since_last_sample_point = bytes_until_sample;
334 } else { // TLAB is exhausted before the sample point; account for what it can hold
335 _bytes_since_last_sample_point = heap_words_remaining * HeapWordSize;
336 }
337 }
338
339 Thread* ThreadLocalAllocBuffer::myThread() { // recover the owning Thread from this embedded TLAB's address
340 return (Thread*)(((char *)this) +
341 in_bytes(start_offset()) -
342 in_bytes(Thread::tlab_start_offset())); // subtract the TLAB's offset within Thread
343 }
344
345 void ThreadLocalAllocBuffer::set_back_allocation_end() { // undo any sampling-induced shortening of the usable end
346 _current_end = _allocation_end;
347 }
348
349 HeapWord* ThreadLocalAllocBuffer::allocate_sampled_object(size_t size) { // allocate `size` words and notify the heap sampler
350 Thread* thread = myThread();
351 thread->tlab().set_back_allocation_end(); // restore the full TLAB so the allocation can use all remaining space
352 HeapWord* result = thread->tlab().allocate(size);
353
354 if (result) {
355 thread->heap_sampler().check_for_sampling(result, size * HeapWordSize, _bytes_since_last_sample_point);
356 thread->tlab().set_sample_end(); // re-arm the next sample point
357 }
358
359 return result; // NULL if the TLAB could not satisfy the request
360 }
361
362 HeapWord* ThreadLocalAllocBuffer::reserved_end() { // true end of the TLAB including the alignment reserve
363 return _allocation_end + alignment_reserve();
364 }
365
366 GlobalTLABStats::GlobalTLABStats() : // NOTE(review): constructor is truncated in this excerpt
367 _allocating_threads_avg(TLABAllocationWeight) { // exponential average weighted by TLABAllocationWeight
368
369 initialize();
370
371 _allocating_threads_avg.sample(1); // One allocating thread at startup
372
373 if (UsePerfData) { // create jvmstat perf counters only when perf data is enabled
374
375 EXCEPTION_MARK;
376 ResourceMark rm;
377
378 char* cname = PerfDataManager::counter_name("tlab", "allocThreads");
379 _perf_allocating_threads =
380 PerfDataManager::create_variable(SUN_GC, cname, PerfData::U_None, CHECK);
381
382 cname = PerfDataManager::counter_name("tlab", "fills");
|