111 if (end() != NULL) {
112 invariants();
113
114 if (retire) {
115 myThread()->incr_allocated_bytes(used_bytes());
116 }
117
118 CollectedHeap::fill_with_object(top(), hard_end(), retire && zap);
119
120 if (retire || ZeroTLAB) { // "Reset" the TLAB
121 set_start(NULL);
122 set_top(NULL);
123 set_pf_top(NULL);
124 set_end(NULL);
125 set_actual_end(NULL);
126 set_slow_path_end(NULL);
127 }
128 }
129 assert(!(retire || ZeroTLAB) ||
130 (start() == NULL && end() == NULL && top() == NULL &&
131 actual_end() == NULL && slow_path_end() == NULL),
132 "TLAB must be reset");
133 }
134
135 void ThreadLocalAllocBuffer::resize_all_tlabs() {
136 if (ResizeTLAB) {
137 for (JavaThread *thread = Threads::first(); thread != NULL; thread = thread->next()) {
138 thread->tlab().resize();
139 }
140 }
141 }
142
143 void ThreadLocalAllocBuffer::resize() {
144 // Compute the next tlab size using expected allocation amount
145 assert(ResizeTLAB, "Should not call this otherwise");
146 size_t alloc = (size_t)(_allocation_fraction.average() *
147 (Universe::heap()->tlab_capacity(myThread()) / HeapWordSize));
148 size_t new_size = alloc / _target_refills;
149
150 new_size = MIN2(MAX2(new_size, min_size()), max_size());
151
162
163 void ThreadLocalAllocBuffer::initialize_statistics() {
164 _number_of_refills = 0;
165 _fast_refill_waste = 0;
166 _slow_refill_waste = 0;
167 _gc_waste = 0;
168 _slow_allocations = 0;
169 }
170
// Install a freshly allocated chunk [start, start + new_size) as this
// thread's TLAB; 'top' marks the end of the already-allocated prefix.
void ThreadLocalAllocBuffer::fill(HeapWord* start,
                                  HeapWord* top,
                                  size_t new_size) {
  _number_of_refills++;
  print_stats("fill");
  assert(top <= start + new_size - alignment_reserve(), "size too small");

  // Carry the sampling countdown over from the previous TLAB. On the very
  // first refill (_number_of_refills == 1) there is no previous TLAB, so
  // there is nothing to carry over.
  size_t old_bytes_until_sample = 0;
  if (_number_of_refills > 1) {
    old_bytes_until_sample = bytes_until_sample();
  }

  initialize(start, top, start + new_size - alignment_reserve());

  // Re-arm the sampling point inside the new TLAB if a countdown was carried.
  if (old_bytes_until_sample > 0) {
    set_bytes_until_sample(old_bytes_until_sample);
    set_sample_end();
  }

  // Reset amount of internal fragmentation
  set_refill_waste_limit(initial_refill_waste_limit());
}
195
196 void ThreadLocalAllocBuffer::initialize(HeapWord* start,
197 HeapWord* top,
198 HeapWord* end) {
199 set_start(start);
200 set_top(top);
201 set_pf_top(top);
202 set_end(end);
309 _allocation_fraction.average() * tlab_used / K,
310 _number_of_refills, waste_percent,
311 _gc_waste * HeapWordSize,
312 _slow_refill_waste * HeapWordSize,
313 _fast_refill_waste * HeapWordSize);
314 }
315
316 void ThreadLocalAllocBuffer::verify() {
317 HeapWord* p = start();
318 HeapWord* t = top();
319 HeapWord* prev_p = NULL;
320 while (p < t) {
321 oop(p)->verify();
322 prev_p = p;
323 p += oop(p)->size();
324 }
325 guarantee(p == top(), "end of last object must match end of space");
326 }
327
328 void ThreadLocalAllocBuffer::set_sample_end() {
329 size_t heap_words_remaining = _end - _top;
330 size_t bytes_left = bytes_until_sample();
331 size_t words_until_sample = bytes_left / HeapWordSize;
332
333 if (heap_words_remaining > words_until_sample) {
334 HeapWord* new_end = _top + words_until_sample;
335 set_end(new_end);
336 set_slow_path_end(new_end);
337 set_bytes_until_sample(0);
338 } else {
339 bytes_left -= heap_words_remaining * HeapWordSize;
340 set_bytes_until_sample(bytes_left);
341 }
342 }
343
344 void ThreadLocalAllocBuffer::pick_next_sample(size_t diff) {
345 if (!HeapMonitoring::enabled()) {
346 return;
347 }
348
349 if (bytes_until_sample() == 0) {
350 HeapMonitoring::pick_next_sample(bytes_until_sample_addr());
351 }
352
353 if (diff > 0) {
354 // Try to correct sample size by removing extra space from last allocation.
355 if (bytes_until_sample() > diff * HeapWordSize) {
356 set_bytes_until_sample(bytes_until_sample() - diff * HeapWordSize);
357 }
358 }
359
360 set_sample_end();
361
362 log_trace(gc, tlab)("TLAB picked next sample: thread: " INTPTR_FORMAT " [id: %2d]"
363 " start: %p top: %p end: %p actual_end: %p slow_path_end: %p",
364 p2i(myThread()), myThread()->osthread()->thread_id(),
365 start(), top(), end(),
366 actual_end(), slow_path_end());
367 }
368
// Recover the owning Thread* from this embedded TLAB: 'this' plus the
// offset of our _start field gives the field's address; subtracting the
// field's offset within Thread yields the Thread base address.
Thread* ThreadLocalAllocBuffer::myThread() {
  return (Thread*)(((char *)this) +
                   in_bytes(start_offset()) -
                   in_bytes(Thread::tlab_start_offset()));
}
374
// Restore the TLAB's real end after a sampling point may have lowered it.
void ThreadLocalAllocBuffer::set_back_actual_end() {
  // Did a fast TLAB refill occur?
  if (_slow_path_end != _end) {
    // Fix up the actual end to be now the end of this TLAB.
    _slow_path_end = _end;
    _actual_end = _end;
  } else {
    // No fast refill: undo the sampling trim by restoring the saved end.
    _end = _actual_end;
  }
}
385
// Charge 'size' words just allocated at 'result' against the sampling
// countdown, and take a heap sample if the countdown has expired.
void ThreadLocalAllocBuffer::handle_sample(Thread* thread, HeapWord* result,
                                           size_t size) {
  if (!HeapMonitoring::enabled()) {
    return;
  }

  size_t size_in_bytes = size * HeapWordSize;
  if (bytes_until_sample() > size_in_bytes) {
    set_bytes_until_sample(bytes_until_sample() - size_in_bytes);
  } else {
    // The allocation overshot the countdown. The overshoot is discarded
    // here; ideally it would be carried over to shrink the next sample
    // interval.
    set_bytes_until_sample(0);
  }

  // Should we sample now?
  if (should_sample()) {
    HeapMonitoring::object_alloc_do_sample(thread,
                                           reinterpret_cast<oopDesc*>(result),
                                           size_in_bytes);
    set_back_actual_end();
    pick_next_sample();
  }
}
410
411 HeapWord* ThreadLocalAllocBuffer::hard_end() {
412 // Did a fast TLAB refill occur?
413 if (_slow_path_end != _end) {
414 // Fix up the actual end to be now the end of this TLAB.
|
111 if (end() != NULL) {
112 invariants();
113
114 if (retire) {
115 myThread()->incr_allocated_bytes(used_bytes());
116 }
117
118 CollectedHeap::fill_with_object(top(), hard_end(), retire && zap);
119
120 if (retire || ZeroTLAB) { // "Reset" the TLAB
121 set_start(NULL);
122 set_top(NULL);
123 set_pf_top(NULL);
124 set_end(NULL);
125 set_actual_end(NULL);
126 set_slow_path_end(NULL);
127 }
128 }
129 assert(!(retire || ZeroTLAB) ||
130 (start() == NULL && end() == NULL && top() == NULL &&
131 _actual_end == NULL && _slow_path_end == NULL),
132 "TLAB must be reset");
133 }
134
135 void ThreadLocalAllocBuffer::resize_all_tlabs() {
136 if (ResizeTLAB) {
137 for (JavaThread *thread = Threads::first(); thread != NULL; thread = thread->next()) {
138 thread->tlab().resize();
139 }
140 }
141 }
142
143 void ThreadLocalAllocBuffer::resize() {
144 // Compute the next tlab size using expected allocation amount
145 assert(ResizeTLAB, "Should not call this otherwise");
146 size_t alloc = (size_t)(_allocation_fraction.average() *
147 (Universe::heap()->tlab_capacity(myThread()) / HeapWordSize));
148 size_t new_size = alloc / _target_refills;
149
150 new_size = MIN2(MAX2(new_size, min_size()), max_size());
151
162
163 void ThreadLocalAllocBuffer::initialize_statistics() {
164 _number_of_refills = 0;
165 _fast_refill_waste = 0;
166 _slow_refill_waste = 0;
167 _gc_waste = 0;
168 _slow_allocations = 0;
169 }
170
// Install a freshly allocated chunk [start, start + new_size) as this
// thread's TLAB; 'top' marks the end of the already-allocated prefix.
void ThreadLocalAllocBuffer::fill(HeapWord* start,
                                  HeapWord* top,
                                  size_t new_size) {
  _number_of_refills++;
  print_stats("fill");
  assert(top <= start + new_size - alignment_reserve(), "size too small");

  // Carry the sampling countdown over from the previous TLAB. On the very
  // first refill (_number_of_refills == 1) there is no previous TLAB, so
  // there is nothing to carry over.
  size_t old_bytes_until_sample = 0;
  if (_number_of_refills > 1) {
    old_bytes_until_sample = _bytes_until_sample;
  }

  initialize(start, top, start + new_size - alignment_reserve());

  // Re-arm the sampling point inside the new TLAB if a countdown was carried.
  if (old_bytes_until_sample > 0) {
    set_bytes_until_sample(old_bytes_until_sample);
    set_sample_end();
  }

  // Reset amount of internal fragmentation
  set_refill_waste_limit(initial_refill_waste_limit());
}
195
196 void ThreadLocalAllocBuffer::initialize(HeapWord* start,
197 HeapWord* top,
198 HeapWord* end) {
199 set_start(start);
200 set_top(top);
201 set_pf_top(top);
202 set_end(end);
309 _allocation_fraction.average() * tlab_used / K,
310 _number_of_refills, waste_percent,
311 _gc_waste * HeapWordSize,
312 _slow_refill_waste * HeapWordSize,
313 _fast_refill_waste * HeapWordSize);
314 }
315
316 void ThreadLocalAllocBuffer::verify() {
317 HeapWord* p = start();
318 HeapWord* t = top();
319 HeapWord* prev_p = NULL;
320 while (p < t) {
321 oop(p)->verify();
322 prev_p = p;
323 p += oop(p)->size();
324 }
325 guarantee(p == top(), "end of last object must match end of space");
326 }
327
328 void ThreadLocalAllocBuffer::set_sample_end() {
329 size_t heap_words_remaining = pointer_delta(_end, _top);
330 size_t bytes_left = _bytes_until_sample;
331 size_t words_until_sample = bytes_left / HeapWordSize;
332
333 if (heap_words_remaining > words_until_sample) {
334 HeapWord* new_end = _top + words_until_sample;
335 set_end(new_end);
336 set_slow_path_end(new_end);
337 set_bytes_until_sample(0);
338 } else {
339 bytes_left -= heap_words_remaining * HeapWordSize;
340 set_bytes_until_sample(bytes_left);
341 }
342 }
343
// Choose the next heap-sampling point for this TLAB. 'overflowed_words' is
// the number of words by which the last allocation overshot the previous
// sample point.
void ThreadLocalAllocBuffer::pick_next_sample(size_t overflowed_words) {
  if (!HeapMonitoring::enabled()) {
    return;
  }

  // Ask the heap monitor for a fresh countdown if the old one is used up.
  if (_bytes_until_sample == 0) {
    HeapMonitoring::pick_next_sample(&_bytes_until_sample);
  }

  if (overflowed_words > 0) {
    // Try to correct sample size by removing extra space from last allocation.
    if (_bytes_until_sample > overflowed_words * HeapWordSize) {
      set_bytes_until_sample(_bytes_until_sample - overflowed_words * HeapWordSize);
    }
  }

  set_sample_end();

  log_trace(gc, tlab)("TLAB picked next sample: thread: " INTPTR_FORMAT " [id: %2d]"
                      " start: " INTPTR_FORMAT " top: " INTPTR_FORMAT " end: " INTPTR_FORMAT " actual_end:"
                      INTPTR_FORMAT " slow_path_end: " INTPTR_FORMAT,
                      p2i(myThread()), myThread()->osthread()->thread_id(),
                      p2i(start()), p2i(top()), p2i(end()),
                      p2i(_actual_end), p2i(_slow_path_end));
}
369
// Recover the owning Thread* from this embedded TLAB: 'this' plus the
// offset of our _start field gives the field's address; subtracting the
// field's offset within Thread yields the Thread base address.
Thread* ThreadLocalAllocBuffer::myThread() {
  return (Thread*)(((char *)this) +
                   in_bytes(start_offset()) -
                   in_bytes(Thread::tlab_start_offset()));
}
375
376 void ThreadLocalAllocBuffer::set_back_actual_end() {
377 // Did a fast TLAB refill occur?
378 if (_slow_path_end != _end) {
379 // Fix up the actual end to be now the end of this TLAB.
380 _slow_path_end = _end;
381 _actual_end = _end;
382 } else {
383 _end = _actual_end;
384 }
385 }
386
387 void ThreadLocalAllocBuffer::handle_sample(Thread* thread, HeapWord* result,
388 size_t size) {
389 if (!HeapMonitoring::enabled()) {
390 return;
391 }
392
393 size_t size_in_bytes = size * HeapWordSize;
394 if (_bytes_until_sample > size_in_bytes) {
395 set_bytes_until_sample(_bytes_until_sample - size_in_bytes);
396 } else {
397 // Technically this is not exactly right, we probably should remember how many bytes are
398 // negative probably to then reduce our next sample size.
399 set_bytes_until_sample(0);
400 }
401
402 // Should we sample now?
403 if (should_sample()) {
404 HeapMonitoring::object_alloc_do_sample(thread,
405 reinterpret_cast<oopDesc*>(result),
406 size_in_bytes);
407 set_back_actual_end();
408 pick_next_sample();
409 }
410 }
411
412 HeapWord* ThreadLocalAllocBuffer::hard_end() {
413 // Did a fast TLAB refill occur?
414 if (_slow_path_end != _end) {
415 // Fix up the actual end to be now the end of this TLAB.
|