src/hotspot/share/gc/shared/threadLocalAllocBuffer.cpp
rev 48551 : [mq]: heap8
rev 48553 : [mq]: heap14_rebased
rev 48559 : [mq]: heap20
rev 48562 : [mq]: heap23
*** 45,54 ****
--- 45,64 ----
void ThreadLocalAllocBuffer::clear_before_allocation() {
_slow_refill_waste += (unsigned)remaining();
make_parsable(true); // also retire the TLAB
}
+ size_t ThreadLocalAllocBuffer::remaining() {
+ if (current_end() == NULL) {
+ return 0;
+ }
+
+ // TODO: To be deprecated when FastTLABRefill is deprecated.
+ update_end_pointers();
+ return pointer_delta(reserved_end(), top());
+ }
+
void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() {
global_stats()->initialize();
for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
thread->tlab().accumulate_statistics();
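
A minimal sketch (not part of the patch) of the contract behind the new remaining() helper: a retired TLAB, where current_end() is NULL, reports zero, and otherwise the free space is measured from top() up to the end of the buffer once the end pointers have been reconciled. The ToyTlab type and plain byte arithmetic below are illustrative stand-ins for HeapWord* and pointer_delta(), and the alignment reserve is left out for brevity.

// Illustrative sketch only; ToyTlab is hypothetical, not HotSpot code.
#include <cstddef>

struct ToyTlab {
  char* _top;             // next allocation position
  char* _current_end;     // sampling-adjusted end (NULL when the TLAB is retired)
  char* _allocation_end;  // true end of the buffer

  size_t remaining_bytes() const {
    if (_current_end == nullptr) {
      return 0;                                  // no TLAB installed
    }
    // With the end pointers reconciled, free space runs from _top to the end.
    return static_cast<size_t>(_allocation_end - _top);
  }
};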
*** 107,137 ****
// Fills the current tlab with a dummy filler array to create
// an illusion of a contiguous Eden and optionally retires the tlab.
// Waste accounting should be done in caller as appropriate; see,
// for example, clear_before_allocation().
void ThreadLocalAllocBuffer::make_parsable(bool retire, bool zap) {
! if (end() != NULL) {
invariants();
if (retire) {
myThread()->incr_allocated_bytes(used_bytes());
}
! CollectedHeap::fill_with_object(top(), hard_end(), retire && zap);
if (retire || ZeroTLAB) { // "Reset" the TLAB
set_start(NULL);
set_top(NULL);
set_pf_top(NULL);
! set_end(NULL);
! set_actual_end(NULL);
! set_slow_path_end(NULL);
}
}
assert(!(retire || ZeroTLAB) ||
! (start() == NULL && end() == NULL && top() == NULL &&
! _actual_end == NULL && _slow_path_end == NULL),
"TLAB must be reset");
}
void ThreadLocalAllocBuffer::resize_all_tlabs() {
if (ResizeTLAB) {
--- 117,149 ----
// Fills the current tlab with a dummy filler array to create
// an illusion of a contiguous Eden and optionally retires the tlab.
// Waste accounting should be done in caller as appropriate; see,
// for example, clear_before_allocation().
void ThreadLocalAllocBuffer::make_parsable(bool retire, bool zap) {
! if (current_end() != NULL) {
invariants();
if (retire) {
myThread()->incr_allocated_bytes(used_bytes());
}
! // TODO: To be deprecated when FastTLABRefill is deprecated.
! update_end_pointers();
! CollectedHeap::fill_with_object(top(), reserved_end(), retire && zap);
if (retire || ZeroTLAB) { // "Reset" the TLAB
set_start(NULL);
set_top(NULL);
set_pf_top(NULL);
! set_current_end(NULL);
! set_allocation_end(NULL);
! set_last_slow_path_end(NULL);
}
}
assert(!(retire || ZeroTLAB) ||
! (start() == NULL && current_end() == NULL && top() == NULL &&
! _allocation_end == NULL && _last_slow_path_end == NULL),
"TLAB must be reset");
}
void ThreadLocalAllocBuffer::resize_all_tlabs() {
if (ResizeTLAB) {
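
For context, a self-contained sketch (an assumption, not HotSpot code) of the ordering contract this hunk relies on: update_end_pointers() reconciles the end pointers after a possible fast refill, and only then is reserved_end() a valid fill limit, which is exactly what the assert added in the last hunk checks.

// Hypothetical ToyEnds models only the pointer bookkeeping.
#include <cassert>
#include <cstddef>

struct ToyEnds {
  char* _current_end;
  char* _allocation_end;
  char* _last_slow_path_end;

  void update_end_pointers() {
    // A fast TLAB refill moves _current_end without going through the slow
    // path; bring the other two pointers back in line with it.
    if (_last_slow_path_end != _current_end) {
      _last_slow_path_end = _current_end;
      _allocation_end = _current_end;
    }
  }

  char* reserved_end(size_t alignment_reserve_bytes) {
    // Mirrors the assert in the patch: reconcile before asking for the limit.
    assert(_last_slow_path_end == _current_end);
    return _allocation_end + alignment_reserve_bytes;
  }
};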
*** 198,210 ****
HeapWord* top,
HeapWord* end) {
set_start(start);
set_top(top);
set_pf_top(top);
! set_end(end);
! set_actual_end(end);
! set_slow_path_end(end);
invariants();
_bytes_until_sample = 0;
}
void ThreadLocalAllocBuffer::initialize() {
--- 210,222 ----
HeapWord* top,
HeapWord* end) {
set_start(start);
set_top(top);
set_pf_top(top);
! set_current_end(end);
! set_allocation_end(end);
! set_last_slow_path_end(end);
invariants();
_bytes_until_sample = 0;
}
void ThreadLocalAllocBuffer::initialize() {
*** 325,342 ****
}
guarantee(p == top(), "end of last object must match end of space");
}
void ThreadLocalAllocBuffer::set_sample_end() {
! size_t heap_words_remaining = pointer_delta(_end, _top);
size_t bytes_left = _bytes_until_sample;
size_t words_until_sample = bytes_left / HeapWordSize;
if (heap_words_remaining > words_until_sample) {
HeapWord* new_end = _top + words_until_sample;
! set_end(new_end);
! set_slow_path_end(new_end);
set_bytes_until_sample(0);
} else {
bytes_left -= heap_words_remaining * HeapWordSize;
set_bytes_until_sample(bytes_left);
}
--- 337,354 ----
}
guarantee(p == top(), "end of last object must match end of space");
}
void ThreadLocalAllocBuffer::set_sample_end() {
! size_t heap_words_remaining = pointer_delta(_current_end, _top);
size_t bytes_left = _bytes_until_sample;
size_t words_until_sample = bytes_left / HeapWordSize;
if (heap_words_remaining > words_until_sample) {
HeapWord* new_end = _top + words_until_sample;
! set_current_end(new_end);
! set_last_slow_path_end(new_end);
set_bytes_until_sample(0);
} else {
bytes_left -= heap_words_remaining * HeapWordSize;
set_bytes_until_sample(bytes_left);
}
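
A small worked example (illustrative values, not from the patch) of what set_sample_end() decides: if the sample point falls inside the current TLAB, the usable end is pulled back so the allocation that crosses it takes the slow path; otherwise the remaining budget is carried over to the next TLAB.

// Stand-alone illustration; the constants are made up for the example.
#include <cstddef>
#include <cstdio>

int main() {
  const size_t HeapWordSize   = sizeof(void*);
  size_t heap_words_remaining = 1024;                 // _current_end - _top, in words
  size_t bytes_until_sample   = 256 * HeapWordSize;   // distance to the sample point
  size_t words_until_sample   = bytes_until_sample / HeapWordSize;

  if (heap_words_remaining > words_until_sample) {
    // Sample point is inside this TLAB: clamp the end to top + 256 words so
    // the allocation that reaches it falls out to the slow path.
    printf("clamp end to top + %zu words, bytes_until_sample = 0\n", words_until_sample);
  } else {
    // Sample point lies beyond this TLAB: consume what fits and carry the rest.
    bytes_until_sample -= heap_words_remaining * HeapWordSize;
    printf("carry %zu bytes into the next TLAB\n", bytes_until_sample);
  }
  return 0;
}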
*** 359,426 ****
}
set_sample_end();
log_trace(gc, tlab)("TLAB picked next sample: thread: " INTPTR_FORMAT " [id: %2d]"
! " start: " INTPTR_FORMAT " top: " INTPTR_FORMAT " end: " INTPTR_FORMAT " actual_end:"
! INTPTR_FORMAT " slow_path_end: " INTPTR_FORMAT,
p2i(myThread()), myThread()->osthread()->thread_id(),
! p2i(start()), p2i(top()), p2i(end()),
! p2i(_actual_end), p2i(_slow_path_end));
}
Thread* ThreadLocalAllocBuffer::myThread() {
return (Thread*)(((char *)this) +
in_bytes(start_offset()) -
in_bytes(Thread::tlab_start_offset()));
}
! void ThreadLocalAllocBuffer::set_back_actual_end() {
// Did a fast TLAB refill occur?
! if (_slow_path_end != _end) {
// Fix up the actual end to be now the end of this TLAB.
! _slow_path_end = _end;
! _actual_end = _end;
} else {
! _end = _actual_end;
}
}
void ThreadLocalAllocBuffer::handle_sample(Thread* thread, HeapWord* result,
! size_t size) {
if (!HeapMonitoring::enabled()) {
return;
}
! size_t size_in_bytes = size * HeapWordSize;
! if (_bytes_until_sample > size_in_bytes) {
! set_bytes_until_sample(_bytes_until_sample - size_in_bytes);
! } else {
! // Technically this is not exactly right, we probably should remember how many bytes are
! // negative probably to then reduce our next sample size.
! set_bytes_until_sample(0);
! }
!
! // Should we sample now?
! if (should_sample()) {
HeapMonitoring::object_alloc_do_sample(thread,
reinterpret_cast<oopDesc*>(result),
size_in_bytes);
- set_back_actual_end();
- pick_next_sample();
}
}
! HeapWord* ThreadLocalAllocBuffer::hard_end() {
! // Did a fast TLAB refill occur?
! if (_slow_path_end != _end) {
! // Fix up the actual end to be now the end of this TLAB.
! _slow_path_end = _end;
! _actual_end = _end;
}
! return _actual_end + alignment_reserve();
}
GlobalTLABStats::GlobalTLABStats() :
_allocating_threads_avg(TLABAllocationWeight) {
--- 371,446 ----
}
set_sample_end();
log_trace(gc, tlab)("TLAB picked next sample: thread: " INTPTR_FORMAT " [id: %2d]"
! " start: " INTPTR_FORMAT " top: " INTPTR_FORMAT " end: "
! INTPTR_FORMAT " allocation_end:"
! INTPTR_FORMAT " last_slow_path_end: " INTPTR_FORMAT,
p2i(myThread()), myThread()->osthread()->thread_id(),
! p2i(start()), p2i(top()), p2i(current_end()),
! p2i(_allocation_end), p2i(_last_slow_path_end));
}
Thread* ThreadLocalAllocBuffer::myThread() {
return (Thread*)(((char *)this) +
in_bytes(start_offset()) -
in_bytes(Thread::tlab_start_offset()));
}
! void ThreadLocalAllocBuffer::set_back_allocation_end() {
// Did a fast TLAB refill occur?
! if (_last_slow_path_end != _current_end) {
// Fix up the actual end to be now the end of this TLAB.
! _last_slow_path_end = _current_end;
! _allocation_end = _current_end;
} else {
! _current_end = _allocation_end;
}
}
void ThreadLocalAllocBuffer::handle_sample(Thread* thread, HeapWord* result,
! size_t size_in_bytes) {
if (!HeapMonitoring::enabled()) {
return;
}
! if (_bytes_until_sample < size_in_bytes) {
HeapMonitoring::object_alloc_do_sample(thread,
reinterpret_cast<oopDesc*>(result),
size_in_bytes);
}
+
+ update_tlab_sample_point(size_in_bytes);
+ }
+
+ void ThreadLocalAllocBuffer::update_tlab_sample_point(size_t size_in_bytes) {
+ if (_bytes_until_sample > size_in_bytes) {
+ _bytes_until_sample -= size_in_bytes;
+ return;
+ }
+
+ // We sampled here, so reset it all and start a new sample point.
+ set_bytes_until_sample(0);
+ set_back_allocation_end();
+ pick_next_sample();
}
! void ThreadLocalAllocBuffer::update_end_pointers() {
! // Did a fast TLAB refill occur? (This will be deprecated when fast TLAB
! // refill disappears).
! if (_last_slow_path_end != _current_end) {
!       // Fix up the last slow path end so that it now marks the end of this TLAB.
! _last_slow_path_end = _current_end;
! _allocation_end = _current_end;
}
+ }
! HeapWord* ThreadLocalAllocBuffer::reserved_end() {
!   assert(_last_slow_path_end == _current_end,
! "Have to call update_end_pointers before reserved_end.");
! return _allocation_end + alignment_reserve();
}
GlobalTLABStats::GlobalTLABStats() :
_allocating_threads_avg(TLABAllocationWeight) {
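
Finally, a hedged sketch of the countdown that handle_sample() and the new update_tlab_sample_point() maintain: each allocation is subtracted from _bytes_until_sample, and reaching the sample point is what triggers a sample plus a fresh sample point. SampleCounter below is a hypothetical stand-in, not the HotSpot type, and it folds the two methods into one for brevity.

// Hypothetical model of the _bytes_until_sample countdown.
#include <cstddef>
#include <cstdio>

struct SampleCounter {
  size_t bytes_until_sample;

  // Returns true when an allocation of size_in_bytes reaches the sample point.
  bool record_allocation(size_t size_in_bytes) {
    if (bytes_until_sample > size_in_bytes) {
      bytes_until_sample -= size_in_bytes;   // still counting down
      return false;
    }
    bytes_until_sample = 0;                  // sampled; caller picks a new point
    return true;
  }
};

int main() {
  SampleCounter c{512};
  printf("%d\n", c.record_allocation(100));  // 0: 412 bytes still to go
  printf("%d\n", c.record_allocation(500));  // 1: reached the sample point
  return 0;
}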