< prev index next >

src/hotspot/share/gc/shared/threadLocalAllocBuffer.cpp

Print this page
rev 48551 : [mq]: heap8
rev 48553 : [mq]: heap14_rebased

*** 27,36 **** --- 27,37 ---- #include "gc/shared/threadLocalAllocBuffer.inline.hpp" #include "logging/log.hpp" #include "memory/resourceArea.hpp" #include "memory/universe.inline.hpp" #include "oops/oop.inline.hpp" + #include "runtime/heapMonitoring.hpp" #include "runtime/thread.inline.hpp" #include "runtime/threadSMR.hpp" #include "utilities/copy.hpp" // Thread-Local Edens support
*** 120,133 **** if (retire || ZeroTLAB) { // "Reset" the TLAB set_start(NULL); set_top(NULL); set_pf_top(NULL); set_end(NULL); } } assert(!(retire || ZeroTLAB) || ! (start() == NULL && end() == NULL && top() == NULL), "TLAB must be reset"); } void ThreadLocalAllocBuffer::resize_all_tlabs() { if (ResizeTLAB) { --- 121,137 ---- if (retire || ZeroTLAB) { // "Reset" the TLAB set_start(NULL); set_top(NULL); set_pf_top(NULL); set_end(NULL); + set_actual_end(NULL); + set_slow_path_end(NULL); } } assert(!(retire || ZeroTLAB) || ! (start() == NULL && end() == NULL && top() == NULL && ! _actual_end == NULL && _slow_path_end == NULL), "TLAB must be reset"); } void ThreadLocalAllocBuffer::resize_all_tlabs() { if (ResizeTLAB) {
*** 169,180 **** --- 173,197 ---- HeapWord* top, size_t new_size) { _number_of_refills++; print_stats("fill"); assert(top <= start + new_size - alignment_reserve(), "size too small"); + + // Carry over the remaining bytes-until-sample from the previous TLAB, + // unless this is our first actual refill. + size_t old_bytes_until_sample = 0; + if (_number_of_refills > 1) { + old_bytes_until_sample = _bytes_until_sample; + } + initialize(start, top, start + new_size - alignment_reserve()); + if (old_bytes_until_sample > 0) { + set_bytes_until_sample(old_bytes_until_sample); + set_sample_end(); + } + // Reset amount of internal fragmentation set_refill_waste_limit(initial_refill_waste_limit()); } void ThreadLocalAllocBuffer::initialize(HeapWord* start,
*** 182,192 **** --- 199,212 ---- HeapWord* end) { set_start(start); set_top(top); set_pf_top(top); set_end(end); + set_actual_end(end); + set_slow_path_end(end); invariants(); + _bytes_until_sample = 0; } void ThreadLocalAllocBuffer::initialize() { initialize(NULL, // start NULL, // top
*** 304,320 **** --- 324,427 ---- p += oop(p)->size(); } guarantee(p == top(), "end of last object must match end of space"); } + void ThreadLocalAllocBuffer::set_sample_end() { + size_t heap_words_remaining = pointer_delta(_end, _top); + size_t bytes_left = _bytes_until_sample; + size_t words_until_sample = bytes_left / HeapWordSize; + + if (heap_words_remaining > words_until_sample) { + HeapWord* new_end = _top + words_until_sample; + set_end(new_end); + set_slow_path_end(new_end); + set_bytes_until_sample(0); + } else { + bytes_left -= heap_words_remaining * HeapWordSize; + set_bytes_until_sample(bytes_left); + } + } + + void ThreadLocalAllocBuffer::pick_next_sample(size_t overflowed_words) { + if (!HeapMonitoring::enabled()) { + return; + } + + if (_bytes_until_sample == 0) { + HeapMonitoring::pick_next_sample(&_bytes_until_sample); + } + + if (overflowed_words > 0) { + // Try to correct sample size by removing extra space from last allocation. + if (_bytes_until_sample > overflowed_words * HeapWordSize) { + set_bytes_until_sample(_bytes_until_sample - overflowed_words * HeapWordSize); + } + } + + set_sample_end(); + + log_trace(gc, tlab)("TLAB picked next sample: thread: " INTPTR_FORMAT " [id: %2d]" + " start: " INTPTR_FORMAT " top: " INTPTR_FORMAT " end: " INTPTR_FORMAT " actual_end:" + INTPTR_FORMAT " slow_path_end: " INTPTR_FORMAT, + p2i(myThread()), myThread()->osthread()->thread_id(), + p2i(start()), p2i(top()), p2i(end()), + p2i(_actual_end), p2i(_slow_path_end)); + } + Thread* ThreadLocalAllocBuffer::myThread() { return (Thread*)(((char *)this) + in_bytes(start_offset()) - in_bytes(Thread::tlab_start_offset())); } + void ThreadLocalAllocBuffer::set_back_actual_end() { + // Did a fast TLAB refill occur? + if (_slow_path_end != _end) { + // Fix up the actual end to be now the end of this TLAB. 
+ _slow_path_end = _end; + _actual_end = _end; + } else { + _end = _actual_end; + } + } + + void ThreadLocalAllocBuffer::handle_sample(Thread* thread, HeapWord* result, + size_t size) { + if (!HeapMonitoring::enabled()) { + return; + } + + size_t size_in_bytes = size * HeapWordSize; + if (_bytes_until_sample > size_in_bytes) { + set_bytes_until_sample(_bytes_until_sample - size_in_bytes); + } else { + // Technically this is not exactly right; we should probably remember by how + // many bytes we overshot, so the next sample size can be reduced accordingly. + set_bytes_until_sample(0); + } + + // Should we sample now? + if (should_sample()) { + HeapMonitoring::object_alloc_do_sample(thread, + reinterpret_cast<oopDesc*>(result), + size_in_bytes); + set_back_actual_end(); + pick_next_sample(); + } + } + + HeapWord* ThreadLocalAllocBuffer::hard_end() { + // Did a fast TLAB refill occur? + if (_slow_path_end != _end) { + // Fix up the actual end to be now the end of this TLAB. + _slow_path_end = _end; + _actual_end = _end; + } + return _actual_end + alignment_reserve(); + } GlobalTLABStats::GlobalTLABStats() : _allocating_threads_avg(TLABAllocationWeight) { initialize();
< prev index next >