< prev index next >

src/hotspot/share/gc/shared/threadLocalAllocBuffer.cpp

Print this page
rev 48551 : [mq]: heap8
rev 48553 : [mq]: heap14_rebased
rev 48559 : [mq]: heap20
rev 48562 : [mq]: heap23
rev 48563 : [mq]: heap_to_thread
rev 48564 : [mq]: update-spec
rev 48565 : [mq]: event

*** 27,36 **** --- 27,37 ---- #include "gc/shared/threadLocalAllocBuffer.inline.hpp" #include "logging/log.hpp" #include "memory/resourceArea.hpp" #include "memory/universe.inline.hpp" #include "oops/oop.inline.hpp" + #include "runtime/heapMonitoring.hpp" #include "runtime/thread.inline.hpp" #include "runtime/threadSMR.hpp" #include "utilities/copy.hpp" // Thread-Local Edens support
*** 44,53 **** --- 45,62 ---- void ThreadLocalAllocBuffer::clear_before_allocation() { _slow_refill_waste += (unsigned)remaining(); make_parsable(true); // also retire the TLAB } + size_t ThreadLocalAllocBuffer::remaining() { + if (current_end() == NULL) { + return 0; + } + + return pointer_delta(reserved_end(), top()); + } + void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() { global_stats()->initialize(); for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) { thread->tlab().accumulate_statistics();
*** 106,133 **** // Fills the current tlab with a dummy filler array to create // an illusion of a contiguous Eden and optionally retires the tlab. // Waste accounting should be done in caller as appropriate; see, // for example, clear_before_allocation(). void ThreadLocalAllocBuffer::make_parsable(bool retire, bool zap) { ! if (end() != NULL) { invariants(); if (retire) { myThread()->incr_allocated_bytes(used_bytes()); } ! CollectedHeap::fill_with_object(top(), hard_end(), retire && zap); if (retire || ZeroTLAB) { // "Reset" the TLAB set_start(NULL); set_top(NULL); set_pf_top(NULL); ! set_end(NULL); } } assert(!(retire || ZeroTLAB) || ! (start() == NULL && end() == NULL && top() == NULL), "TLAB must be reset"); } void ThreadLocalAllocBuffer::resize_all_tlabs() { if (ResizeTLAB) { --- 115,144 ---- // Fills the current tlab with a dummy filler array to create // an illusion of a contiguous Eden and optionally retires the tlab. // Waste accounting should be done in caller as appropriate; see, // for example, clear_before_allocation(). void ThreadLocalAllocBuffer::make_parsable(bool retire, bool zap) { ! if (current_end() != NULL) { invariants(); if (retire) { myThread()->incr_allocated_bytes(used_bytes()); } ! CollectedHeap::fill_with_object(top(), reserved_end(), retire && zap); if (retire || ZeroTLAB) { // "Reset" the TLAB set_start(NULL); set_top(NULL); set_pf_top(NULL); ! set_current_end(NULL); ! set_allocation_end(NULL); } } assert(!(retire || ZeroTLAB) || ! (start() == NULL && current_end() == NULL && top() == NULL && ! _allocation_end == NULL), "TLAB must be reset"); } void ThreadLocalAllocBuffer::resize_all_tlabs() { if (ResizeTLAB) {
*** 169,191 **** HeapWord* top, size_t new_size) { _number_of_refills++; print_stats("fill"); assert(top <= start + new_size - alignment_reserve(), "size too small"); initialize(start, top, start + new_size - alignment_reserve()); // Reset amount of internal fragmentation set_refill_waste_limit(initial_refill_waste_limit()); } void ThreadLocalAllocBuffer::initialize(HeapWord* start, HeapWord* top, HeapWord* end) { set_start(start); set_top(top); set_pf_top(top); ! set_end(end); invariants(); } void ThreadLocalAllocBuffer::initialize() { initialize(NULL, // start --- 180,208 ---- HeapWord* top, size_t new_size) { _number_of_refills++; print_stats("fill"); assert(top <= start + new_size - alignment_reserve(), "size too small"); + initialize(start, top, start + new_size - alignment_reserve()); + if (HeapMonitoring::enabled()) { + set_sample_end(); + } + // Reset amount of internal fragmentation set_refill_waste_limit(initial_refill_waste_limit()); } void ThreadLocalAllocBuffer::initialize(HeapWord* start, HeapWord* top, HeapWord* end) { set_start(start); set_top(top); set_pf_top(top); ! set_current_end(end); ! set_allocation_end(end); invariants(); } void ThreadLocalAllocBuffer::initialize() { initialize(NULL, // start
*** 304,319 **** --- 321,370 ---- p += oop(p)->size(); } guarantee(p == top(), "end of last object must match end of space"); } + void ThreadLocalAllocBuffer::set_sample_end() { + size_t heap_words_remaining = pointer_delta(_current_end, _top); + size_t bytes_until_sample = myThread()->heap_sampler().bytes_until_sample(); + size_t words_until_sample = bytes_until_sample / HeapWordSize; + + if (heap_words_remaining > words_until_sample) { + HeapWord* new_end = _top + words_until_sample; + set_current_end(new_end); + _bytes_since_last_sample_point = bytes_until_sample; + } else { + _bytes_since_last_sample_point = heap_words_remaining * HeapWordSize; + } + } + Thread* ThreadLocalAllocBuffer::myThread() { return (Thread*)(((char *)this) + in_bytes(start_offset()) - in_bytes(Thread::tlab_start_offset())); } + void ThreadLocalAllocBuffer::set_back_allocation_end() { + _current_end = _allocation_end; + } + + HeapWord* ThreadLocalAllocBuffer::allocate_sampled_object(size_t size) { + Thread* thread = myThread(); + thread->tlab().set_back_allocation_end(); + HeapWord* result = thread->tlab().allocate(size); + + if (result) { + thread->heap_sampler().check_for_sampling(result, size * HeapWordSize, _bytes_since_last_sample_point); + thread->tlab().set_sample_end(); + } + + return result; + } + + HeapWord* ThreadLocalAllocBuffer::reserved_end() { + return _allocation_end + alignment_reserve(); + } GlobalTLABStats::GlobalTLABStats() : _allocating_threads_avg(TLABAllocationWeight) { initialize();
< prev index next >