
src/hotspot/share/gc/shared/threadLocalAllocBuffer.cpp

rev 49643 : [mq]: heap8
rev 49644 : [mq]: event_rebased
rev 49649 : [mq]: heap14

*** 45,59 ****
  _slow_refill_waste += (unsigned)remaining();
  make_parsable(true); // also retire the TLAB
}

size_t ThreadLocalAllocBuffer::remaining() {
!   if (current_end() == NULL) {
    return 0;
  }

!   return pointer_delta(reserved_end(), top());
}

void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() {
  global_stats()->initialize();
--- 45,59 ----
  _slow_refill_waste += (unsigned)remaining();
  make_parsable(true); // also retire the TLAB
}

size_t ThreadLocalAllocBuffer::remaining() {
!   if (fast_path_end() == NULL) {
    return 0;
  }

!   return pointer_delta(hard_end(), top());
}

void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() {
  global_stats()->initialize();
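For orientation, a minimal standalone sketch (not HotSpot code) of how the renamed pointers relate after this change: fast_path_end() is the limit the inline allocation fast path compares against, _allocation_end is the real usable end, and hard_end() sits one alignment reserve past it. The field names, the byte-based arithmetic, and the fixed reserve size below are assumptions for illustration; HotSpot's pointer_delta() works in HeapWords and alignment_reserve() is derived from the filler-array header size.

// Illustrative model only; names and sizes are assumptions, not HotSpot's.
#include <cstddef>
#include <cstdio>

typedef unsigned char* HeapWordPtr;           // stand-in for HeapWord*
static const size_t kAlignmentReserve = 16;   // assumed filler reserve (bytes)

struct TinyTlab {
  HeapWordPtr _top;             // next allocation position
  HeapWordPtr _fast_path_end;   // limit the compiled fast path checks;
                                // may be pulled below _allocation_end
  HeapWordPtr _allocation_end;  // true end of the allocatable region

  // hard_end(): end of the whole buffer, past the alignment reserve.
  HeapWordPtr hard_end() const { return _allocation_end + kAlignmentReserve; }

  // remaining(): space left up to hard_end(), or 0 for a retired TLAB.
  size_t remaining() const {
    if (_fast_path_end == nullptr) {
      return 0;
    }
    return (size_t)(hard_end() - _top);   // bytes here; HeapWords in HotSpot
  }
};

int main() {
  unsigned char buffer[256];
  TinyTlab tlab;
  tlab._top            = buffer + 64;
  tlab._fast_path_end  = buffer + 128;   // shortened sampling window
  tlab._allocation_end = buffer + sizeof(buffer) - kAlignmentReserve;
  std::printf("remaining = %zu\n", tlab.remaining());   // prints 192
  return 0;
}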
*** 114,142 ****
// Fills the current tlab with a dummy filler array to create
// an illusion of a contiguous Eden and optionally retires the tlab.
// Waste accounting should be done in caller as appropriate; see,
// for example, clear_before_allocation().
void ThreadLocalAllocBuffer::make_parsable(bool retire, bool zap) {
!   if (current_end() != NULL) {
    invariants();

    if (retire) {
      myThread()->incr_allocated_bytes(used_bytes());
    }

!     CollectedHeap::fill_with_object(top(), reserved_end(), retire && zap);

    if (retire || ZeroTLAB) { // "Reset" the TLAB
      set_start(NULL);
      set_top(NULL);
      set_pf_top(NULL);
!       set_current_end(NULL);
      set_allocation_end(NULL);
    }
  }
  assert(!(retire || ZeroTLAB) ||
!          (start() == NULL && current_end() == NULL && top() == NULL &&
          _allocation_end == NULL),
         "TLAB must be reset");
}

void ThreadLocalAllocBuffer::resize_all_tlabs() {
--- 114,142 ----
// Fills the current tlab with a dummy filler array to create
// an illusion of a contiguous Eden and optionally retires the tlab.
// Waste accounting should be done in caller as appropriate; see,
// for example, clear_before_allocation().
void ThreadLocalAllocBuffer::make_parsable(bool retire, bool zap) {
!   if (fast_path_end() != NULL) {
    invariants();

    if (retire) {
      myThread()->incr_allocated_bytes(used_bytes());
    }

!     CollectedHeap::fill_with_object(top(), hard_end(), retire && zap);

    if (retire || ZeroTLAB) { // "Reset" the TLAB
      set_start(NULL);
      set_top(NULL);
      set_pf_top(NULL);
!       set_fast_path_end(NULL);
      set_allocation_end(NULL);
    }
  }
  assert(!(retire || ZeroTLAB) ||
!          (start() == NULL && fast_path_end() == NULL && top() == NULL &&
          _allocation_end == NULL),
         "TLAB must be reset");
}

void ThreadLocalAllocBuffer::resize_all_tlabs() {
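A short sketch of the idea behind make_parsable(): the unused tail of the buffer is overwritten with a dummy filler so a walker that scans Eden linearly sees only well-formed objects. The length-prefixed filler layout below is an assumption for illustration, not the actual CollectedHeap::fill_with_object() format.

// Illustrative only: a pretend region where each object starts with a
// word-sized length field so a walker can skip from object to object.
#include <cstddef>
#include <vector>

typedef size_t Word;

// Overwrite [top, end) with one dummy "filler array" whose first word encodes
// its own length, keeping the region walkable (assumed layout, not HotSpot's).
static void fill_with_dummy(std::vector<Word>& heap, size_t top, size_t end) {
  if (top >= end) return;
  heap[top] = end - top;          // filler length, in words
  for (size_t i = top + 1; i < end; ++i) {
    heap[i] = 0;                  // zapped filler body
  }
}

// Walk the region object by object; this only terminates cleanly because the
// unused tail was turned into one more "object" by fill_with_dummy().
static size_t count_objects(const std::vector<Word>& heap, size_t start, size_t end) {
  size_t count = 0;
  for (size_t p = start; p < end; p += heap[p]) {
    ++count;
  }
  return count;
}

int main() {
  std::vector<Word> heap(16, 0);
  heap[0] = 4;                                  // one live 4-word object
  fill_with_dummy(heap, 4, heap.size());        // retire the rest of the buffer
  return count_objects(heap, 0, heap.size()) == 2 ? 0 : 1;   // object + filler
}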
*** 196,206 ****
                                  HeapWord* top,
                                  HeapWord* end) {
  set_start(start);
  set_top(top);
  set_pf_top(top);
!   set_current_end(end);
  set_allocation_end(end);
  invariants();
}

void ThreadLocalAllocBuffer::initialize() {
--- 196,206 ----
                                  HeapWord* top,
                                  HeapWord* end) {
  set_start(start);
  set_top(top);
  set_pf_top(top);
!   set_fast_path_end(end);
  set_allocation_end(end);
  invariants();
}

void ThreadLocalAllocBuffer::initialize() {
*** 321,337 ****
  }
  guarantee(p == top(), "end of last object must match end of space");
}

void ThreadLocalAllocBuffer::set_sample_end() {
!   size_t heap_words_remaining = pointer_delta(_current_end, _top);
  size_t bytes_until_sample = myThread()->heap_sampler().bytes_until_sample();
  size_t words_until_sample = bytes_until_sample / HeapWordSize;;

  if (heap_words_remaining > words_until_sample) {
    HeapWord* new_end = _top + words_until_sample;
!     set_current_end(new_end);
    _bytes_since_last_sample_point = bytes_until_sample;
  } else {
    _bytes_since_last_sample_point = heap_words_remaining * HeapWordSize;;
  }
}
--- 321,337 ----
  }
  guarantee(p == top(), "end of last object must match end of space");
}

void ThreadLocalAllocBuffer::set_sample_end() {
!   size_t heap_words_remaining = pointer_delta(_fast_path_end, _top);
  size_t bytes_until_sample = myThread()->heap_sampler().bytes_until_sample();
  size_t words_until_sample = bytes_until_sample / HeapWordSize;;

  if (heap_words_remaining > words_until_sample) {
    HeapWord* new_end = _top + words_until_sample;
!     set_fast_path_end(new_end);
    _bytes_since_last_sample_point = bytes_until_sample;
  } else {
    _bytes_since_last_sample_point = heap_words_remaining * HeapWordSize;;
  }
}
*** 341,351 ****
                   in_bytes(start_offset()) - in_bytes(Thread::tlab_start_offset()));
}

void ThreadLocalAllocBuffer::set_back_allocation_end() {
!   _current_end = _allocation_end;
}

HeapWord* ThreadLocalAllocBuffer::allocate_sampled_object(size_t size) {
  Thread* thread = myThread();
  thread->tlab().set_back_allocation_end();
--- 341,351 ----
                   in_bytes(start_offset()) - in_bytes(Thread::tlab_start_offset()));
}

void ThreadLocalAllocBuffer::set_back_allocation_end() {
!   _fast_path_end = _allocation_end;
}

HeapWord* ThreadLocalAllocBuffer::allocate_sampled_object(size_t size) {
  Thread* thread = myThread();
  thread->tlab().set_back_allocation_end();
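The two hunks above are the heart of the sampling window: set_sample_end() pulls the fast-path limit down to the next sample point so the inline allocation path falls into the slow path exactly there, and set_back_allocation_end() restores the full window once the sample has been taken. Below is a standalone sketch of that arithmetic, with positions kept as word indices and a fixed 8-byte word assumed for simplicity; it is a model of the idea, not HotSpot code.

// Illustrative model of the sampling window; names and sizes are assumptions.
#include <cstddef>

static const size_t kHeapWordSize = 8;   // assumed bytes per HeapWord

struct SamplingTlab {
  size_t _top;                           // positions in words
  size_t _fast_path_end;
  size_t _allocation_end;
  size_t _bytes_since_last_sample_point;

  // If the next sample point lands inside this TLAB, shorten the window the
  // fast path sees; otherwise just account for what this TLAB can hold.
  void set_sample_end(size_t bytes_until_sample) {
    size_t words_remaining    = _fast_path_end - _top;
    size_t words_until_sample = bytes_until_sample / kHeapWordSize;

    if (words_remaining > words_until_sample) {
      _fast_path_end = _top + words_until_sample;
      _bytes_since_last_sample_point = bytes_until_sample;
    } else {
      _bytes_since_last_sample_point = words_remaining * kHeapWordSize;
    }
  }

  // After the slow path has taken the sample, give the fast path the whole
  // buffer back, as set_back_allocation_end() does above.
  void set_back_allocation_end() {
    _fast_path_end = _allocation_end;
  }
};

int main() {
  SamplingTlab tlab;
  tlab._top = 0;
  tlab._fast_path_end = 512;
  tlab._allocation_end = 512;
  tlab._bytes_since_last_sample_point = 0;
  tlab.set_sample_end(1024);        // 128 words until the sample point
  // _fast_path_end is now 128; an allocation crossing it takes the slow path.
  tlab.set_back_allocation_end();   // window restored to 512 after sampling
  return 0;
}

With 512 words remaining and 1024 bytes (128 words) until the next sample, the window shrinks to 128 words; the allocation that would cross it is diverted to the slow path, the sample is recorded, and set_back_allocation_end() re-opens the window.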
*** 357,367 ****
  }

  return result;
}

! HeapWord* ThreadLocalAllocBuffer::reserved_end() {
  return _allocation_end + alignment_reserve();
}

GlobalTLABStats::GlobalTLABStats() :
  _allocating_threads_avg(TLABAllocationWeight) {
--- 357,367 ----
  }

  return result;
}

! HeapWord* ThreadLocalAllocBuffer::hard_end() {
  return _allocation_end + alignment_reserve();
}

GlobalTLABStats::GlobalTLABStats() :
  _allocating_threads_avg(TLABAllocationWeight) {