< prev index next >
src/share/vm/gc/shared/threadLocalAllocBuffer.cpp
Print this page
*** 27,36 ****
--- 27,37 ----
#include "gc/shared/threadLocalAllocBuffer.inline.hpp"
#include "logging/log.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.inline.hpp"
#include "oops/oop.inline.hpp"
+ #include "runtime/heapMonitoring.hpp"
#include "runtime/thread.inline.hpp"
#include "utilities/copy.hpp"
// Thread-Local Edens support
*** 119,128 ****
--- 120,131 ----
if (retire || ZeroTLAB) { // "Reset" the TLAB
set_start(NULL);
set_top(NULL);
set_pf_top(NULL);
set_end(NULL);
+ set_actual_end(NULL);
+ set_slow_path_end(NULL);
}
}
assert(!(retire || ZeroTLAB) ||
(start() == NULL && end() == NULL && top() == NULL),
"TLAB must be reset");
*** 181,191 ****
--- 184,197 ----
HeapWord* end) {
set_start(start);
set_top(top);
set_pf_top(top);
set_end(end);
+ set_actual_end(end);
+ set_slow_path_end(end);
invariants();
+ _bytes_until_sample = 0;
}
void ThreadLocalAllocBuffer::initialize() {
initialize(NULL, // start
NULL, // top
*** 304,320 ****
--- 310,387 ----
p += oop(p)->size();
}
guarantee(p == top(), "end of last object must match end of space");
}
+ // Choose the next heap-sampling point and, if it falls inside this TLAB,
+ // clip _end so the inlined fast-path allocation runs out of room exactly
+ // at the sample point and drops into the slow path. No-op when heap
+ // monitoring is disabled.
+ void ThreadLocalAllocBuffer::pick_next_sample() {
+ if (!HeapMonitoring::enabled()) {
+ return;
+ }
+
+ // A zero counter means the previous sample point was consumed; ask
+ // HeapMonitoring to write a fresh byte countdown into our counter.
+ if (bytes_until_sample() == 0) {
+ HeapMonitoring::pick_next_sample(bytes_until_sample_addr());
+ }
+
+ // Finally, fix up the sampling bytes left and _end.
+ size_t heap_words_remaining = _end - _top;
+ size_t bytes_left = bytes_until_sample();
+ size_t words_until_sample = bytes_left / HeapWordSize;
+
+ if (heap_words_remaining > words_until_sample) {
+ // Sample point lies within this TLAB: clip _end to it and mark the
+ // countdown as consumed (the clipped _end now carries the state).
+ set_end(_top + words_until_sample);
+ set_bytes_until_sample(0);
+ } else {
+ // Sample point lies beyond this TLAB: charge the whole remaining
+ // TLAB against the countdown and leave _end untouched.
+ bytes_left -= heap_words_remaining * HeapWordSize;
+ set_bytes_until_sample(bytes_left);
+ }
+ }
+
// Recover the owning Thread from this embedded TLAB. The TLAB lives at a
// fixed offset inside Thread, so the Thread base address is the address of
// our _start field minus Thread::tlab_start_offset().
Thread* ThreadLocalAllocBuffer::myThread() {
return (Thread*)(((char *)this) +
in_bytes(start_offset()) -
in_bytes(Thread::tlab_start_offset()));
}
+ // Undo the sampling clip of _end, or — if _end was replaced by a TLAB
+ // refill since we last clipped it — resync the cached sampling ends to
+ // the new _end instead. Statement order matters: _slow_path_end must be
+ // compared against _end before either field is overwritten.
+ void ThreadLocalAllocBuffer::set_back_actual_end() {
+ // Did a fast TLAB refill occur?
+ if (_slow_path_end != _end) {
+ // Fix up the actual end to be now the end of this TLAB.
+ _slow_path_end = _end;
+ _actual_end = _end;
+ } else {
+ // No refill happened: _end still carries the sampling clip from
+ // pick_next_sample(); restore the real end of the buffer.
+ _end = _actual_end;
+ }
+ }
+
+ // Account a fresh allocation ('result' of 'size') against the sampling
+ // countdown, emit a sample if the sample point has been reached, and
+ // re-arm the next sample point. No-op when heap monitoring is disabled.
+ void ThreadLocalAllocBuffer::handle_sample(Thread* thread, HeapWord* result,
+ size_t size) {
+ if (!HeapMonitoring::enabled()) {
+ return;
+ }
+
+ // Saturate at zero: pick_next_sample() deliberately leaves the counter
+ // at 0 once the sample point is inside this TLAB, so a plain unsigned
+ // subtraction would wrap to a huge value and permanently disarm
+ // sampling (the counter would never be 0 again in pick_next_sample()).
+ // NOTE(review): 'size' appears to be in HeapWords while the counter is
+ // in bytes (cf. pick_next_sample()) — confirm the intended unit.
+ size_t remaining = bytes_until_sample();
+ set_bytes_until_sample(remaining > size ? remaining - size : 0);
+
+ // Restore the real _end before deciding whether to sample.
+ set_back_actual_end();
+ if (should_sample()) {
+ HeapMonitoring::object_alloc_do_sample(thread,
+ reinterpret_cast<oopDesc*>(result),
+ size);
+ }
+ pick_next_sample();
+ }
+
+ // Return the true end of the TLAB storage (including the alignment
+ // reserve), ignoring any sampling clip applied to _end. Performs the
+ // same refill-detection resync as set_back_actual_end(), but on the
+ // no-refill path it reads _actual_end directly instead of writing _end.
+ HeapWord* ThreadLocalAllocBuffer::hard_end() {
+ // Did a fast TLAB refill occur?
+ if (_slow_path_end != _end) {
+ // Fix up the actual end to be now the end of this TLAB.
+ _slow_path_end = _end;
+ _actual_end = _end;
+ }
+
+ return _actual_end + alignment_reserve();
+ }
GlobalTLABStats::GlobalTLABStats() :
_allocating_threads_avg(TLABAllocationWeight) {
initialize();
< prev index next >