< prev index next >

src/hotspot/share/gc/shared/threadLocalAllocBuffer.cpp

Print this page
rev 47223 : [mq]: heapz8
rev 47224 : [mq]: heap9a

@@ -27,10 +27,11 @@
 #include "gc/shared/threadLocalAllocBuffer.inline.hpp"
 #include "logging/log.hpp"
 #include "memory/resourceArea.hpp"
 #include "memory/universe.inline.hpp"
 #include "oops/oop.inline.hpp"
+#include "runtime/heapMonitoring.hpp"
 #include "runtime/thread.inline.hpp"
 #include "utilities/copy.hpp"
 
 // Thread-Local Edens support
 

@@ -119,14 +120,17 @@
     if (retire || ZeroTLAB) {  // "Reset" the TLAB
       set_start(NULL);
       set_top(NULL);
       set_pf_top(NULL);
       set_end(NULL);
+      set_actual_end(NULL);
+      set_slow_path_end(NULL);
     }
   }
   assert(!(retire || ZeroTLAB)  ||
-         (start() == NULL && end() == NULL && top() == NULL),
+         (start() == NULL && end() == NULL && top() == NULL &&
+          actual_end() == NULL && slow_path_end() == NULL),
          "TLAB must be reset");
 }
 
 void ThreadLocalAllocBuffer::resize_all_tlabs() {
   if (ResizeTLAB) {

@@ -168,12 +172,25 @@
                                   HeapWord* top,
                                   size_t    new_size) {
   _number_of_refills++;
   print_stats("fill");
   assert(top <= start + new_size - alignment_reserve(), "size too small");
+
+  // Remember old bytes until sample for the next tlab only if this is our first
+  // actual refill.
+  size_t old_bytes_until_sample = 0;
+  if (_number_of_refills > 1) {
+    old_bytes_until_sample = bytes_until_sample();
+  }
+
   initialize(start, top, start + new_size - alignment_reserve());
 
+  if (old_bytes_until_sample > 0) {
+    set_bytes_until_sample(old_bytes_until_sample);
+    set_sample_end();
+  }
+
   // Reset amount of internal fragmentation
   set_refill_waste_limit(initial_refill_waste_limit());
 }
 
 void ThreadLocalAllocBuffer::initialize(HeapWord* start,

@@ -181,11 +198,14 @@
                                         HeapWord* end) {
   set_start(start);
   set_top(top);
   set_pf_top(top);
   set_end(end);
+  set_actual_end(end);
+  set_slow_path_end(end);
   invariants();
+  _bytes_until_sample = 0;
 }
 
 void ThreadLocalAllocBuffer::initialize() {
   initialize(NULL,                    // start
              NULL,                    // top

@@ -304,17 +324,103 @@
     p += oop(p)->size();
   }
   guarantee(p == top(), "end of last object must match end of space");
 }
 
+// Establish the heap-sampling point inside the current TLAB.
+// If the sampling countdown (bytes_until_sample) falls within the space
+// remaining in this TLAB, pull the visible _end back to the sample point so
+// the inline fast-path allocation runs out of room there and falls into the
+// slow path, where the sample is taken.  Otherwise this TLAB's remaining
+// capacity is debited from the countdown and the sample is deferred to a
+// later TLAB.
+void ThreadLocalAllocBuffer::set_sample_end() {
+  size_t heap_words_remaining = _end - _top;
+  size_t bytes_left = bytes_until_sample();
+  size_t words_until_sample = bytes_left / HeapWordSize;
+
+  if (heap_words_remaining > words_until_sample) {
+    // Sample point lies inside this TLAB: shorten the visible end to it.
+    // _actual_end still records the TLAB's true end (see hard_end()).
+    HeapWord* new_end = _top + words_until_sample;
+    set_end(new_end);
+    set_slow_path_end(new_end);
+    set_bytes_until_sample(0);
+  } else {
+    // Sample point lies beyond this TLAB: consume its capacity from the
+    // countdown and carry the remainder into the next TLAB.
+    bytes_left -= heap_words_remaining * HeapWordSize;
+    set_bytes_until_sample(bytes_left);
+  }
+}
+
+// Pick the next heap-sampling point for this TLAB.  'diff' is extra space
+// (in heap words) left over from the previous allocation; it is credited
+// against the sampling countdown so the average sampling interval stays
+// accurate.  No-op when heap monitoring is disabled.
+void ThreadLocalAllocBuffer::pick_next_sample(size_t diff) {
+  if (!HeapMonitoring::enabled()) {
+    return;
+  }
+
+  // Countdown exhausted: ask the monitor for a fresh (randomized) interval,
+  // written directly through the countdown's address.
+  if (bytes_until_sample() == 0) {
+    HeapMonitoring::pick_next_sample(bytes_until_sample_addr());
+  }
+
+  if (diff > 0) {
+    // Try to correct sample size by removing extra space from last allocation.
+    // Only subtract when it cannot underflow the countdown.
+    if (bytes_until_sample() > diff * HeapWordSize) {
+      set_bytes_until_sample(bytes_until_sample() - diff * HeapWordSize);
+    }
+  }
+
+  set_sample_end();
+
+  // NOTE(review): HotSpot convention is PTR_FORMAT with p2i() rather than
+  // %p for pointers in log messages — confirm and normalize.
+  log_trace(gc, tlab)("TLAB picked next sample: thread: " INTPTR_FORMAT " [id: %2d]"
+                      " start: %p  top: %p end: %p actual_end: %p slow_path_end: %p",
+                      p2i(myThread()), myThread()->osthread()->thread_id(),
+                      start(), top(), end(),
+                      actual_end(), slow_path_end());
+}
+
+// Recover the owning Thread from this embedded TLAB: the TLAB lives inline
+// inside Thread, so subtracting the TLAB-start field's offset within Thread
+// from this object's address yields the enclosing Thread.
Thread* ThreadLocalAllocBuffer::myThread() {
  return (Thread*)(((char *)this) +
                   in_bytes(start_offset()) -
                   in_bytes(Thread::tlab_start_offset()));
}
 
+// Undo the sampling-induced shortening of _end (see set_sample_end()) so the
+// TLAB is usable to its true end again, reconciling _end, _actual_end and
+// _slow_path_end after a sample has been taken.
+void ThreadLocalAllocBuffer::set_back_actual_end() {
+  // Did a fast TLAB refill occur?
+  if (_slow_path_end != _end) {
+    // A refill replaced _end behind our back; adopt it as the new true end.
+    // Fix up the actual end to be now the end of this TLAB.
+    _slow_path_end = _end;
+    _actual_end = _end;
+  } else {
+    // No refill: _end was only shortened for sampling; restore the true end.
+    _end = _actual_end;
+  }
+}
+
+// Account a fresh allocation of 'size' heap words at 'result' against the
+// sampling countdown, and take a heap sample when the countdown is spent.
+// No-op when heap monitoring is disabled.
+void ThreadLocalAllocBuffer::handle_sample(Thread* thread, HeapWord* result,
+                                           size_t size) {
+  if (!HeapMonitoring::enabled()) {
+    return;
+  }
+
+  size_t size_in_bytes = size * HeapWordSize;
+  if (bytes_until_sample() > size_in_bytes) {
+    set_bytes_until_sample(bytes_until_sample() - size_in_bytes);
+  } else {
+    // Not exactly right: the overshoot (bytes past the sample point) is
+    // discarded here; ideally it would be remembered and deducted from the
+    // next sampling interval.
+    set_bytes_until_sample(0);
+  }
+
+  // Should we sample now?
+  if (should_sample()) {
+    HeapMonitoring::object_alloc_do_sample(thread,
+                                           reinterpret_cast<oopDesc*>(result),
+                                           size_in_bytes);
+    // Restore the TLAB's true end, then arm the next sample point.
+    set_back_actual_end();
+    pick_next_sample();
+  }
+}
+
+// Return the TLAB's true end including the alignment reserve, independent of
+// any sampling-induced shortening of _end.  As a side effect this repairs
+// _actual_end/_slow_path_end after a fast TLAB refill, which updates only
+// _end (same fixup as in set_back_actual_end()).
+HeapWord* ThreadLocalAllocBuffer::hard_end() {
+  // Did a fast TLAB refill occur?
+  if (_slow_path_end != _end) {
+    // Fix up the actual end to be now the end of this TLAB.
+    _slow_path_end = _end;
+    _actual_end = _end;
+  }

+  return _actual_end + alignment_reserve();
+}
 GlobalTLABStats::GlobalTLABStats() :
   _allocating_threads_avg(TLABAllocationWeight) {
 
   initialize();
 
< prev index next >