
src/hotspot/share/gc/shared/threadLocalAllocBuffer.cpp

rev 48551 : [mq]: heap8
rev 48553 : [mq]: heap14_rebased
rev 48559 : [mq]: heap20
rev 48562 : [mq]: heap23

@@ -27,10 +27,11 @@
 #include "gc/shared/threadLocalAllocBuffer.inline.hpp"
 #include "logging/log.hpp"
 #include "memory/resourceArea.hpp"
 #include "memory/universe.inline.hpp"
 #include "oops/oop.inline.hpp"
+#include "runtime/heapMonitoring.hpp"
 #include "runtime/thread.inline.hpp"
 #include "runtime/threadSMR.hpp"
 #include "utilities/copy.hpp"
 
 // Thread-Local Edens support

@@ -44,10 +45,20 @@
 void ThreadLocalAllocBuffer::clear_before_allocation() {
   _slow_refill_waste += (unsigned)remaining();
   make_parsable(true);   // also retire the TLAB
 }
 
+size_t ThreadLocalAllocBuffer::remaining() {
+  if (current_end() == NULL) {
+    return 0;
+  }
+
+  // TODO: To be deprecated when FastTLABRefill is deprecated.
+  update_end_pointers();
+  return pointer_delta(reserved_end(), top());
+}
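
For reference, remaining() reports the free space in HeapWords between top()
and the reserved end (the allocation end plus the alignment reserve). A
self-contained toy of the computation, using plain word pointers instead of
the TLAB fields (all names below are illustrative, not HotSpot's):

  #include <cassert>
  #include <cstddef>

  // Toy model: free words left in a word buffer, mirroring
  // pointer_delta(reserved_end(), top()). Not HotSpot code.
  static size_t toy_remaining(const size_t* top, const size_t* end) {
    if (end == nullptr) {
      return 0;                             // no TLAB installed yet
    }
    assert(top != nullptr && top <= end);
    return static_cast<size_t>(end - top);  // distance in words
  }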
+
 void ThreadLocalAllocBuffer::accumulate_statistics_before_gc() {
   global_stats()->initialize();
 
   for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
     thread->tlab().accumulate_statistics();

@@ -106,28 +117,33 @@
 // Fills the current tlab with a dummy filler array to create
 // an illusion of a contiguous Eden and optionally retires the tlab.
 // Waste accounting should be done in caller as appropriate; see,
 // for example, clear_before_allocation().
 void ThreadLocalAllocBuffer::make_parsable(bool retire, bool zap) {
-  if (end() != NULL) {
+  if (current_end() != NULL) {
     invariants();
 
     if (retire) {
       myThread()->incr_allocated_bytes(used_bytes());
     }
 
-    CollectedHeap::fill_with_object(top(), hard_end(), retire && zap);
+    // TODO: To be deprecated when FastTLABRefill is deprecated.
+    update_end_pointers();
+    CollectedHeap::fill_with_object(top(), reserved_end(), retire && zap);
 
     if (retire || ZeroTLAB) {  // "Reset" the TLAB
       set_start(NULL);
       set_top(NULL);
       set_pf_top(NULL);
-      set_end(NULL);
+      set_current_end(NULL);
+      set_allocation_end(NULL);
+      set_last_slow_path_end(NULL);
     }
   }
   assert(!(retire || ZeroTLAB)  ||
-         (start() == NULL && end() == NULL && top() == NULL),
+         (start() == NULL && current_end() == NULL && top() == NULL &&
+          _allocation_end == NULL && _last_slow_path_end == NULL),
          "TLAB must be reset");
 }
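
The filler call above is what keeps the heap parsable: the unused tail
[top, reserved_end) is overwritten with a dummy object so a heap walker can
step over the gap by reading one object size. A minimal sketch of the idea
(a hypothetical length-prefixed filler, not the real fill_with_object()):

  #include <cstddef>

  // Toy filler: stamp a size header over the unused tail so a walker that
  // advances by object size skips the gap in one step. Illustrative only.
  static void toy_fill_gap(size_t* top, size_t* reserved_end) {
    if (top < reserved_end) {
      top[0] = static_cast<size_t>(reserved_end - top);  // dummy "array" size
    }
  }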
 
 void ThreadLocalAllocBuffer::resize_all_tlabs() {
   if (ResizeTLAB) {

@@ -169,24 +185,40 @@
                                   HeapWord* top,
                                   size_t    new_size) {
   _number_of_refills++;
   print_stats("fill");
   assert(top <= start + new_size - alignment_reserve(), "size too small");
+
+  // initialize() below resets _bytes_until_sample, so carry the remaining
+  // sample budget across the refill; there is nothing to carry on the very
+  // first refill.
+  size_t old_bytes_until_sample = 0;
+  if (_number_of_refills > 1) {
+    old_bytes_until_sample = _bytes_until_sample;
+  }
+
   initialize(start, top, start + new_size - alignment_reserve());
 
+  if (old_bytes_until_sample > 0) {
+    set_bytes_until_sample(old_bytes_until_sample);
+    set_sample_end();
+  }
+
   // Reset amount of internal fragmentation
   set_refill_waste_limit(initial_refill_waste_limit());
 }
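
The stash-and-restore above exists because initialize() zeroes
_bytes_until_sample. A toy model of the pattern, with hypothetical names
standing in for the HotSpot fields:

  #include <cstddef>

  // Toy model of fill(): initialize() clears the sampling budget, so it is
  // saved and restored around the call. Not HotSpot code.
  struct ToyTlab {
    size_t bytes_until_sample = 0;
    unsigned refills = 0;

    void initialize() { bytes_until_sample = 0; }

    void fill() {
      refills++;
      // Only a refill after the first has a live budget to preserve.
      size_t saved = (refills > 1) ? bytes_until_sample : 0;
      initialize();                  // wipes bytes_until_sample
      if (saved > 0) {
        bytes_until_sample = saved;  // budget survives the refill
      }
    }
  };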
 
 void ThreadLocalAllocBuffer::initialize(HeapWord* start,
                                         HeapWord* top,
                                         HeapWord* end) {
   set_start(start);
   set_top(top);
   set_pf_top(top);
-  set_end(end);
+  set_current_end(end);
+  set_allocation_end(end);
+  set_last_slow_path_end(end);
   invariants();
+  _bytes_until_sample = 0;
 }
 
 void ThreadLocalAllocBuffer::initialize() {
   initialize(NULL,                    // start
              NULL,                    // top

@@ -304,16 +336,112 @@
     p += oop(p)->size();
   }
   guarantee(p == top(), "end of last object must match end of space");
 }
 
+void ThreadLocalAllocBuffer::set_sample_end() {
+  size_t heap_words_remaining = pointer_delta(_current_end, _top);
+  size_t bytes_left = _bytes_until_sample;
+  size_t words_until_sample = bytes_left / HeapWordSize;
+
+  if (heap_words_remaining > words_until_sample) {
+    HeapWord* new_end = _top + words_until_sample;
+    set_current_end(new_end);
+    set_last_slow_path_end(new_end);
+    set_bytes_until_sample(0);
+  } else {
+    bytes_left -= heap_words_remaining * HeapWordSize;
+    set_bytes_until_sample(bytes_left);
+  }
+}
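
Worked numbers may help, assuming HeapWordSize == 8: with 1000 words between
_top and _current_end, a 4096-byte budget converts to 512 words, so the end
is lowered to _top + 512 and the budget is cleared; a 16384-byte budget
(2048 words) does not fit, so the whole 8000-byte TLAB is charged and 8384
bytes remain. A toy of the two branches:

  #include <cstddef>

  // Toy model of set_sample_end(); HeapWordSize is assumed to be 8 here.
  // end_offset_words is the sample end expressed as an offset from top.
  static void toy_set_sample_end(size_t words_remaining, size_t& bytes_budget,
                                 size_t& end_offset_words) {
    size_t words_until_sample = bytes_budget / 8;
    if (words_remaining > words_until_sample) {
      end_offset_words = words_until_sample; // sample point fits in this TLAB
      bytes_budget = 0;
    } else {
      bytes_budget -= words_remaining * 8;   // charge the whole TLAB
    }
  }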
+
+void ThreadLocalAllocBuffer::pick_next_sample(size_t overflowed_words) {
+  if (!HeapMonitoring::enabled()) {
+    return;
+  }
+
+  if (_bytes_until_sample == 0) {
+    HeapMonitoring::pick_next_sample(&_bytes_until_sample);
+  }
+
+  if (overflowed_words > 0) {
+    // Charge the overshoot of the last allocation (which ran past the
+    // previous sample point) against the new interval.
+    if (_bytes_until_sample > overflowed_words * HeapWordSize) {
+      set_bytes_until_sample(_bytes_until_sample - overflowed_words * HeapWordSize);
+    }
+  }
+
+  set_sample_end();
+
+  log_trace(gc, tlab)("TLAB picked next sample: thread: " INTPTR_FORMAT " [id: %2d]"
+                      " start: " INTPTR_FORMAT " top: " INTPTR_FORMAT " end: "
+                      INTPTR_FORMAT " allocation_end: "
+                      INTPTR_FORMAT " last_slow_path_end: " INTPTR_FORMAT,
+                      p2i(myThread()), myThread()->osthread()->thread_id(),
+                      p2i(start()), p2i(top()), p2i(current_end()),
+                      p2i(_allocation_end), p2i(_last_slow_path_end));
+}
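
Only the overshoot handling is subtle here: the allocation that crossed the
previous sample point may have run overflowed_words past it, and that excess
is charged against the new interval. A sketch with a fixed interval standing
in for HeapMonitoring's randomized pick:

  #include <cstddef>

  // Toy model: re-arm the budget, then subtract the overshoot of the last
  // allocation. The fixed interval is illustrative; the real code asks
  // HeapMonitoring::pick_next_sample(). HeapWordSize assumed to be 8.
  static const size_t kToyIntervalBytes = 512 * 1024;

  static void toy_pick_next_sample(size_t& bytes_budget,
                                   size_t overflowed_words) {
    if (bytes_budget == 0) {
      bytes_budget = kToyIntervalBytes;
    }
    size_t overshoot_bytes = overflowed_words * 8;
    if (bytes_budget > overshoot_bytes) {
      bytes_budget -= overshoot_bytes;  // keep sample points evenly spaced
    }
  }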
+
 Thread* ThreadLocalAllocBuffer::myThread() {
   return (Thread*)(((char *)this) +
                    in_bytes(start_offset()) -
                    in_bytes(Thread::tlab_start_offset()));
 }
 
+void ThreadLocalAllocBuffer::set_back_allocation_end() {
+  // Did a fast TLAB refill occur?
+  if (_last_slow_path_end != _current_end) {
+    // A fast refill replaced the TLAB, so _current_end is already the true
+    // end; propagate it to the other end pointers.
+    _last_slow_path_end = _current_end;
+    _allocation_end = _current_end;
+  } else {
+    _current_end = _allocation_end;
+  }
+}
+
+void ThreadLocalAllocBuffer::handle_sample(Thread* thread, HeapWord* result,
+                                           size_t size_in_bytes) {
+  if (!HeapMonitoring::enabled()) {
+    return;
+  }
+
+  if (_bytes_until_sample < size_in_bytes) {
+    HeapMonitoring::object_alloc_do_sample(thread,
+                                           reinterpret_cast<oopDesc*>(result),
+                                           size_in_bytes);
+  }
+
+  update_tlab_sample_point(size_in_bytes);
+}
+
+void ThreadLocalAllocBuffer::update_tlab_sample_point(size_t size_in_bytes) {
+  if (_bytes_until_sample > size_in_bytes) {
+    _bytes_until_sample -= size_in_bytes;
+    return;
+  }
+
+  // The sample point was reached: clear the budget, restore the real
+  // allocation end, and arm the next sample point.
+  set_bytes_until_sample(0);
+  set_back_allocation_end();
+  pick_next_sample();
+}
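
Taken together, the two routines implement a per-allocation countdown: sample
when the budget cannot cover the allocation, then re-arm. A toy allocation
hook showing that control flow (take_sample() and the constant interval are
stand-ins, not HotSpot APIs):

  #include <cstddef>

  // Toy per-allocation hook mirroring handle_sample() +
  // update_tlab_sample_point(). Not HotSpot code.
  struct ToySampler {
    size_t bytes_until_sample = 4096;  // stand-in for a picked interval

    void on_alloc(size_t size_in_bytes) {
      if (bytes_until_sample < size_in_bytes) {
        take_sample();                 // stands in for object_alloc_do_sample()
      }
      if (bytes_until_sample > size_in_bytes) {
        bytes_until_sample -= size_in_bytes;
      } else {
        bytes_until_sample = 4096;     // stands in for pick_next_sample()
      }
    }

    void take_sample() { /* record the allocation, e.g. its stack trace */ }
  };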
+
+void ThreadLocalAllocBuffer::update_end_pointers() {
+  // Did a fast TLAB refill occur? (To be removed when FastTLABRefill is
+  // deprecated.)
+  if (_last_slow_path_end != _current_end) {
+    // A fast refill replaced the TLAB; bring both end pointers up to date.
+    _last_slow_path_end = _current_end;
+    _allocation_end = _current_end;
+  }
+}
+
+HeapWord* ThreadLocalAllocBuffer::reserved_end() {
+  assert(_last_slow_path_end == _current_end,
+         "Have to call update_end_pointers before reserved_end.");
+  return _allocation_end + alignment_reserve();
+}
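
The assert documents the protocol around the three end pointers: a fast
(assembly) refill updates only _current_end, and update_end_pointers() folds
that back into _allocation_end and _last_slow_path_end before _allocation_end
is trusted. A toy of the reconciliation (names illustrative):

  #include <cassert>
  #include <cstddef>
  #include <cstdint>

  // Toy model of the three-end-pointer protocol. A fast refill bumps only
  // current_end; reconcile() must run before reserved_end() is meaningful.
  struct ToyEnds {
    uintptr_t current_end = 0;
    uintptr_t allocation_end = 0;
    uintptr_t last_slow_path_end = 0;

    void reconcile() {                       // mirrors update_end_pointers()
      if (last_slow_path_end != current_end) {
        last_slow_path_end = current_end;    // a fast refill happened
        allocation_end = current_end;
      }
    }

    uintptr_t reserved_end(size_t reserve_bytes) const {
      assert(last_slow_path_end == current_end);
      return allocation_end + reserve_bytes;
    }
  };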
 
 GlobalTLABStats::GlobalTLABStats() :
   _allocating_threads_avg(TLABAllocationWeight) {
 
   initialize();