src/hotspot/share/gc/g1/g1ParScanThreadState.inline.hpp

rev 49680 : imported patch 6672778-partial-queue-trimming
rev 49681 : imported patch 6672778-refactoring
rev 49682 : [mq]: 6672778-stefanj-review

@@ -27,10 +27,11 @@
 
 #include "gc/g1/g1ParScanThreadState.hpp"
 #include "gc/g1/g1RemSet.hpp"
 #include "oops/access.inline.hpp"
 #include "oops/oop.inline.hpp"
+#include "utilities/ticks.inline.hpp"
 
 template <class T> void G1ParScanThreadState::do_oop_evac(T* p) {
   // Reference should not be NULL here as such are never pushed to the task queue.
   oop obj = RawAccess<OOP_NOT_NULL>::oop_load(p);
 

@@ -149,6 +150,51 @@
     // we drain the queues as necessary.
     trim_queue();
   }
 }
 
+inline bool G1ParScanThreadState::needs_partial_trimming() const {
+  return !_refs->overflow_empty() || _refs->size() > _stack_drain_upper_threshold;
+}
+
+inline bool G1ParScanThreadState::is_partially_trimmed() const {
+  return _refs->overflow_empty() && _refs->size() <= _stack_drain_lower_threshold;
+}
+
+inline void G1ParScanThreadState::trim_queue_to_threshold(uint threshold) {
+  StarTask ref;
+  // Drain the overflow stack first, so other threads can potentially steal.
+  while (_refs->pop_overflow(ref)) {
+    if (!_refs->try_push_to_taskqueue(ref)) {
+      dispatch_reference(ref);
+    }
+  }
+
+  while (_refs->pop_local(ref, threshold)) {
+    dispatch_reference(ref);
+  }
+}
+
+// Trim the queue down to the lower threshold once it has grown past the upper
+// threshold. Using two thresholds avoids trimming on every push and keeps
+// some entries on the queue for other threads to steal.
+inline void G1ParScanThreadState::trim_queue_partially() {
+  if (!needs_partial_trimming()) {
+    return;
+  }
+
+  const Ticks start = Ticks::now();
+  do {
+    trim_queue_to_threshold(_stack_drain_lower_threshold);
+  } while (!is_partially_trimmed());
+  _trim_ticks += Ticks::now() - start;
+}
+
+inline Tickspan G1ParScanThreadState::trim_ticks() {
+  return _trim_ticks;
+}
+
+inline void G1ParScanThreadState::reset_trim_ticks() {
+  _trim_ticks = Tickspan();
+}
+
 #endif // SHARE_VM_GC_G1_G1PARSCANTHREADSTATE_INLINE_HPP
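
A minimal standalone sketch of the two-threshold idea from the hunk above, assuming nothing beyond the C++ standard library: a plain std::deque stands in for the G1 task queue, and SimpleQueue, process(), trim_to_threshold() and trim_partially() are hypothetical names, not HotSpot APIs. The sketch ignores the overflow stack and work stealing; it only illustrates draining down to the lower bound once the upper bound has been exceeded, as trim_queue_partially() does with _stack_drain_upper_threshold and _stack_drain_lower_threshold.

// Hypothetical, simplified model of the partial-trimming hysteresis; not HotSpot code.
#include <cstddef>
#include <deque>
#include <iostream>

struct SimpleQueue {
  std::deque<int> refs;
  std::size_t upper;   // start trimming only when size() exceeds this
  std::size_t lower;   // ...and then drain down to this

  // Stand-in for dispatch_reference(): handling one entry may create new work.
  void process(int ref) {
    if (ref > 1) {
      refs.push_back(ref / 2);
    }
  }

  // Counterpart of trim_queue_to_threshold(): pop and process entries until
  // the queue has shrunk to the threshold, even if processing pushes more.
  void trim_to_threshold(std::size_t threshold) {
    while (refs.size() > threshold) {
      int ref = refs.back();
      refs.pop_back();
      process(ref);
    }
  }

  // Counterpart of trim_queue_partially(): do nothing while the queue is
  // small; once it passes the upper bound, trim it down to the lower bound.
  void trim_partially() {
    if (refs.size() <= upper) {
      return;
    }
    trim_to_threshold(lower);
  }
};

int main() {
  SimpleQueue q{ {}, /* upper */ 8, /* lower */ 4 };
  for (int i = 0; i < 32; ++i) {
    q.refs.push_back(i);
    q.trim_partially();   // cheap check most of the time, occasional drain
  }
  std::cout << "final queue size: " << q.refs.size() << '\n';
  return 0;
}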