src/hotspot/share/utilities/lockFreeStack.hpp

The change reorders the arguments of the Atomic operations to the new
destination-first API. The four affected call sites are:

-      cur = Atomic::cmpxchg(first, &_top, cur);
+      cur = Atomic::cmpxchg(&_top, cur, first);

-      result = Atomic::cmpxchg(new_top, &_top, result);
+      result = Atomic::cmpxchg(&_top, result, new_top);

-    return Atomic::xchg((T*)NULL, &_top);
+    return Atomic::xchg(&_top, (T*)NULL);

-    Atomic::store(new_next, next_ptr(value));
+    Atomic::store(next_ptr(value), new_next);

The new version of the changed region follows.
// LockFreeStacks support polymorphic elements.  Because the objects
// in a stack are externally managed, rather than being embedded
// values in the stack, the actual type of such objects may be more
// specific than the stack's element type.
//
// \tparam T is the class of the elements in the stack.
//
// \tparam next_ptr is a function pointer.  Applying this function to
// an object of type T must return a pointer to the list entry member
// of the object associated with the LockFreeStack type.
template<typename T, T* volatile* (*next_ptr)(T&)>
class LockFreeStack {
  T* volatile _top;

  void prepend_impl(T* first, T* last) {
    T* cur = top();
    T* old;
    do {
      old = cur;
      set_next(*last, cur);
      cur = Atomic::cmpxchg(&_top, cur, first);
    } while (old != cur);
  }
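  // prepend_impl is the usual lock-free push loop: link the new sublist
  // in front of the current top, then try to swing _top from that top to
  // first.  On failure, cmpxchg returns the value some other thread
  // installed and the loop retries with it; old tracks the compare value,
  // so the loop exits exactly when the cmpxchg succeeded.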

  // Noncopyable.
  LockFreeStack(const LockFreeStack&);
  LockFreeStack& operator=(const LockFreeStack&);

public:
  LockFreeStack() : _top(NULL) {}
  ~LockFreeStack() { assert(empty(), "stack not empty"); }

  // Atomically removes the top object from this stack and returns a
  // pointer to that object, or NULL if this stack is empty. Acts as a
  // full memory barrier. Subject to ABA behavior; callers must ensure
  // usage is safe.
  T* pop() {
    T* result = top();
    T* old;
    do {
      old = result;
      T* new_top = NULL;
      if (result != NULL) {
        new_top = next(*result);
      }
      // CAS even on empty pop, for consistent membar behavior.
      result = Atomic::cmpxchg(&_top, result, new_top);
    } while (result != old);
    if (result != NULL) {
      set_next(*result, NULL);
    }
    return result;
  }
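  // ABA example: a thread reads top() == A and next(A) == B inside pop(),
  // then stalls.  Meanwhile other threads pop A, pop B, and push A back.
  // The stalled thread's cmpxchg now succeeds (top is A again) and
  // installs B as the new top, even though B was removed and may have
  // been reused.  Callers make this safe by, e.g., not recycling popped
  // objects while another thread may still be executing pop().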

  // Atomically exchange the list of elements with NULL, returning the old
  // list of elements.  Acts as a full memory barrier.
  // postcondition: empty()
  T* pop_all() {
    return Atomic::xchg(&_top, (T*)NULL);
  }
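  // A typical use of pop_all() (sketch; stack and process() are
  // hypothetical): detach the entire list with one atomic exchange, then
  // walk it without further synchronization:
  //
  //   for (T* e = stack.pop_all(); e != NULL; ) {
  //     T* next_e = next(*e);
  //     set_next(*e, NULL);   // safe: e is no longer in the stack
  //     process(e);
  //     e = next_e;
  //   }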

  // Atomically adds value to the top of this stack.  Acts as a full
  // memory barrier.
  void push(T& value) {
    assert(next(value) == NULL, "precondition");
    prepend_impl(&value, &value);
  }

  // Atomically adds the list of objects (designated by first and
  // last) before the objects already in this stack, in the same order
  // as in the list. Acts as a full memory barrier.
  // precondition: next(last) == NULL.
  // postcondition: top() == &first, next(last) == old top().
  void prepend(T& first, T& last) {
    assert(next(last) == NULL, "precondition");
#ifdef ASSERT
    for (T* p = &first; p != &last; p = next(*p)) {
      assert(p != NULL, "invalid prepend list");
    }
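    // In debug builds, the loop above walks the list from first, checking
    // that last is reachable before the list is linked in.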
  // [... unchanged code elided by this view, including the remainder of
  //  prepend() and the empty() and top() accessors ...]
  // Return the number of objects in the stack.  There must be no concurrent
  // pops while the length is being determined.
  size_t length() const {
    size_t result = 0;
    for (const T* current = top(); current != NULL; current = next(*current)) {
      ++result;
    }
    return result;
  }

  // Return the entry following value in the list used by the
  // specialized LockFreeStack class.
  static T* next(const T& value) {
    return Atomic::load(next_ptr(const_cast<T&>(value)));
  }

  // Set the entry following value to new_next in the list used by the
  // specialized LockFreeStack class.  Not thread-safe; in particular,
  // if value is in an instance of this specialization of LockFreeStack,
  // there must be no concurrent push or pop operations on that stack.
  static void set_next(T& value, T* new_next) {
    Atomic::store(next_ptr(value), new_next);
  }
};

#endif // SHARE_UTILITIES_LOCKFREESTACK_HPP
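For illustration, here is a minimal sketch of how a client might satisfy the
next_ptr contract and use the stack. The names (Entry, EntryStack,
entry_stack_demo) are hypothetical, not from the JDK, and the sketch assumes
the HotSpot-internal environment (Atomic, NULL) that the header itself uses.

// Hypothetical element type with the list entry member embedded in the
// object, as the class comment requires.
class Entry {
  Entry* volatile _next;
public:
  Entry() : _next(NULL) {}
  // Matches the next_ptr template parameter: T* volatile* (*)(T&).
  static Entry* volatile* next_ptr(Entry& e) { return &e._next; }
};

typedef LockFreeStack<Entry, &Entry::next_ptr> EntryStack;

// Hypothetical demo of the basic operations.
void entry_stack_demo(EntryStack& stack, Entry& a, Entry& b) {
  stack.push(a);                         // stack: a
  stack.push(b);                         // stack: b -> a
  Entry* first = stack.pop();            // returns &b; next(b) reset to NULL
  Entry* second = stack.pop();           // returns &a; stack now empty
  EntryStack::set_next(*first, second);  // re-link b -> a outside the stack
  stack.prepend(*first, *second);        // stack: b -> a, one atomic step
}

Because the stack never allocates and only links objects the caller owns,
element lifetime and reuse are entirely the caller's responsibility; this is
the root of the ABA caveat on pop().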