src/hotspot/share/gc/shared/taskqueue.inline.hpp


Old version of the changed hunks (uses OrderAccess::load_acquire / OrderAccess::release_store):

  51 inline void GenericTaskQueue<E, F, N>::initialize() {
  52   _elems = ArrayAllocator<E>::allocate(N, F);
  53 }
  54 
  55 template<class E, MEMFLAGS F, unsigned int N>
  56 inline GenericTaskQueue<E, F, N>::~GenericTaskQueue() {
  57   ArrayAllocator<E>::free(const_cast<E*>(_elems), N);
  58 }
  59 
  60 template<class E, MEMFLAGS F, unsigned int N>
  61 bool GenericTaskQueue<E, F, N>::push_slow(E t, uint dirty_n_elems) {
  62   if (dirty_n_elems == N - 1) {
  63     // Actually means 0, so do the push.
  64     uint localBot = _bottom;
  65     // g++ complains if the volatile result of the assignment is
  66     // unused, so we cast the volatile away.  We cannot cast directly
  67     // to void, because gcc treats that as not using the result of the
  68     // assignment.  However, casting to E& means that we trigger an
  69     // unused-value warning.  So, we cast the E& to void.
  70     (void)const_cast<E&>(_elems[localBot] = t);
  71     OrderAccess::release_store(&_bottom, increment_index(localBot));
  72     TASKQUEUE_STATS_ONLY(stats.record_push());
  73     return true;
  74   }
  75   return false;
  76 }
  77 
  78 template<class E, MEMFLAGS F, unsigned int N> inline bool
  79 GenericTaskQueue<E, F, N>::push(E t) {
  80   uint localBot = _bottom;
  81   assert(localBot < N, "_bottom out of range.");
  82   idx_t top = _age.top();
  83   uint dirty_n_elems = dirty_size(localBot, top);
  84   assert(dirty_n_elems < N, "n_elems out of range.");
  85   if (dirty_n_elems < max_elems()) {
  86     // g++ complains if the volatile result of the assignment is
  87     // unused, so we cast the volatile away.  We cannot cast directly
  88     // to void, because gcc treats that as not using the result of the
  89     // assignment.  However, casting to E& means that we trigger an
  90     // unused-value warning.  So, we cast the E& to void.
  91     (void) const_cast<E&>(_elems[localBot] = t);
  92     OrderAccess::release_store(&_bottom, increment_index(localBot));
  93     TASKQUEUE_STATS_ONLY(stats.record_push());
  94     return true;
  95   } else {
  96     return push_slow(t, dirty_n_elems);
  97   }
  98 }
  99 
 100 template <class E, MEMFLAGS F, unsigned int N>
 101 inline bool OverflowTaskQueue<E, F, N>::push(E t)
 102 {
 103   if (!taskqueue_t::push(t)) {
 104     overflow_stack()->push(t);
 105     TASKQUEUE_STATS_ONLY(stats.record_overflow(overflow_stack()->size()));
 106   }
 107   return true;
 108 }
 109 
 110 template <class E, MEMFLAGS F, unsigned int N>
 111 inline bool OverflowTaskQueue<E, F, N>::try_push_to_taskqueue(E t) {
 112   return taskqueue_t::push(t);


 193   }
 194 }
 195 
 196 template <class E, MEMFLAGS F, unsigned int N>
 197 bool OverflowTaskQueue<E, F, N>::pop_overflow(E& t)
 198 {
 199   if (overflow_empty()) return false;
 200   t = overflow_stack()->pop();
 201   return true;
 202 }
 203 
 204 template<class E, MEMFLAGS F, unsigned int N>
 205 bool GenericTaskQueue<E, F, N>::pop_global(volatile E& t) {
 206   Age oldAge = _age.get();
 207   // Architectures with weak memory model require a barrier here
 208   // to guarantee that bottom is not older than age,
 209   // which is crucial for the correctness of the algorithm.
 210 #ifndef CPU_MULTI_COPY_ATOMIC
 211   OrderAccess::fence();
 212 #endif
 213   uint localBot = OrderAccess::load_acquire(&_bottom);
 214   uint n_elems = size(localBot, oldAge.top());
 215   if (n_elems == 0) {
 216     return false;
 217   }
 218 
 219   // g++ complains if the volatile result of the assignment is
 220   // unused, so we cast the volatile away.  We cannot cast directly
 221   // to void, because gcc treats that as not using the result of the
 222   // assignment.  However, casting to E& means that we trigger an
 223   // unused-value warning.  So, we cast the E& to void.
 224   (void) const_cast<E&>(t = _elems[oldAge.top()]);
 225   Age newAge(oldAge);
 226   newAge.increment();
 227   Age resAge = _age.cmpxchg(newAge, oldAge);
 228 
 229   // Note that using "_bottom" here might fail, since a pop_local might
 230   // have decremented it.
 231   assert(dirty_size(localBot, newAge.top()) != N - 1, "sanity");
 232   return resAge == oldAge;
 233 }


New version of the same hunks, with OrderAccess::load_acquire / OrderAccess::release_store replaced by Atomic::load_acquire / Atomic::release_store:

  51 inline void GenericTaskQueue<E, F, N>::initialize() {
  52   _elems = ArrayAllocator<E>::allocate(N, F);
  53 }
  54 
  55 template<class E, MEMFLAGS F, unsigned int N>
  56 inline GenericTaskQueue<E, F, N>::~GenericTaskQueue() {
  57   ArrayAllocator<E>::free(const_cast<E*>(_elems), N);
  58 }
  59 
  60 template<class E, MEMFLAGS F, unsigned int N>
  61 bool GenericTaskQueue<E, F, N>::push_slow(E t, uint dirty_n_elems) {
  62   if (dirty_n_elems == N - 1) {
  63     // Actually means 0, so do the push.
  64     uint localBot = _bottom;
  65     // g++ complains if the volatile result of the assignment is
  66     // unused, so we cast the volatile away.  We cannot cast directly
  67     // to void, because gcc treats that as not using the result of the
  68     // assignment.  However, casting to E& means that we trigger an
  69     // unused-value warning.  So, we cast the E& to void.
  70     (void)const_cast<E&>(_elems[localBot] = t);
  71     Atomic::release_store(&_bottom, increment_index(localBot));
  72     TASKQUEUE_STATS_ONLY(stats.record_push());
  73     return true;
  74   }
  75   return false;
  76 }
  77 
  78 template<class E, MEMFLAGS F, unsigned int N> inline bool
  79 GenericTaskQueue<E, F, N>::push(E t) {
  80   uint localBot = _bottom;
  81   assert(localBot < N, "_bottom out of range.");
  82   idx_t top = _age.top();
  83   uint dirty_n_elems = dirty_size(localBot, top);
  84   assert(dirty_n_elems < N, "n_elems out of range.");
  85   if (dirty_n_elems < max_elems()) {
  86     // g++ complains if the volatile result of the assignment is
  87     // unused, so we cast the volatile away.  We cannot cast directly
  88     // to void, because gcc treats that as not using the result of the
  89     // assignment.  However, casting to E& means that we trigger an
  90     // unused-value warning.  So, we cast the E& to void.
  91     (void) const_cast<E&>(_elems[localBot] = t);
  92     Atomic::release_store(&_bottom, increment_index(localBot));
  93     TASKQUEUE_STATS_ONLY(stats.record_push());
  94     return true;
  95   } else {
  96     return push_slow(t, dirty_n_elems);
  97   }
  98 }
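
As a side note on the fast path above: the element is written into its slot first, and only then is _bottom bumped with a release store, so a stealer that reads _bottom with acquire semantics is guaranteed to also see the stored element. The following is a minimal standalone sketch of that owner-side ordering using C++11 atomics rather than HotSpot's Atomic class; the ToyWorkStealingDeque name, the monotonically increasing indices, and the plain (non-volatile) element slots are simplifications made for illustration only. The matching steal side is sketched after pop_global further down.

// Illustrative sketch only -- not part of taskqueue.inline.hpp.
#include <atomic>
#include <cstdint>

template <class E, unsigned int N>
class ToyWorkStealingDeque {
public:
  // Owner-side push: fill the slot, then publish it with a release store
  // of _bottom (the counterpart of Atomic::release_store above).
  bool push(E t) {
    uint32_t bot = _bottom.load(std::memory_order_relaxed);  // owner is the only writer
    uint32_t top = _top.load(std::memory_order_acquire);
    if (bot - top >= N - 1) {
      // Conservatively treat the deque as full. The real queue works with
      // mod-N indices, where a dirty size of N-1 can actually mean "empty";
      // that corner case is what push_slow() resolves. The monotonic
      // indices of this toy avoid the ambiguity.
      return false;
    }
    _elems[bot % N] = t;                                      // store the element
    _bottom.store(bot + 1, std::memory_order_release);        // then publish it
    return true;
  }

  bool steal(E& t);  // sketched after pop_global below

private:
  E _elems[N];                        // plain slots; the real code uses a
                                      // volatile E* plus the cast-to-void idiom
  std::atomic<uint32_t> _bottom{0};   // written only by the owning thread
  std::atomic<uint32_t> _top{0};      // advanced by successful stealers
};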
  99 
 100 template <class E, MEMFLAGS F, unsigned int N>
 101 inline bool OverflowTaskQueue<E, F, N>::push(E t)
 102 {
 103   if (!taskqueue_t::push(t)) {
 104     overflow_stack()->push(t);
 105     TASKQUEUE_STATS_ONLY(stats.record_overflow(overflow_stack()->size()));
 106   }
 107   return true;
 108 }
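
OverflowTaskQueue::push above never reports failure: when the bounded queue rejects the element, it is parked on an unbounded, owner-local overflow stack and handed back later through pop_overflow (shown further down). Continuing the toy sketch from above, with std::vector standing in for the GrowableArray-backed overflow stack; again, all names here are illustrative.

// Illustrative sketch only.
#include <vector>

template <class E, unsigned int N>
class ToyOverflowTaskQueue : public ToyWorkStealingDeque<E, N> {
public:
  // A push that cannot fail: spill to the overflow stack when the bounded
  // deque is full, mirroring OverflowTaskQueue::push above.
  bool push(E t) {
    if (!ToyWorkStealingDeque<E, N>::push(t)) {
      _overflow.push_back(t);
    }
    return true;
  }

  // Hand back spilled elements. Only the owning thread touches _overflow,
  // so no synchronization is needed.
  bool pop_overflow(E& t) {
    if (_overflow.empty()) return false;
    t = _overflow.back();
    _overflow.pop_back();
    return true;
  }

private:
  std::vector<E> _overflow;
};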
 109 
 110 template <class E, MEMFLAGS F, unsigned int N>
 111 inline bool OverflowTaskQueue<E, F, N>::try_push_to_taskqueue(E t) {
 112   return taskqueue_t::push(t);


 193   }
 194 }
 195 
 196 template <class E, MEMFLAGS F, unsigned int N>
 197 bool OverflowTaskQueue<E, F, N>::pop_overflow(E& t)
 198 {
 199   if (overflow_empty()) return false;
 200   t = overflow_stack()->pop();
 201   return true;
 202 }
 203 
 204 template<class E, MEMFLAGS F, unsigned int N>
 205 bool GenericTaskQueue<E, F, N>::pop_global(volatile E& t) {
 206   Age oldAge = _age.get();
 207   // Architectures with weak memory model require a barrier here
 208   // to guarantee that bottom is not older than age,
 209   // which is crucial for the correctness of the algorithm.
 210 #ifndef CPU_MULTI_COPY_ATOMIC
 211   OrderAccess::fence();
 212 #endif
 213   uint localBot = Atomic::load_acquire(&_bottom);
 214   uint n_elems = size(localBot, oldAge.top());
 215   if (n_elems == 0) {
 216     return false;
 217   }
 218 
 219   // g++ complains if the volatile result of the assignment is
 220   // unused, so we cast the volatile away.  We cannot cast directly
 221   // to void, because gcc treats that as not using the result of the
 222   // assignment.  However, casting to E& means that we trigger an
 223   // unused-value warning.  So, we cast the E& to void.
 224   (void) const_cast<E&>(t = _elems[oldAge.top()]);
 225   Age newAge(oldAge);
 226   newAge.increment();
 227   Age resAge = _age.cmpxchg(newAge, oldAge);
 228 
 229   // Note that using "_bottom" here might fail, since a pop_local might
 230   // have decremented it.
 231   assert(dirty_size(localBot, newAge.top()) != N - 1, "sanity");
 232   return resAge == oldAge;
 233 }
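
pop_global is the stealer side of the protocol: snapshot the age, order that snapshot against the subsequent read of _bottom (hence the extra full fence on CPUs that are not multi-copy atomic), read the oldest element tentatively, and then try to claim it with a single compare-and-swap on the age word, so that at most one contender succeeds per slot. Below is a matching steal() for the toy deque sketched earlier. It is a simplification: the age here is just the bare top index (the real Age pairs top with a tag precisely so the cmpxchg also fails on ABA when top wraps around the mod-N index space), and the tentative read goes through a plain slot where the real code uses volatile elements.

// Illustrative sketch only -- steal() for the ToyWorkStealingDeque above.
template <class E, unsigned int N>
bool ToyWorkStealingDeque<E, N>::steal(E& t) {
  uint32_t old_top = _top.load(std::memory_order_relaxed);   // snapshot the "age"
  // The real code issues OrderAccess::fence() here on CPUs that are not
  // multi-copy atomic, so that _bottom cannot be observed "older" than age.
  std::atomic_thread_fence(std::memory_order_seq_cst);
  uint32_t bot = _bottom.load(std::memory_order_acquire);
  if (bot == old_top) {
    return false;                    // empty as far as this stealer can tell
  }
  // Tentative read; it only counts if the claim below succeeds.
  t = _elems[old_top % N];
  // Claim the element: at most one contender's CAS on top succeeds, which
  // mirrors _age.cmpxchg(newAge, oldAge) above, minus the wrap-around tag.
  return _top.compare_exchange_strong(old_top, old_top + 1,
                                      std::memory_order_seq_cst);
}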

