< prev index next >

src/hotspot/share/oops/accessBackend.inline.hpp

Print this page
rev 56016 : 8229422: Taskqueue: Outdated selection of weak memory model platforms
Reviewed-by:


 // Atomically exchanges new_value into the oop field at (base + offset) and
 // returns the previous value. Thin forwarder: computes the field address and
 // delegates to oop_atomic_xchg.
 // NOTE(review): the enclosing "template <DecoratorSet decorators>" header
 // (file line 113) lies above this excerpt.
 114 template <typename T>
 115 inline T RawAccessBarrier<decorators>::oop_atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
 116   return oop_atomic_xchg(new_value, field_addr(base, offset));
 117 }
 118 
 // Copies 'length' elements of oop array data from source to destination.
 // Pure forwarder: passes all arguments unchanged to the generic arraycopy
 // and returns its success/failure result.
 119 template <DecoratorSet decorators>
 120 template <typename T>
 121 inline bool RawAccessBarrier<decorators>::oop_arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
 122                                                         arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
 123                                                         size_t length) {
 124   return arraycopy(src_obj, src_offset_in_bytes, src_raw,
 125                    dst_obj, dst_offset_in_bytes, dst_raw,
 126                    length);
 127 }
 128 
 // MO_SEQ_CST load (pre-patch version): on CPUs that are not multiple-copy
 // atomic, a full fence is issued before the acquiring load so that
 // sequentially consistent loads also order independent writes (IRIW).
 // The guard here is the runtime flag
 // support_IRIW_for_not_multiple_copy_atomic_cpu, which this changeset
 // (JDK-8229422) replaces — see the patched version below.
 129 template <DecoratorSet decorators>
 130 template <DecoratorSet ds, typename T>
 131 inline typename EnableIf<
 132   HasDecorator<ds, MO_SEQ_CST>::value, T>::type
 133 RawAccessBarrier<decorators>::load_internal(void* addr) {
 134   if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
 135     OrderAccess::fence();
 136   }
 137   return OrderAccess::load_acquire(reinterpret_cast<const volatile T*>(addr));
 138 }
 139 
 // MO_ACQUIRE load: plain acquiring load, no leading fence needed.
 140 template <DecoratorSet decorators>
 141 template <DecoratorSet ds, typename T>
 142 inline typename EnableIf<
 143   HasDecorator<ds, MO_ACQUIRE>::value, T>::type
 144 RawAccessBarrier<decorators>::load_internal(void* addr) {
 145   return OrderAccess::load_acquire(reinterpret_cast<const volatile T*>(addr));
 146 }
 147 
 // MO_RELAXED load: atomic (word-tearing-free) load with no ordering
 // constraints beyond atomicity.
 148 template <DecoratorSet decorators>
 149 template <DecoratorSet ds, typename T>
 150 inline typename EnableIf<
 151   HasDecorator<ds, MO_RELAXED>::value, T>::type
 152 RawAccessBarrier<decorators>::load_internal(void* addr) {
 153   return Atomic::load(reinterpret_cast<const volatile T*>(addr));
 154 }




 // Atomically exchanges new_value into the oop field at (base + offset) and
 // returns the previous value. Thin forwarder: computes the field address and
 // delegates to oop_atomic_xchg. Unchanged by this patch.
 // NOTE(review): the enclosing "template <DecoratorSet decorators>" header
 // (file line 113) lies above this excerpt.
 114 template <typename T>
 115 inline T RawAccessBarrier<decorators>::oop_atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
 116   return oop_atomic_xchg(new_value, field_addr(base, offset));
 117 }
 118 
 // Copies 'length' elements of oop array data from source to destination.
 // Pure forwarder to the generic arraycopy. Unchanged by this patch.
 119 template <DecoratorSet decorators>
 120 template <typename T>
 121 inline bool RawAccessBarrier<decorators>::oop_arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
 122                                                         arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
 123                                                         size_t length) {
 124   return arraycopy(src_obj, src_offset_in_bytes, src_raw,
 125                    dst_obj, dst_offset_in_bytes, dst_raw,
 126                    length);
 127 }
 128 
 // MO_SEQ_CST load (patched version, JDK-8229422): the runtime flag
 // support_IRIW_for_not_multiple_copy_atomic_cpu is replaced by a
 // compile-time platform macro, letting the compiler drop the fence
 // entirely on multiple-copy-atomic CPUs. The fence before the acquiring
 // load preserves IRIW ordering for sequentially consistent loads on
 // platforms that need it.
 // NOTE(review): the macro is spelled ..._MULTI_COPY_... here while the
 // replaced flag said "multiple" — confirm the name matches its definition
 // in the platform globalDefinitions headers.
 129 template <DecoratorSet decorators>
 130 template <DecoratorSet ds, typename T>
 131 inline typename EnableIf<
 132   HasDecorator<ds, MO_SEQ_CST>::value, T>::type
 133 RawAccessBarrier<decorators>::load_internal(void* addr) {
 134   if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
 135     OrderAccess::fence();
 136   }
 137   return OrderAccess::load_acquire(reinterpret_cast<const volatile T*>(addr));
 138 }
 139 
 // MO_ACQUIRE load: plain acquiring load, no leading fence needed.
 // Unchanged by this patch.
 140 template <DecoratorSet decorators>
 141 template <DecoratorSet ds, typename T>
 142 inline typename EnableIf<
 143   HasDecorator<ds, MO_ACQUIRE>::value, T>::type
 144 RawAccessBarrier<decorators>::load_internal(void* addr) {
 145   return OrderAccess::load_acquire(reinterpret_cast<const volatile T*>(addr));
 146 }
 147 
 // MO_RELAXED load: atomic (word-tearing-free) load with no ordering
 // constraints beyond atomicity. Unchanged by this patch.
 148 template <DecoratorSet decorators>
 149 template <DecoratorSet ds, typename T>
 150 inline typename EnableIf<
 151   HasDecorator<ds, MO_RELAXED>::value, T>::type
 152 RawAccessBarrier<decorators>::load_internal(void* addr) {
 153   return Atomic::load(reinterpret_cast<const volatile T*>(addr));
 154 }


< prev index next >