< prev index next >

src/os_cpu/linux_aarch64/vm/atomic_linux_aarch64.hpp

Print this page
rev 13495 : imported patch add_linux_aarch64


  30 
  31 // Implementation of class atomic
  32 
// Memory-barrier primitives built on the GCC intrinsics.
// FULL_MEM_BARRIER is a full two-way fence (includes StoreLoad).
#define FULL_MEM_BARRIER  __sync_synchronize()
// Acquire fence: later accesses may not be reordered above it.
// NOTE(review): the trailing ';' is part of these two macros, so the
// conventional use 'READ_MEM_BARRIER;' expands to a double semicolon —
// harmless as a statement, but would misbehave in an unbraced if/else.
#define READ_MEM_BARRIER  __atomic_thread_fence(__ATOMIC_ACQUIRE);
// Release fence: earlier accesses may not be reordered below it.
#define WRITE_MEM_BARRIER __atomic_thread_fence(__ATOMIC_RELEASE);
  36 
// Plain stores of 8/16/32-bit JVM integral types and pointer-sized
// values, in non-volatile and volatile flavors.  These are simple
// assignments: aligned accesses of these sizes are single-copy atomic
// on AArch64, so no fencing is done here; callers needing ordering use
// the *_MEM_BARRIER macros.
inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }
  48 
  49 
  50 inline jint Atomic::add(jint add_value, volatile jint* dest)


  51 {


  52  return __sync_add_and_fetch(dest, add_value);
  53 }

  54 
  55 inline void Atomic::inc(volatile jint* dest)
  56 {
  57  add(1, dest);
  58 }
  59 
  60 inline void Atomic::inc_ptr(volatile void* dest)
  61 {
  62  add_ptr(1, dest);
  63 }
  64 
  65 inline void Atomic::dec (volatile jint* dest)
  66 {
  67  add(-1, dest);
  68 }
  69 
  70 inline void Atomic::dec_ptr(volatile void* dest)
  71 {
  72  add_ptr(-1, dest);
  73 }


  87 
// Compare-and-exchange for a 1/2/4/8-byte type T.  Returns the value
// observed at *dest before the operation: equal to compare_value on
// success, the conflicting current value on failure.
template<size_t byte_size>
template<typename T>
inline T Atomic::PlatformCmpxchg<byte_size>::operator()(T exchange_value,
                                                        T volatile* dest,
                                                        T compare_value,
                                                        cmpxchg_memory_order order) const {
  STATIC_ASSERT(byte_size == sizeof(T));
  if (order == memory_order_relaxed) {
    // Relaxed ordering: use the strong (non-spuriously-failing) form of
    // __atomic_compare_exchange with no fences.  On failure the builtin
    // writes the observed value back into 'value', which is exactly the
    // value we must return; on success 'value' keeps compare_value.
    T value = compare_value;
    __atomic_compare_exchange(dest, &value, &exchange_value, /*weak*/false,
                              __ATOMIC_RELAXED, __ATOMIC_RELAXED);
    return value;
  } else {
    // Any stronger ordering falls back to the conservative full-barrier CAS.
    return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  }
}
 104 
// 64-bit stores.  A plain assignment suffices: this file is
// AArch64-specific and aligned 64-bit accesses are single-copy atomic
// there, so no lock-based fallback is needed.
inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
 107 
 108 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest)
 109 {
 110  return __sync_add_and_fetch(dest, add_value);
 111 }
 112 
 113 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest)
 114 {
 115   return (void *) add_ptr(add_value, (volatile intptr_t *) dest);
 116 }
 117 
 118 inline void Atomic::inc_ptr(volatile intptr_t* dest)
 119 {
 120  add_ptr(1, dest);
 121 }
 122 
 123 inline void Atomic::dec_ptr(volatile intptr_t* dest)
 124 {
 125  add_ptr(-1, dest);
 126 }
 127 
// Atomically swap *dest with exchange_value and return the old value.
// Per the GCC builtin documentation, __sync_lock_test_and_set is only
// an acquire barrier, so the trailing full fence is required to give
// this exchange the full-barrier semantics expected of Atomic::xchg.
inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest)
{
  intptr_t res = __sync_lock_test_and_set (dest, exchange_value);
  FULL_MEM_BARRIER;
  return res;
}
 134 
// 64-bit load: a plain dereference is atomic for aligned jlongs on AArch64.
inline jlong Atomic::load(const volatile jlong* src) { return *src; }
 136 


  30 
  31 // Implementation of class atomic
  32 
// Memory-barrier primitives built on the GCC intrinsics.
// FULL_MEM_BARRIER is a full two-way fence (includes StoreLoad).
#define FULL_MEM_BARRIER  __sync_synchronize()
// Acquire fence: later accesses may not be reordered above it.
// NOTE(review): the trailing ';' is part of these two macros, so the
// conventional use 'READ_MEM_BARRIER;' expands to a double semicolon —
// harmless as a statement, but would misbehave in an unbraced if/else.
#define READ_MEM_BARRIER  __atomic_thread_fence(__ATOMIC_ACQUIRE);
// Release fence: earlier accesses may not be reordered below it.
#define WRITE_MEM_BARRIER __atomic_thread_fence(__ATOMIC_RELEASE);
  36 
// Plain stores of 8/16/32-bit JVM integral types and pointer-sized
// values, in non-volatile and volatile flavors.  These are simple
// assignments: aligned accesses of these sizes are single-copy atomic
// on AArch64, so no fencing is done here; callers needing ordering use
// the *_MEM_BARRIER macros.
inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }
  48 
  49 
// Platform add functor for every operand size: supplies add_and_fetch,
// which the Atomic::AddAndFetch CRTP base presumably adapts to the
// shared Atomic::add front end — confirm against shared atomic.hpp.
template<size_t byte_size>
struct Atomic::PlatformAdd
  : Atomic::AddAndFetch<Atomic::PlatformAdd<byte_size> >
{
  // Atomically add add_value to *dest; returns the updated value.
  // __sync_add_and_fetch is a full-barrier read-modify-write.
  template<typename I, typename D>
  D add_and_fetch(I add_value, D volatile* dest) const {
    return __sync_add_and_fetch(dest, add_value);
  }
};
  59 
  60 inline void Atomic::inc(volatile jint* dest)
  61 {
  62  add(1, dest);
  63 }
  64 
  65 inline void Atomic::inc_ptr(volatile void* dest)
  66 {
  67  add_ptr(1, dest);
  68 }
  69 
  70 inline void Atomic::dec (volatile jint* dest)
  71 {
  72  add(-1, dest);
  73 }
  74 
  75 inline void Atomic::dec_ptr(volatile void* dest)
  76 {
  77  add_ptr(-1, dest);
  78 }


  92 
// Compare-and-exchange for a 1/2/4/8-byte type T.  Returns the value
// observed at *dest before the operation: equal to compare_value on
// success, the conflicting current value on failure.
template<size_t byte_size>
template<typename T>
inline T Atomic::PlatformCmpxchg<byte_size>::operator()(T exchange_value,
                                                        T volatile* dest,
                                                        T compare_value,
                                                        cmpxchg_memory_order order) const {
  STATIC_ASSERT(byte_size == sizeof(T));
  if (order == memory_order_relaxed) {
    // Relaxed ordering: use the strong (non-spuriously-failing) form of
    // __atomic_compare_exchange with no fences.  On failure the builtin
    // writes the observed value back into 'value', which is exactly the
    // value we must return; on success 'value' keeps compare_value.
    T value = compare_value;
    __atomic_compare_exchange(dest, &value, &exchange_value, /*weak*/false,
                              __ATOMIC_RELAXED, __ATOMIC_RELAXED);
    return value;
  } else {
    // Any stronger ordering falls back to the conservative full-barrier CAS.
    return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  }
}
 109 
// 64-bit stores.  A plain assignment suffices: this file is
// AArch64-specific and aligned 64-bit accesses are single-copy atomic
// there, so no lock-based fallback is needed.
inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }










 112 
 113 inline void Atomic::inc_ptr(volatile intptr_t* dest)
 114 {
 115  add_ptr(1, dest);
 116 }
 117 
 118 inline void Atomic::dec_ptr(volatile intptr_t* dest)
 119 {
 120  add_ptr(-1, dest);
 121 }
 122 
// Atomically swap *dest with exchange_value and return the old value.
// Per the GCC builtin documentation, __sync_lock_test_and_set is only
// an acquire barrier, so the trailing full fence is required to give
// this exchange the full-barrier semantics expected of Atomic::xchg.
inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest)
{
  intptr_t res = __sync_lock_test_and_set (dest, exchange_value);
  FULL_MEM_BARRIER;
  return res;
}
 129 
// 64-bit load: a plain dereference is atomic for aligned jlongs on AArch64.
inline jlong Atomic::load(const volatile jlong* src) { return *src; }
 131 
< prev index next >