
src/os_cpu/linux_aarch64/vm/atomic_linux_aarch64.hpp

rev 13446 : imported patch linux_aarch64
rev 13452 : [mq]: coleen_review1


  68 }
  69 
  70 inline void Atomic::dec_ptr(volatile void* dest)
  71 {
  72  add_ptr(-1, dest);
  73 }
  74 
  75 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest)
  76 {
  77   jint res = __sync_lock_test_and_set (dest, exchange_value);
  78   FULL_MEM_BARRIER;
  79   return res;
  80 }
  81 
  82 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest)
  83 {
  84   return (void *) xchg_ptr((intptr_t) exchange_value,
  85                            (volatile intptr_t*) dest);
  86 }
  87 
  88 template <typename T> T generic_cmpxchg(T exchange_value, volatile T* dest,
  89                                         T compare_value, cmpxchg_memory_order order)
  90 {
  91   if (order == memory_order_relaxed) {
  92     T value = compare_value;
  93     __atomic_compare_exchange(dest, &value, &exchange_value, /*weak*/false,
  94                               __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  95     return value;
  96   } else {
  97     return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  98   }
  99 }
 100 
 101 #define VM_HAS_SPECIALIZED_CMPXCHG_BYTE
 102 inline jbyte Atomic::cmpxchg (jbyte exchange_value, volatile jbyte* dest, jbyte compare_value, cmpxchg_memory_order order)
 103 {
 104   return generic_cmpxchg(exchange_value, dest, compare_value, order);
 105 }
 106 
 107 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, cmpxchg_memory_order order)
 108 {
 109   return generic_cmpxchg(exchange_value, dest, compare_value, order);
 110 }
 111 
 112 inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
 113 inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
 114 
 115 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest)
 116 {
 117  return __sync_add_and_fetch(dest, add_value);
 118 }
 119 
 120 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest)
 121 {
 122   return (void *) add_ptr(add_value, (volatile intptr_t *) dest);
 123 }
 124 
 125 inline void Atomic::inc_ptr(volatile intptr_t* dest)
 126 {
 127  add_ptr(1, dest);
 128 }
 129 
 130 inline void Atomic::dec_ptr(volatile intptr_t* dest)
 131 {
 132  add_ptr(-1, dest);
 133 }
 134 
 135 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest)
 136 {
 137   intptr_t res = __sync_lock_test_and_set (dest, exchange_value);
 138   FULL_MEM_BARRIER;
 139   return res;
 140 }
 141 
 142 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order)
 143 {
 144   return generic_cmpxchg(exchange_value, dest, compare_value, order);
 145 }
 146 
 147 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order)
 148 {
 149   return generic_cmpxchg(exchange_value, dest, compare_value, order);
 150 }
 151 
 152 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order)
 153 {
 154   return (void *) cmpxchg_ptr((intptr_t) exchange_value,
 155                               (volatile intptr_t*) dest,
 156                               (intptr_t) compare_value,
 157                               order);
 158 }
 159 
 160 inline jlong Atomic::load(const volatile jlong* src) { return *src; }
 161 
 162 #endif // OS_CPU_LINUX_AARCH64_VM_ATOMIC_LINUX_AARCH64_HPP
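
In this version of the file, every Atomic::cmpxchg overload (jbyte, jint, jlong, intptr_t, void*) funnels into the generic_cmpxchg template above, which issues a relaxed compare-and-swap through the __atomic_compare_exchange builtin when memory_order_relaxed is requested and otherwise falls back to __sync_val_compare_and_swap, which implies a full barrier. The sketch below is a minimal standalone illustration of that dispatch, not HotSpot code; cas_order and cas_sketch are hypothetical stand-ins for cmpxchg_memory_order and generic_cmpxchg, and it assumes a GCC or Clang toolchain that provides these builtins.

#include <cstdio>

enum cas_order { order_relaxed, order_conservative };

// Stand-in for generic_cmpxchg: returns the value observed at *dest before
// the operation, storing exchange_value only if that value equals compare_value.
template <typename T>
T cas_sketch(T exchange_value, volatile T* dest, T compare_value, cas_order order) {
  if (order == order_relaxed) {
    T value = compare_value;
    // On success *dest becomes exchange_value; on failure 'value' is
    // overwritten with the current contents of *dest, so either way the
    // old value is what gets returned.
    __atomic_compare_exchange(dest, &value, &exchange_value, /*weak*/ false,
                              __ATOMIC_RELAXED, __ATOMIC_RELAXED);
    return value;
  } else {
    // Legacy builtin: full-barrier semantics, returns the old value.
    return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  }
}

int main() {
  volatile int v = 1;
  int a = cas_sketch(2, &v, 1, order_relaxed);       // succeeds, a == 1
  int b = cas_sketch(3, &v, 1, order_conservative);  // fails,    b == 2
  printf("%d %d %d\n", a, b, (int)v);                // prints "1 2 2"
  return 0;
}

The trailing FULL_MEM_BARRIER in the xchg and xchg_ptr implementations is there because __sync_lock_test_and_set is only documented as an acquire barrier; the extra fence (defined elsewhere in this port) gives the exchange the full two-way ordering HotSpot's atomics conventionally provide.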


  68 }
  69 
  70 inline void Atomic::dec_ptr(volatile void* dest)
  71 {
  72  add_ptr(-1, dest);
  73 }
  74 
  75 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest)
  76 {
  77   jint res = __sync_lock_test_and_set (dest, exchange_value);
  78   FULL_MEM_BARRIER;
  79   return res;
  80 }
  81 
  82 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest)
  83 {
  84   return (void *) xchg_ptr((intptr_t) exchange_value,
  85                            (volatile intptr_t*) dest);
  86 }
  87 
  88 template<size_t byte_size>
  89 template<typename T>
  90 inline T Atomic::PlatformCmpxchg<byte_size>::operator()(T exchange_value,
  91                                                         T volatile* dest,
  92                                                         T compare_value,
  93                                                         cmpxchg_memory_order order) const {
  94   STATIC_ASSERT(byte_size == sizeof(T));
  95   if (order == memory_order_relaxed) {
  96     T value = compare_value;
  97     __atomic_compare_exchange(dest, &value, &exchange_value, /*weak*/false,
  98                               __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  99     return value;
 100   } else {
 101     return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
 102   }
 103 }
 104 
 105 inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
 106 inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
 107 
 108 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest)
 109 {
 110  return __sync_add_and_fetch(dest, add_value);
 111 }
 112 
 113 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest)
 114 {
 115   return (void *) add_ptr(add_value, (volatile intptr_t *) dest);
 116 }
 117 
 118 inline void Atomic::inc_ptr(volatile intptr_t* dest)
 119 {
 120  add_ptr(1, dest);
 121 }
 122 
 123 inline void Atomic::dec_ptr(volatile intptr_t* dest)
 124 {
 125  add_ptr(-1, dest);
 126 }
 127 
 128 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest)
 129 {
 130   intptr_t res = __sync_lock_test_and_set (dest, exchange_value);
 131   FULL_MEM_BARRIER;
 132   return res;
 133 }
 134 
 135 inline jlong Atomic::load(const volatile jlong* src) { return *src; }
 136 
 137 #endif // OS_CPU_LINUX_AARCH64_VM_ATOMIC_LINUX_AARCH64_HPP
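
In the revised version above, the per-type cmpxchg overloads are gone: the platform now supplies a PlatformCmpxchg<byte_size> function object whose templated operator() is keyed on operand size and guarded by STATIC_ASSERT, with the jbyte/jint/jlong/pointer dispatch presumably handled by the shared Atomic front end rather than by this file. The snippet below is a minimal standalone sketch of that size-keyed function-object pattern, not HotSpot code; PlatformCas and cas_order are hypothetical stand-ins, and it again assumes the GCC/Clang __atomic and __sync builtins.

#include <cstddef>
#include <cstdio>

enum cas_order { order_relaxed, order_conservative };

// Primary template keyed on operand size; the concrete operand type is a
// member-template parameter, checked against byte_size at compile time.
template <size_t byte_size>
struct PlatformCas {
  template <typename T>
  T operator()(T exchange_value, T volatile* dest, T compare_value,
               cas_order order) const {
    static_assert(byte_size == sizeof(T), "operand size must match");
    if (order == order_relaxed) {
      T value = compare_value;
      __atomic_compare_exchange(dest, &value, &exchange_value, /*weak*/ false,
                                __ATOMIC_RELAXED, __ATOMIC_RELAXED);
      return value;  // old value, whether or not the exchange happened
    } else {
      return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
    }
  }
};

int main() {
  volatile long counter = 10;
  long old = PlatformCas<sizeof(long)>()(11L, &counter, 10L, order_conservative);
  printf("old=%ld new=%ld\n", old, (long)counter);  // old=10 new=11
  return 0;
}

The compile-time size check plays the same role as the STATIC_ASSERT above: instantiating the operator with a type whose size does not match byte_size fails to compile instead of silently truncating the operands.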