< prev index next >

src/os_cpu/linux_aarch64/vm/atomic_linux_aarch64.inline.hpp

Print this page
rev 10437 : 8151775: aarch64: add support for 8.1 LSE atomic operations
Reviewed-by: aph


  19  *
  20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  21  * or visit www.oracle.com if you need additional information or have any
  22  * questions.
  23  *
  24  */
  25 
  26 #ifndef OS_CPU_LINUX_AARCH64_VM_ATOMIC_LINUX_AARCH64_INLINE_HPP
  27 #define OS_CPU_LINUX_AARCH64_VM_ATOMIC_LINUX_AARCH64_INLINE_HPP
  28 
  29 #include "runtime/atomic.hpp"
  30 #include "runtime/os.hpp"
  31 #include "vm_version_aarch64.hpp"
  32 
  33 // Implementation of class atomic
  34 
// Memory barrier helpers used by the atomic and ordering code in this file.
  35 #define FULL_MEM_BARRIER  __sync_synchronize()
// NOTE(review): the trailing ';' is part of the two macro bodies below, so
// expansions already end the statement — uses should not add a second one.
  36 #define READ_MEM_BARRIER  __atomic_thread_fence(__ATOMIC_ACQUIRE);
  37 #define WRITE_MEM_BARRIER __atomic_thread_fence(__ATOMIC_RELEASE);
  38 













  39 inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
  40 inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
  41 inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
  42 inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
  43 inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }
  44 
  45 inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
  46 inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
  47 inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
  48 inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
  49 inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }
  50 
  51 
  52 inline jint Atomic::add(jint add_value, volatile jint* dest)
  53 {











  54  return __sync_add_and_fetch(dest, add_value);
  55 }
  56 
  57 inline void Atomic::inc(volatile jint* dest)
  58 {
  59  add(1, dest);
  60 }
  61 
  62 inline void Atomic::inc_ptr(volatile void* dest)
  63 {
  64  add_ptr(1, dest);
  65 }
  66 
  67 inline void Atomic::dec (volatile jint* dest)
  68 {
  69  add(-1, dest);
  70 }
  71 
  72 inline void Atomic::dec_ptr(volatile void* dest)
  73 {
  74  add_ptr(-1, dest);
  75 }
  76 
  77 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest)
  78 {












  79   jint res = __sync_lock_test_and_set (dest, exchange_value);
  80   FULL_MEM_BARRIER;
  81   return res;
  82 }
  83 
  84 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest)
  85 {
  86   return (void *) xchg_ptr((intptr_t) exchange_value,
  87                            (volatile intptr_t*) dest);
  88 }
  89 
  90 
  91 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value)
  92 {











  93  return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  94 }
  95 
  96 inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
  97 inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
  98 
  99 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest)
 100 {











 101  return __sync_add_and_fetch(dest, add_value);
 102 }
 103 
 104 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest)
 105 {
 106   return (void *) add_ptr(add_value, (volatile intptr_t *) dest);
 107 }
 108 
 109 inline void Atomic::inc_ptr(volatile intptr_t* dest)
 110 {
 111  add_ptr(1, dest);
 112 }
 113 
 114 inline void Atomic::dec_ptr(volatile intptr_t* dest)
 115 {
 116  add_ptr(-1, dest);
 117 }
 118 
 119 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest)
 120 {












 121   intptr_t res = __sync_lock_test_and_set (dest, exchange_value);
 122   FULL_MEM_BARRIER;
 123   return res;
 124 }
 125 
 126 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value)
 127 {











 128  return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
 129 }
 130 
 131 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value)
 132 {











 133  return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
 134 }
 135 
 136 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value)
 137 {
 138   return (void *) cmpxchg_ptr((intptr_t) exchange_value,
 139                               (volatile intptr_t*) dest,
 140                               (intptr_t) compare_value);
 141 }
 142 
 143 inline jlong Atomic::load(volatile jlong* src) { return *src; }
 144 
 145 #endif // OS_CPU_LINUX_AARCH64_VM_ATOMIC_LINUX_AARCH64_INLINE_HPP


  19  *
  20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  21  * or visit www.oracle.com if you need additional information or have any
  22  * questions.
  23  *
  24  */
  25 
  26 #ifndef OS_CPU_LINUX_AARCH64_VM_ATOMIC_LINUX_AARCH64_INLINE_HPP
  27 #define OS_CPU_LINUX_AARCH64_VM_ATOMIC_LINUX_AARCH64_INLINE_HPP
  28 
  29 #include "runtime/atomic.hpp"
  30 #include "runtime/os.hpp"
  31 #include "vm_version_aarch64.hpp"
  32 
  33 // Implementation of class atomic
  34 
// Memory barrier helpers used by the atomic and ordering code in this file.
  35 #define FULL_MEM_BARRIER  __sync_synchronize()
// NOTE(review): the trailing ';' is part of the two macro bodies below.
  36 #define READ_MEM_BARRIER  __atomic_thread_fence(__ATOMIC_ACQUIRE);
  37 #define WRITE_MEM_BARRIER __atomic_thread_fence(__ATOMIC_RELEASE);
  38 
// Hand-assembled encodings of the ARMv8.1 LSE atomic instructions, emitted
// as raw .word data so the file still builds with assemblers that predate
// 8.1 support.  Each opcode has its operand registers baked into the
// encoding (source operand in w0/x0, address in x1, result/old value in
// w2/x2); every use site below must pin its variables to exactly those
// registers with `register ... asm("...")`.
  39 // CASALW w2, w0, [x1]
  40 #define CASALW          ".word 0b10001000111000101111110000100000;"
  41 // CASAL x2, x0, [x1]
  42 #define CASAL           ".word 0b11001000111000101111110000100000;"
  43 // LDADDALW w0, w2, [x1]
  44 #define LDADDALW        ".word 0b10111000111000000000000000100010;"
// 64-bit form: operands are x registers (use site pins x0/x1/x2).
  45 // LDADDAL x0, x2, [x1]
  46 #define LDADDAL         ".word 0b11111000111000000000000000100010;"
  47 // SWPW w0, w2, [x1]
  48 #define SWPW            ".word 0b10111000001000001000000000100010;"
  49 // SWP x0, x2, [x1]
  50 #define SWP             ".word 0b11111000001000001000000000100010;"
  51 
  52 inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
  53 inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
  54 inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
  55 inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
  56 inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }
  57 
  58 inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
  59 inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
  60 inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
  61 inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
  62 inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }
  63 
  64 
// Atomically add add_value to *dest; returns the updated (new) value.
  65 inline jint Atomic::add(jint add_value, volatile jint* dest)
  66 {
// ARMv8.1 LSE path: a single hand-encoded LDADDALW.  The operands must
// live in exactly w0 (addend), x1 (address) and w2 (old value) because
// those registers are baked into the .word encoding (see its #define).
  67  if (UseLSE) {
  68    register jint r_add_value asm("w0") = add_value;
  69    register volatile jint *r_dest asm("x1") = dest;
  70    register jint r_result asm("w2");
  71    __asm volatile(LDADDALW
  72                   : [_result]"=r"(r_result)
  73                   : [_add_value]"r"(r_add_value),
  74                     [_dest]"r"(r_dest)
  75                   : "memory");
// LDADDALW leaves the *old* value in w2; re-add add_value to produce the
// add-and-fetch (new value) result this function must return.
  76    return r_result+add_value;
  77  }
// Fallback for pre-8.1 hardware: GCC builtin with full barrier semantics.
  78  return __sync_add_and_fetch(dest, add_value);
  79 }
  80 
  81 inline void Atomic::inc(volatile jint* dest)
  82 {
  83  add(1, dest);
  84 }
  85 
  86 inline void Atomic::inc_ptr(volatile void* dest)
  87 {
  88  add_ptr(1, dest);
  89 }
  90 
  91 inline void Atomic::dec (volatile jint* dest)
  92 {
  93  add(-1, dest);
  94 }
  95 
  96 inline void Atomic::dec_ptr(volatile void* dest)
  97 {
  98  add_ptr(-1, dest);
  99 }
 100 
// Atomic exchange: store exchange_value into *dest, return the old value.
 101 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest)
 102 {
// LSE path: hand-encoded SWPW with operands pinned to w0/x1/w2 (see the
// SWPW #define).  The encoded form is the plain SWP (no acquire/release
// suffix), hence the FULL_MEM_BARRIER that follows — mirroring the
// fallback path below.
 103   if (UseLSE) {
 104    register jint r_exchange_value asm("w0") = exchange_value;
 105    register volatile jint *r_dest asm("x1") = dest;
 106    register jint r_result asm("w2");
 107    __asm volatile(SWPW
 108                   : [_result]"=r"(r_result)
 109                   : [_exchange_value]"r"(r_exchange_value),
 110                     [_dest]"r"(r_dest)
 111                   : "memory");
 112    FULL_MEM_BARRIER;
 113    return r_result;
 114   }
// Fallback: __sync_lock_test_and_set is acquire-only per the GCC docs,
// so a full fence is required afterwards.
 115   jint res = __sync_lock_test_and_set (dest, exchange_value);
 116   FULL_MEM_BARRIER;
 117   return res;
 118 }
 119 
 120 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest)
 121 {
 122   return (void *) xchg_ptr((intptr_t) exchange_value,
 123                            (volatile intptr_t*) dest);
 124 }
 125 

// Compare-and-swap: if *dest == compare_value, store exchange_value.
// Always returns the value *dest held before the operation.
 126 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value)
 127 {
// LSE path: hand-encoded CASALW (acquire+release variant), operands
// pinned to w0 (new value), x1 (address), w2 (compare/old value).
 128  if (UseLSE) {
 129    register jint r_exchange_value asm("w0") = exchange_value;
 130    register volatile jint *r_dest asm("x1") = dest;
 131    register jint r_compare_value asm("w2") = compare_value;
 132    __asm volatile(CASALW
 133                   : [_compare_value]"+r"(r_compare_value)
 134                   : [_exchange_value]"r"(r_exchange_value),
 135                     [_dest]"r"(r_dest)
 136                   : "memory");
// CASALW writes the old value of *dest back into w2 ("+r" constraint).
 137    return r_compare_value;
 138  }
// Fallback: GCC builtin with the same value-returning CAS semantics.
 139  return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
 140 }
 141 
// Plain 64-bit stores.
 142 inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
 143 inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
 144 
// Pointer-sized atomic add; returns the updated (new) value of *dest.
 145 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest)
 146 {
// LSE path: 64-bit LDADDAL with operands pinned to x0/x1/x2 by the
// hand-assembled encoding (see its #define).
 147  if (UseLSE) {
 148    register intptr_t r_add_value asm("x0") = add_value;
 149    register volatile intptr_t *r_dest asm("x1") = dest;
 150    register intptr_t r_result asm("x2");
 151    __asm volatile(LDADDAL
 152                   : [_result]"=r"(r_result)
 153                   : [_add_value]"r"(r_add_value),
 154                     [_dest]"r"(r_dest)
 155                   : "memory");
// x2 holds the old value; re-add add_value for add-and-fetch semantics.
 156    return r_result+add_value;
 157  }
// Fallback: GCC builtin with full barrier semantics.
 158  return __sync_add_and_fetch(dest, add_value);
 159 }
 160 
// void* flavor, forwarded to the intptr_t implementation.
 161 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest)
 162 {
 163   return (void *) add_ptr(add_value, (volatile intptr_t *) dest);
 164 }
 165 
 166 inline void Atomic::inc_ptr(volatile intptr_t* dest)
 167 {
 168  add_ptr(1, dest);
 169 }
 170 
 171 inline void Atomic::dec_ptr(volatile intptr_t* dest)
 172 {
 173  add_ptr(-1, dest);
 174 }
 175 
// Pointer-sized atomic exchange; returns the previous value of *dest.
 176 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest)
 177 {
// LSE path: hand-encoded 64-bit SWP, operands pinned to x0/x1/x2.  The
// encoded form is the plain SWP (no acquire/release suffix), hence the
// full fence that follows — mirroring the fallback path.
 178   if (UseLSE) {
 179    register intptr_t r_exchange_value asm("x0") = exchange_value;
 180    register volatile intptr_t *r_dest asm("x1") = dest;
 181    register intptr_t r_result asm("x2");
 182    __asm volatile(SWP
 183                   : [_result]"=r"(r_result)
 184                   : [_exchange_value]"r"(r_exchange_value),
 185                     [_dest]"r"(r_dest)
 186                   : "memory");
 187    FULL_MEM_BARRIER;
 188    return r_result;
 189   }
// Fallback: __sync_lock_test_and_set is acquire-only; add a full fence.
 190   intptr_t res = __sync_lock_test_and_set (dest, exchange_value);
 191   FULL_MEM_BARRIER;
 192   return res;
 193 }
 194 
// 64-bit compare-and-swap; returns the previous value of *dest.
 195 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value)
 196 {
// LSE path: hand-encoded CASAL (acquire+release), operands pinned to
// x0 (new value), x1 (address), x2 (compare/old value).
 197  if (UseLSE) {
 198    register jlong r_exchange_value asm("x0") = exchange_value;
 199    register volatile jlong *r_dest asm("x1") = dest;
 200    register jlong r_compare_value asm("x2") = compare_value;
 201    __asm volatile(CASAL
 202                   : [_compare_value]"+r"(r_compare_value)
 203                   : [_exchange_value]"r"(r_exchange_value),
 204                     [_dest]"r"(r_dest)
 205                   : "memory");
// CASAL writes the old value of *dest back into x2 ("+r" constraint).
 206    return r_compare_value;
 207  }
 208  return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
 209 }
 210 
// Pointer-sized variant: same CASAL sequence (intptr_t shares the 64-bit
// encoding with the jlong overload on this LP64 target).
 211 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value)
 212 {
 213  if (UseLSE) {
 214    register intptr_t r_exchange_value asm("x0") = exchange_value;
 215    register volatile intptr_t *r_dest asm("x1") = dest;
 216    register intptr_t r_compare_value asm("x2") = compare_value;
 217    __asm volatile(CASAL
 218                   : [_compare_value]"+r"(r_compare_value)
 219                   : [_exchange_value]"r"(r_exchange_value),
 220                     [_dest]"r"(r_dest)
 221                   : "memory");
 222    return r_compare_value;
 223  }
 224  return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
 225 }
 226 
 227 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value)
 228 {
 229   return (void *) cmpxchg_ptr((intptr_t) exchange_value,
 230                               (volatile intptr_t*) dest,
 231                               (intptr_t) compare_value);
 232 }
 233 
 234 inline jlong Atomic::load(volatile jlong* src) { return *src; }
 235 
 236 #endif // OS_CPU_LINUX_AARCH64_VM_ATOMIC_LINUX_AARCH64_INLINE_HPP
< prev index next >