
src/os_cpu/linux_sparc/vm/atomic_linux_sparc.inline.hpp

rev 11200 : 8155949: Support relaxed semantics in cmpxchg
Reviewed-by:
Contributed-by: HORII@jp.ibm.com, mdoerr
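
This change threads a new cmpxchg_memory_order argument through the Atomic::cmpxchg family so call sites that do not need the default full-fence behavior can request relaxed semantics; the SPARC bodies below accept the argument but keep their existing conservative code. A minimal standalone model of the two orderings, using GCC/Clang builtins (the enum here only mirrors the shape of what the shared atomic.hpp in this changeset defines; it is not HotSpot code):

#include <cstdint>

// Illustration only: relaxed vs. conservative compare-and-swap.
enum cmpxchg_memory_order_model { model_relaxed, model_conservative };

inline int32_t cmpxchg_model(int32_t exchange_value, volatile int32_t* dest,
                             int32_t compare_value,
                             cmpxchg_memory_order_model order) {
  if (order == model_relaxed) {
    // Atomicity only: no ordering constraint on surrounding loads/stores.
    __atomic_compare_exchange_n(const_cast<int32_t*>(dest), &compare_value,
                                exchange_value, /*weak=*/false,
                                __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  } else {
    // Conservative: sequentially consistent ordering around the CAS.
    __atomic_compare_exchange_n(const_cast<int32_t*>(dest), &compare_value,
                                exchange_value, /*weak=*/false,
                                __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  }
  return compare_value;  // previous value of *dest on success and failure
}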
Old version:

 131     "  nop\n\t"
 132     " mov    %%o2, %0\n\t"
 133     : "=r" (rv)
 134     : "r" (exchange_value), "r" (dest)
 135     : "memory", "o2", "o3");
 136 #else
 137   __asm__ volatile(
 138     "swap    [%2],%1\n\t"
 139     : "=r" (rv)
 140     : "0" (exchange_value) /* we use same register as for return value */, "r" (dest)
 141     : "memory");
 142 #endif // _LP64
 143   return rv;
 144 }
 145 
 146 inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
 147   return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
 148 }
 149 
 150 
 151 inline jint     Atomic::cmpxchg    (jint     exchange_value, volatile jint*     dest, jint     compare_value) {
 152   jint rv;
 153   __asm__ volatile(
 154     " cas    [%2], %3, %0"
 155     : "=r" (rv)
 156     : "0" (exchange_value), "r" (dest), "r" (compare_value)
 157     : "memory");
 158   return rv;
 159 }
 160 
 161 inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value) {
 162 #ifdef _LP64
 163   jlong rv;
 164   __asm__ volatile(
 165     " casx   [%2], %3, %0"
 166     : "=r" (rv)
 167     : "0" (exchange_value), "r" (dest), "r" (compare_value)
 168     : "memory");
 169   return rv;
 170 #else
 171   volatile jlong_accessor evl, cvl, rv;
 172   evl.long_value = exchange_value;
 173   cvl.long_value = compare_value;
 174 
 175   __asm__ volatile(
 176     " sllx   %2, 32, %2\n\t"
 177     " srl    %3, 0,  %3\n\t"
 178     " or     %2, %3, %2\n\t"
 179     " sllx   %5, 32, %5\n\t"
 180     " srl    %6, 0,  %6\n\t"
 181     " or     %5, %6, %5\n\t"
 182     " casx   [%4], %5, %2\n\t"
 183     " srl    %2, 0, %1\n\t"
 184     " srlx   %2, 32, %0\n\t"
 185     : "=r" (rv.words[0]), "=r" (rv.words[1])
 186     : "r"  (evl.words[0]), "r" (evl.words[1]), "r" (dest), "r" (cvl.words[0]), "r" (cvl.words[1])
 187     : "memory");
 188 
 189   return rv.long_value;
 190 #endif
 191 }
 192 
 193 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
 194   intptr_t rv;
 195 #ifdef _LP64
 196   __asm__ volatile(
 197     " casx    [%2], %3, %0"
 198     : "=r" (rv)
 199     : "0" (exchange_value), "r" (dest), "r" (compare_value)
 200     : "memory");
 201 #else
 202   __asm__ volatile(
 203     " cas     [%2], %3, %0"
 204     : "=r" (rv)
 205     : "0" (exchange_value), "r" (dest), "r" (compare_value)
 206     : "memory");
 207 #endif // _LP64
 208   return rv;
 209 }
 210 
 211 inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value) {
 212   return (void*)cmpxchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest, (intptr_t)compare_value);
 213 }
 214 
 215 #endif // OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP

New version (rev 11200):

 131     "  nop\n\t"
 132     " mov    %%o2, %0\n\t"
 133     : "=r" (rv)
 134     : "r" (exchange_value), "r" (dest)
 135     : "memory", "o2", "o3");
 136 #else
 137   __asm__ volatile(
 138     "swap    [%2],%1\n\t"
 139     : "=r" (rv)
 140     : "0" (exchange_value) /* we use same register as for return value */, "r" (dest)
 141     : "memory");
 142 #endif // _LP64
 143   return rv;
 144 }
 145 
 146 inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
 147   return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
 148 }
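
For context: the 64-bit path of xchg above is a casx retry loop (only its tail is visible from line 131), while the 32-bit path can use the single swap instruction, which atomically exchanges a register with a word in memory. A standalone builtin-based model of what both paths compute (not HotSpot code):

#include <cstdint>

// Store exchange_value to *dest and return the old contents, atomically.
inline intptr_t xchg_model(intptr_t exchange_value, volatile intptr_t* dest) {
  return __atomic_exchange_n(const_cast<intptr_t*>(dest), exchange_value,
                             __ATOMIC_SEQ_CST);
}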
 149 
 150 
 151 inline jint     Atomic::cmpxchg    (jint     exchange_value, volatile jint*     dest, jint     compare_value, cmpxchg_memory_order order) {
 152   jint rv;
 153   __asm__ volatile(
 154     " cas    [%2], %3, %0"
 155     : "=r" (rv)
 156     : "0" (exchange_value), "r" (dest), "r" (compare_value)
 157     : "memory");
 158   return rv;
 159 }
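
The cas instruction compares the word at [dest] with compare_value and, on a match, swaps it with the register holding exchange_value; either way that register ends up holding the word's previous contents, which is exactly what cmpxchg must return. A non-atomic model of the effect (the hardware performs these steps indivisibly; not HotSpot code):

#include <cstdint>

// Effect of "cas [dest], compare, rv", written out sequentially.
inline int32_t cas_model(volatile int32_t* dest, int32_t compare_value,
                         int32_t exchange_value) {
  int32_t old = *dest;            // read current contents
  if (old == compare_value) {
    *dest = exchange_value;       // swap only on a match
  }
  return old;                     // previous value, match or not
}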
 160 
 161 inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value, cmpxchg_memory_order order) {
 162 #ifdef _LP64
 163   jlong rv;
 164   __asm__ volatile(
 165     " casx   [%2], %3, %0"
 166     : "=r" (rv)
 167     : "0" (exchange_value), "r" (dest), "r" (compare_value)
 168     : "memory");
 169   return rv;
 170 #else
 171   volatile jlong_accessor evl, cvl, rv;
 172   evl.long_value = exchange_value;
 173   cvl.long_value = compare_value;
 174 
 175   __asm__ volatile(
 176     " sllx   %2, 32, %2\n\t"
 177     " srl    %3, 0,  %3\n\t"
 178     " or     %2, %3, %2\n\t"
 179     " sllx   %5, 32, %5\n\t"
 180     " srl    %6, 0,  %6\n\t"
 181     " or     %5, %6, %5\n\t"
 182     " casx   [%4], %5, %2\n\t"
 183     " srl    %2, 0, %1\n\t"
 184     " srlx   %2, 32, %0\n\t"
 185     : "=r" (rv.words[0]), "=r" (rv.words[1])
 186     : "r"  (evl.words[0]), "r" (evl.words[1]), "r" (dest), "r" (cvl.words[0]), "r" (cvl.words[1])
 187     : "memory");
 188 
 189   return rv.long_value;
 190 #endif
 191 }
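
On the 32-bit build a jlong occupies two 32-bit registers, so the asm first packs each half-pair into one 64-bit register (sllx/srl/or), performs a single casx, and then splits the result back into halves (srl/srlx); words[0] is the high half on big-endian SPARC. The same packing in plain C++ (sketch, not HotSpot code):

#include <cstdint>

inline uint64_t pack64(uint32_t hi, uint32_t lo) {
  return ((uint64_t)hi << 32) | (uint64_t)lo;  // sllx hi,32 ; srl lo,0 ; or
}

inline void split64(uint64_t v, uint32_t* hi, uint32_t* lo) {
  *lo = (uint32_t)v;          // srl  v, 0,  lo
  *hi = (uint32_t)(v >> 32);  // srlx v, 32, hi
}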
 192 
 193 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
 194   intptr_t rv;
 195 #ifdef _LP64
 196   __asm__ volatile(
 197     " casx    [%2], %3, %0"
 198     : "=r" (rv)
 199     : "0" (exchange_value), "r" (dest), "r" (compare_value)
 200     : "memory");
 201 #else
 202   __asm__ volatile(
 203     " cas     [%2], %3, %0"
 204     : "=r" (rv)
 205     : "0" (exchange_value), "r" (dest), "r" (compare_value)
 206     : "memory");
 207 #endif // _LP64
 208   return rv;
 209 }
 210 
 211 inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value, cmpxchg_memory_order order) {
 212   return (void*)cmpxchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest, (intptr_t)compare_value, order);
 213 }
 214 
 215 #endif // OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP
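
The void* overloads show the delegation pattern used throughout this file: pointer operations are routed through the one integer variant that carries an asm body, using plain casts. A standalone rendering of the pattern, with a builtin standing in for the asm (sketch, not HotSpot code):

#include <cstdint>

// One integer-width CAS does the real work; the pointer variant is casts.
inline intptr_t cmpxchg_int(intptr_t x, volatile intptr_t* d, intptr_t c) {
  __atomic_compare_exchange_n(const_cast<intptr_t*>(d), &c, x, /*weak=*/false,
                              __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return c;  // previous value at *d
}

inline void* cmpxchg_ptr_model(void* x, volatile void* d, void* c) {
  return (void*)cmpxchg_int((intptr_t)x, (volatile intptr_t*)d, (intptr_t)c);
}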