104 intptr_t rv = exchange_value;
105 __asm__ volatile(
106 "1:\n\t"
107 " mov %1, %%o3\n\t"
108 " ldx [%2], %%o2\n\t"
109 " casx [%2], %%o2, %%o3\n\t"
110 " cmp %%o2, %%o3\n\t"
111 " bne %%xcc, 1b\n\t"
112 " nop\n\t"
113 " mov %%o2, %0\n\t"
114 : "=r" (rv)
115 : "r" (exchange_value), "r" (dest)
116 : "memory", "o2", "o3");
117 return rv;
118 }
119
120 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
121 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
122 }
123
124
// Atomically set *dest to exchange_value iff *dest equals compare_value.
// Returns the prior contents of *dest (equal to compare_value on success).
// Uses the SPARC 32-bit 'cas' instruction: if [dest] == compare_value,
// cas swaps register %0 with [dest]; otherwise it loads [dest] into %0.
// The "0" input constraint pre-loads exchange_value into the same register
// as the output 'rv', as cas requires.
// NOTE(review): the 'order' parameter is not consulted in this body; the
// "memory" clobber is only a compiler barrier. Presumably SPARC TSO (or
// fencing done by callers) makes this sufficient — confirm.
inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, cmpxchg_memory_order order) {
  jint rv;
  __asm__ volatile(
    " cas [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
  return rv;
}
134
// 64-bit compare-and-exchange: atomically set *dest to exchange_value iff
// *dest equals compare_value; returns the prior contents of *dest.
// Same pattern as the jint overload, but with the 64-bit 'casx'
// instruction. The "0" constraint ties exchange_value to the output
// register %0, which casx swaps with [dest] on a match.
// NOTE(review): 'order' is ignored here; "memory" is a compiler-only
// barrier — confirm hardware ordering is handled elsewhere.
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
  jlong rv;
  __asm__ volatile(
    " casx [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
  return rv;
}
144
// Pointer-width compare-and-exchange: atomically set *dest to
// exchange_value iff *dest equals compare_value; returns the prior value.
// intptr_t is 64 bits on this port (the void* overload below funnels all
// pointer CAS traffic here), so the 64-bit 'casx' instruction is used,
// identically to the jlong overload.
// NOTE(review): 'order' is ignored here, as in the other overloads —
// confirm that is intentional for this platform.
inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
  intptr_t rv;
  __asm__ volatile(
    " casx [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
  return rv;
}
154
155 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) {
156 return (void*)cmpxchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest, (intptr_t)compare_value, order);
157 }
158
159 #endif // OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP
|
104 intptr_t rv = exchange_value;
105 __asm__ volatile(
106 "1:\n\t"
107 " mov %1, %%o3\n\t"
108 " ldx [%2], %%o2\n\t"
109 " casx [%2], %%o2, %%o3\n\t"
110 " cmp %%o2, %%o3\n\t"
111 " bne %%xcc, 1b\n\t"
112 " nop\n\t"
113 " mov %%o2, %0\n\t"
114 : "=r" (rv)
115 : "r" (exchange_value), "r" (dest)
116 : "memory", "o2", "o3");
117 return rv;
118 }
119
120 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
121 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
122 }
123
// No direct support for cmpxchg of bytes; emulate using int.
// SPARC has no byte-wide CAS instruction, so the 1-byte specialization
// inherits the shared fallback. NOTE(review): CmpxchgByteUsingInt is
// defined elsewhere (shared atomic code); presumably it performs a
// word-sized CAS around the target byte — confirm against atomic.hpp.
template<>
struct Atomic::PlatformCmpxchg<1> : Atomic::CmpxchgByteUsingInt {};
127
// 4-byte compare-and-exchange: atomically set *dest to exchange_value iff
// *dest equals compare_value; returns the prior contents of *dest.
// Uses the SPARC 32-bit 'cas' instruction: if [dest] == compare_value,
// cas swaps register %0 with [dest]; otherwise it loads [dest] into %0.
// The "0" input constraint pre-loads exchange_value into the 'rv'
// register, as cas requires.
// NOTE(review): 'order' is not consulted; "memory" is a compiler-only
// barrier — presumably SPARC TSO suffices here, confirm.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<4>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order order) const {
  // Guard against this specialization being instantiated for a
  // wrongly-sized T.
  STATIC_ASSERT(4 == sizeof(T));
  T rv;
  __asm__ volatile(
    " cas [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
  return rv;
}
143
// 8-byte compare-and-exchange: atomically set *dest to exchange_value iff
// *dest equals compare_value; returns the prior contents of *dest.
// Identical in structure to the 4-byte specialization, but with the
// 64-bit 'casx' instruction. The "0" constraint ties exchange_value to
// the output register %0, which casx swaps with [dest] on a match.
// NOTE(review): 'order' is ignored and "memory" is a compiler-only
// barrier — confirm hardware ordering is handled elsewhere.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order order) const {
  // Guard against this specialization being instantiated for a
  // wrongly-sized T.
  STATIC_ASSERT(8 == sizeof(T));
  T rv;
  __asm__ volatile(
    " casx [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
  return rv;
}
159
160 #endif // OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP
|