160 " nop\n\t"
161 " mov %%o2, %0\n\t"
162 : "=r" (rv)
163 : "r" (exchange_value), "r" (dest)
164 : "memory", "o2", "o3");
165 #else //_LP64
166 __asm__ volatile(
167 "swap [%2],%1\n\t"
168 : "=r" (rv)
169 : "0" (exchange_value) /* we use same register as for return value */, "r" (dest)
170 : "memory");
171 #endif // _LP64
172 return rv;
173 }
174
175 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
176 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
177 }
178
179
// Atomic 32-bit compare-and-swap: stores exchange_value at *dest iff
// *dest == compare_value. Returns the value observed at *dest (equals
// compare_value exactly when the swap happened).
inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value) {
  jint rv;
  __asm__ volatile(
    // cas compares [dest] (%2) with compare_value (%3); on match it swaps
    // in %0, and in all cases leaves the old memory value in %0. The "0"
    // constraint below preloads %0 with exchange_value.
    " cas [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");  // compiler barrier: memory is read and may be written
  return rv;
}
189
// Atomic 64-bit compare-and-swap. Returns the value observed at *dest.
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value) {
#ifdef _LP64
  // 64-bit build: casx handles the full word in one register.
  jlong rv;
  __asm__ volatile(
    " casx [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
  return rv;
#else //_LP64
  // 32-bit build: a jlong occupies two 32-bit words, but casx still needs
  // full 64-bit register operands. Split each jlong into its words via
  // jlong_accessor, glue them together inside the asm, and split the
  // observed value back into two output words afterwards.
  // (words[0] is the high half: it is the word shifted left by 32 below.)
  volatile jlong_accessor evl, cvl, rv;
  evl.long_value = exchange_value;
  cvl.long_value = compare_value;

  // NOTE(review): the asm writes to input-only operands %2/%3/%5/%6, which
  // GCC's extended-asm contract does not guarantee is safe — confirm.
  __asm__ volatile(
    // Assemble the 64-bit exchange value in %2: (hi << 32) | zext(lo).
    " sllx %2, 32, %2\n\t"
    " srl %3, 0, %3\n\t"
    " or %2, %3, %2\n\t"
    // Assemble the 64-bit compare value in %5 the same way.
    " sllx %5, 32, %5\n\t"
    " srl %6, 0, %6\n\t"
    " or %5, %6, %5\n\t"
    // 64-bit CAS; the old memory value ends up in %2.
    " casx [%4], %5, %2\n\t"
    // Split the observed value: low word -> %1, high word -> %0.
    " srl %2, 0, %1\n\t"
    " srlx %2, 32, %0\n\t"
    : "=r" (rv.words[0]), "=r" (rv.words[1])
    : "r" (evl.words[0]), "r" (evl.words[1]), "r" (dest), "r" (cvl.words[0]), "r" (cvl.words[1])
    : "memory");

  return rv.long_value;
#endif //_LP64
}
221
// Atomic pointer-sized compare-and-swap: casx on 64-bit builds, cas on
// 32-bit builds. Returns the value observed at *dest.
inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
  intptr_t rv;
#ifdef _LP64
  __asm__ volatile(
    // "0" preloads %0 with exchange_value; casx leaves the old value in %0.
    " casx [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
#else //_LP64
  __asm__ volatile(
    " cas [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
#endif // _LP64
  return rv;
}
239
240 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value) {
241 return (void*)cmpxchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest, (intptr_t)compare_value);
242 }
243
244 #else // _GNU_SOURCE
245
246 #if defined(COMPILER2) || defined(_LP64)
247
248 // This is the interface to the atomic instructions in solaris_sparc.il.
249 // It's very messy because we need to support v8 and these instructions
250 // are illegal there. When sparc v8 is dropped, we can drop out lots of
251 // this code. Also compiler2 does not support v8 so the conditional code
252 // omits the instruction set check.
253
254 extern "C" jint _Atomic_swap32(jint exchange_value, volatile jint* dest);
255 extern "C" intptr_t _Atomic_swap64(intptr_t exchange_value, volatile intptr_t* dest);
256
257 extern "C" jint _Atomic_cas32(jint exchange_value, volatile jint* dest, jint compare_value);
258 extern "C" intptr_t _Atomic_cas64(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value);
259 extern "C" jlong _Atomic_casl (jlong exchange_value, volatile jlong* dest, jlong compare_value);
260
261 extern "C" jint _Atomic_add32(jint inc, volatile jint* dest);
279 }
280
281
282 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) {
283 return _Atomic_swap32(exchange_value, dest);
284 }
285
// Pointer-sized atomic exchange: selects the assembly stub whose width
// matches intptr_t for this build.
inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
#ifdef _LP64
  return _Atomic_swap64(exchange_value, dest);
#else // _LP64
  return _Atomic_swap32(exchange_value, dest);
#endif // _LP64
}
293
294 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
295 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
296 }
297
298
299 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value) {
300 return _Atomic_cas32(exchange_value, dest, compare_value);
301 }
302
// 64-bit compare-and-swap: the stub differs because of how a 64-bit
// result is conveyed by each ABI (see the per-branch comments).
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value) {
#ifdef _LP64
  // Return 64 bit value in %o0
  return _Atomic_cas64((intptr_t)exchange_value, (intptr_t *)dest, (intptr_t)compare_value);
#else // _LP64
  // Return 64 bit value in %o0,%o1 by hand
  return _Atomic_casl(exchange_value, dest, compare_value);
#endif // _LP64
}
312
// Pointer-sized compare-and-swap: selects the stub matching the width of
// intptr_t for this build.
inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
#ifdef _LP64
  return _Atomic_cas64(exchange_value, dest, compare_value);
#else // _LP64
  return _Atomic_cas32(exchange_value, dest, compare_value);
#endif // _LP64
}
320
321 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value) {
322 return (void*)cmpxchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest, (intptr_t)compare_value);
323 }
324
325
326 #else // _LP64 || COMPILER2
327
328
329 // 32-bit compiler1 only
330
331 inline jint Atomic::add (jint add_value, volatile jint* dest) {
332 return (*os::atomic_add_func)(add_value, dest);
333 }
334
335 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
336 return (intptr_t)add((jint)add_value, (volatile jint*)dest);
337 }
338
339 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) {
340 return (void*)add((jint)add_value, (volatile jint*)dest);
341 }
342
343
344 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) {
345 return (*os::atomic_xchg_func)(exchange_value, dest);
346 }
347
348 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
349 return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
350 }
351
352 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
353 return (void*)xchg((jint)exchange_value, (volatile jint*)dest);
354 }
355
356
357 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value) {
358 return (*os::atomic_cmpxchg_func)(exchange_value, dest, compare_value);
359 }
360
361 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value) {
362 return (*os::atomic_cmpxchg_long_func)(exchange_value, dest, compare_value);
363 }
364
365 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
366 return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
367 }
368
369 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value) {
370 return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
371 }
372
373 #endif // _LP64 || COMPILER2
374
375 #endif // _GNU_SOURCE
376
377 #endif // OS_CPU_SOLARIS_SPARC_VM_ATOMIC_SOLARIS_SPARC_INLINE_HPP
|
160 " nop\n\t"
161 " mov %%o2, %0\n\t"
162 : "=r" (rv)
163 : "r" (exchange_value), "r" (dest)
164 : "memory", "o2", "o3");
165 #else //_LP64
166 __asm__ volatile(
167 "swap [%2],%1\n\t"
168 : "=r" (rv)
169 : "0" (exchange_value) /* we use same register as for return value */, "r" (dest)
170 : "memory");
171 #endif // _LP64
172 return rv;
173 }
174
175 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
176 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
177 }
178
179
// Atomic 32-bit compare-and-swap: stores exchange_value at *dest iff
// *dest == compare_value; returns the value observed at *dest.
// The order parameter is accepted for interface compatibility but is not
// consulted by this implementation.
inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, cmpxchg_memory_order order) {
  jint rv;
  __asm__ volatile(
    // cas compares [dest] (%2) with compare_value (%3); on match it swaps
    // in %0, and in all cases leaves the old memory value in %0. The "0"
    // constraint below preloads %0 with exchange_value.
    " cas [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");  // compiler barrier: memory is read and may be written
  return rv;
}
189
// Atomic 64-bit compare-and-swap. Returns the value observed at *dest.
// The order parameter is accepted for interface compatibility but is not
// consulted by this implementation.
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
#ifdef _LP64
  // 64-bit build: casx handles the full word in one register.
  jlong rv;
  __asm__ volatile(
    " casx [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
  return rv;
#else //_LP64
  // 32-bit build: a jlong occupies two 32-bit words, but casx still needs
  // full 64-bit register operands. Split each jlong into its words via
  // jlong_accessor, glue them together inside the asm, and split the
  // observed value back into two output words afterwards.
  // (words[0] is the high half: it is the word shifted left by 32 below.)
  volatile jlong_accessor evl, cvl, rv;
  evl.long_value = exchange_value;
  cvl.long_value = compare_value;

  // NOTE(review): the asm writes to input-only operands %2/%3/%5/%6, which
  // GCC's extended-asm contract does not guarantee is safe — confirm.
  __asm__ volatile(
    // Assemble the 64-bit exchange value in %2: (hi << 32) | zext(lo).
    " sllx %2, 32, %2\n\t"
    " srl %3, 0, %3\n\t"
    " or %2, %3, %2\n\t"
    // Assemble the 64-bit compare value in %5 the same way.
    " sllx %5, 32, %5\n\t"
    " srl %6, 0, %6\n\t"
    " or %5, %6, %5\n\t"
    // 64-bit CAS; the old memory value ends up in %2.
    " casx [%4], %5, %2\n\t"
    // Split the observed value: low word -> %1, high word -> %0.
    " srl %2, 0, %1\n\t"
    " srlx %2, 32, %0\n\t"
    : "=r" (rv.words[0]), "=r" (rv.words[1])
    : "r" (evl.words[0]), "r" (evl.words[1]), "r" (dest), "r" (cvl.words[0]), "r" (cvl.words[1])
    : "memory");

  return rv.long_value;
#endif //_LP64
}
221
// Atomic pointer-sized compare-and-swap: casx on 64-bit builds, cas on
// 32-bit builds. Returns the value observed at *dest.
// The order parameter is accepted for interface compatibility but is not
// consulted by this implementation.
inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
  intptr_t rv;
#ifdef _LP64
  __asm__ volatile(
    // "0" preloads %0 with exchange_value; casx leaves the old value in %0.
    " casx [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
#else //_LP64
  __asm__ volatile(
    " cas [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
#endif // _LP64
  return rv;
}
239
240 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) {
241 return (void*)cmpxchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest, (intptr_t)compare_value, order);
242 }
243
244 #else // _GNU_SOURCE
245
246 #if defined(COMPILER2) || defined(_LP64)
247
248 // This is the interface to the atomic instructions in solaris_sparc.il.
249 // It's very messy because we need to support v8 and these instructions
250 // are illegal there. When sparc v8 is dropped, we can drop out lots of
251 // this code. Also compiler2 does not support v8 so the conditional code
252 // omits the instruction set check.
253
254 extern "C" jint _Atomic_swap32(jint exchange_value, volatile jint* dest);
255 extern "C" intptr_t _Atomic_swap64(intptr_t exchange_value, volatile intptr_t* dest);
256
257 extern "C" jint _Atomic_cas32(jint exchange_value, volatile jint* dest, jint compare_value);
258 extern "C" intptr_t _Atomic_cas64(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value);
259 extern "C" jlong _Atomic_casl (jlong exchange_value, volatile jlong* dest, jlong compare_value);
260
261 extern "C" jint _Atomic_add32(jint inc, volatile jint* dest);
279 }
280
281
282 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) {
283 return _Atomic_swap32(exchange_value, dest);
284 }
285
// Pointer-sized atomic exchange: selects the assembly stub whose width
// matches intptr_t for this build.
inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
#ifdef _LP64
  return _Atomic_swap64(exchange_value, dest);
#else // _LP64
  return _Atomic_swap32(exchange_value, dest);
#endif // _LP64
}
293
294 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
295 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
296 }
297
298
299 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, cmpxchg_memory_order order) {
300 return _Atomic_cas32(exchange_value, dest, compare_value);
301 }
302
// 64-bit compare-and-swap: the stub differs because of how a 64-bit
// result is conveyed by each ABI (see the per-branch comments).
// The order parameter is accepted for interface compatibility but is not
// consulted by this implementation.
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
#ifdef _LP64
  // Return 64 bit value in %o0
  return _Atomic_cas64((intptr_t)exchange_value, (intptr_t *)dest, (intptr_t)compare_value);
#else // _LP64
  // Return 64 bit value in %o0,%o1 by hand
  return _Atomic_casl(exchange_value, dest, compare_value);
#endif // _LP64
}
312
// Pointer-sized compare-and-swap: selects the stub matching the width of
// intptr_t for this build. The order parameter is accepted for interface
// compatibility but is not consulted by this implementation.
inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
#ifdef _LP64
  return _Atomic_cas64(exchange_value, dest, compare_value);
#else // _LP64
  return _Atomic_cas32(exchange_value, dest, compare_value);
#endif // _LP64
}
320
321 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) {
322 return (void*)cmpxchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest, (intptr_t)compare_value, order);
323 }
324
325
326 #else // _LP64 || COMPILER2
327
328
329 // 32-bit compiler1 only
330
331 inline jint Atomic::add (jint add_value, volatile jint* dest) {
332 return (*os::atomic_add_func)(add_value, dest);
333 }
334
335 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
336 return (intptr_t)add((jint)add_value, (volatile jint*)dest);
337 }
338
339 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) {
340 return (void*)add((jint)add_value, (volatile jint*)dest);
341 }
342
343
344 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) {
345 return (*os::atomic_xchg_func)(exchange_value, dest);
346 }
347
348 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
349 return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
350 }
351
352 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
353 return (void*)xchg((jint)exchange_value, (volatile jint*)dest);
354 }
355
356
357 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, cmpxchg_memory_order order) {
358 return (*os::atomic_cmpxchg_func)(exchange_value, dest, compare_value);
359 }
360
361 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
362 return (*os::atomic_cmpxchg_long_func)(exchange_value, dest, compare_value);
363 }
364
365 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
366 return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
367 }
368
369 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) {
370 return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
371 }
372
373 #endif // _LP64 || COMPILER2
374
375 #endif // _GNU_SOURCE
376
377 #endif // OS_CPU_SOLARIS_SPARC_VM_ATOMIC_SOLARIS_SPARC_INLINE_HPP
|