
src/os_cpu/solaris_x86/vm/atomic_solaris_x86.inline.hpp

rev 11110 : 8155949: Support relaxed semantics in cmpxchg
Reviewed-by:
Contributed-by: HORII@jp.ibm.com, mdoerr

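For context (not part of this webrev): a minimal caller sketch of the new overloads introduced by this change, assuming the cmpxchg_memory_order enum added to runtime/atomic.hpp exposes memory_order_relaxed and memory_order_conservative and that the order parameter defaults to memory_order_conservative. On Solaris/x86 the order argument is accepted for interface compatibility only; the underlying _Atomic_cmpxchg* stubs stay conservative (fully fenced), and relaxed semantics are only honored on platforms such as PPC64. The field and function names below are hypothetical.

  #include "runtime/atomic.inline.hpp"  // assumed include path in the JDK 9 source tree

  static volatile jint _claim_flag = 0;  // hypothetical shared flag

  // Attempt to set the flag from 0 to 1 and return the previous value.
  static jint try_claim() {
    // Explicitly request relaxed semantics; platforms without relaxed
    // support fall back to the conservative (fully fenced) cmpxchg.
    return Atomic::cmpxchg((jint)1, &_claim_flag, (jint)0, memory_order_relaxed);
  }

  // Omitting the last argument keeps the existing behavior: the declaration
  // in atomic.hpp is assumed to default to memory_order_conservative.
  static jint claim_conservative() {
    return Atomic::cmpxchg((jint)1, &_claim_flag, (jint)0);
  }
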
@@ -83,19 +83,19 @@
 inline jint     Atomic::xchg       (jint     exchange_value, volatile jint*     dest) {
   return _Atomic_xchg(exchange_value, dest);
 }
 
 #define VM_HAS_SPECIALIZED_CMPXCHG_BYTE
-inline jbyte    Atomic::cmpxchg    (jbyte    exchange_value, volatile jbyte*    dest, jbyte    compare_value) {
+inline jbyte    Atomic::cmpxchg    (jbyte    exchange_value, volatile jbyte*    dest, jbyte    compare_value, cmpxchg_memory_order order) {
   return _Atomic_cmpxchg_byte(exchange_value, dest, compare_value IS_MP_ARG());
 }
 
-inline jint     Atomic::cmpxchg    (jint     exchange_value, volatile jint*     dest, jint     compare_value) {
+inline jint     Atomic::cmpxchg    (jint     exchange_value, volatile jint*     dest, jint     compare_value, cmpxchg_memory_order order) {
   return _Atomic_cmpxchg(exchange_value, dest, compare_value IS_MP_ARG());
 }
 
-inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value) {
+inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value, cmpxchg_memory_order order) {
   return _Atomic_cmpxchg_long(exchange_value, dest, compare_value IS_MP_ARG());
 }
 
 
 #ifdef AMD64

@@ -118,16 +118,16 @@
 
 inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
   return (void*)_Atomic_xchg_long((jlong)exchange_value, (volatile jlong*)dest);
 }
 
-inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
-  return (intptr_t)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
+inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
+  return (intptr_t)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
 }
 
-inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value) {
-  return (void*)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
+inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value, cmpxchg_memory_order order) {
+  return (void*)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
 }
 
 inline jlong Atomic::load(volatile jlong* src) { return *src; }
 
 #else // !AMD64

@@ -146,16 +146,16 @@
 
 inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
   return (void*)xchg((jint)exchange_value, (volatile jint*)dest);
 }
 
-inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
-  return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
+inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
+  return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
 }
 
-inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value) {
-  return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
+inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value, cmpxchg_memory_order order) {
+  return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
 }
 
 extern "C" void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
 
 inline jlong Atomic::load(volatile jlong* src) {