src/hotspot/os_cpu/windows_x86/atomic_windows_x86.hpp

@@ -92,16 +92,16 @@
 #undef DEFINE_STUB_XCHG
 
 #define DEFINE_STUB_CMPXCHG(ByteSize, StubType, StubName)               \
   template<>                                                            \
   template<typename T>                                                  \
-  inline T Atomic::PlatformCmpxchg<ByteSize>::operator()(T exchange_value, \
-                                                         T volatile* dest, \
+  inline T Atomic::PlatformCmpxchg<ByteSize>::operator()(T volatile* dest, \
                                                          T compare_value, \
+                                                         T exchange_value, \
                                                          atomic_memory_order order) const { \
     STATIC_ASSERT(ByteSize == sizeof(T));                               \
-    return cmpxchg_using_helper<StubType>(StubName, exchange_value, dest, compare_value); \
+    return cmpxchg_using_helper<StubType>(StubName, dest, compare_value, exchange_value); \
   }
 
 DEFINE_STUB_CMPXCHG(1, int8_t,  os::atomic_cmpxchg_byte_func)
 DEFINE_STUB_CMPXCHG(4, int32_t, os::atomic_cmpxchg_func)
 DEFINE_STUB_CMPXCHG(8, int64_t, os::atomic_cmpxchg_long_func)
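
A quick standalone sketch (not HotSpot code, plain C++11 std::atomic) of the cmpxchg contract the reordered operator() above implements: the arguments now read (dest, compare_value, exchange_value), and the return value is whatever was observed at *dest, which equals compare_value exactly when the exchange took place. The names here are illustrative only.

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    // Illustrative stand-in for the platform cmpxchg: same argument order
    // and return convention as the specializations in this file.
    static int32_t cmpxchg_sketch(std::atomic<int32_t>* dest,
                                  int32_t compare_value,
                                  int32_t exchange_value) {
      int32_t observed = compare_value;
      // On failure, compare_exchange_strong stores the current value of
      // *dest into 'observed'; on success, 'observed' keeps compare_value.
      dest->compare_exchange_strong(observed, exchange_value);
      return observed;
    }

    int main() {
      std::atomic<int32_t> v(5);
      assert(cmpxchg_sketch(&v, 5, 7) == 5 && v.load() == 7);  // exchange happened
      assert(cmpxchg_sketch(&v, 5, 9) == 7 && v.load() == 7);  // compare failed, no write
      return 0;
    }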

@@ -139,13 +139,13 @@
   }
 }
 
 template<>
 template<typename T>
-inline T Atomic::PlatformCmpxchg<1>::operator()(T exchange_value,
-                                                T volatile* dest,
+inline T Atomic::PlatformCmpxchg<1>::operator()(T volatile* dest,
                                                 T compare_value,
+                                                T exchange_value,
                                                 atomic_memory_order order) const {
   STATIC_ASSERT(1 == sizeof(T));
   // alternative for InterlockedCompareExchange
   __asm {
     mov edx, dest

@@ -155,13 +155,13 @@
   }
 }
 
 template<>
 template<typename T>
-inline T Atomic::PlatformCmpxchg<4>::operator()(T exchange_value,
-                                                T volatile* dest,
+inline T Atomic::PlatformCmpxchg<4>::operator()(T volatile* dest,
                                                 T compare_value,
+                                                T exchange_value,
                                                 atomic_memory_order order) const {
   STATIC_ASSERT(4 == sizeof(T));
   // alternative for InterlockedCompareExchange
   __asm {
     mov edx, dest

@@ -171,13 +171,13 @@
   }
 }
 
 template<>
 template<typename T>
-inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
-                                                T volatile* dest,
+inline T Atomic::PlatformCmpxchg<8>::operator()(T volatile* dest,
                                                 T compare_value,
+                                                T exchange_value,
                                                 atomic_memory_order order) const {
   STATIC_ASSERT(8 == sizeof(T));
   int32_t ex_lo  = (int32_t)exchange_value;
   int32_t ex_hi  = *( ((int32_t*)&exchange_value) + 1 );
   int32_t cmp_lo = (int32_t)compare_value;
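
A note on the 8-byte variant whose body begins above: on 32-bit Windows x86 the 64-bit compare and exchange operands are handed to LOCK CMPXCHG8B as 32-bit halves (EDX:EAX for the compare value, ECX:EBX for the exchange value), which is what the ex_lo/ex_hi/cmp_lo splitting sets up. A standalone sketch of that split, with hypothetical names, assuming the little-endian layout of x86:

    #include <cstdint>

    // Split a 64-bit operand into the low/high 32-bit halves consumed by
    // CMPXCHG8B; equivalent to the pointer arithmetic used above.
    static void split64(int64_t value, int32_t* lo, int32_t* hi) {
      *lo = (int32_t)value;          // low 32 bits  (EAX / EBX)
      *hi = (int32_t)(value >> 32);  // high 32 bits (EDX / ECX)
    }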