--- old/src/hotspot/os_cpu/windows_x86/atomic_windows_x86.hpp	2019-11-21 11:52:55.386692624 +0100
+++ new/src/hotspot/os_cpu/windows_x86/atomic_windows_x86.hpp	2019-11-21 11:52:55.134688507 +0100
@@ -57,23 +57,23 @@
 struct Atomic::PlatformAdd
   : Atomic::AddAndFetch<Atomic::PlatformAdd<byte_size> >
 {
-  template<typename I, typename D>
-  D add_and_fetch(I add_value, D volatile* dest, atomic_memory_order order) const;
+  template<typename D, typename I>
+  D add_and_fetch(D volatile* dest, I add_value, atomic_memory_order order) const;
 };
 
 #ifdef AMD64
 template<>
-template<typename I, typename D>
-inline D Atomic::PlatformAdd<4>::add_and_fetch(I add_value, D volatile* dest,
+template<typename D, typename I>
+inline D Atomic::PlatformAdd<4>::add_and_fetch(D volatile* dest, I add_value,
                                                atomic_memory_order order) const {
-  return add_using_helper<int32_t>(os::atomic_add_func, add_value, dest);
+  return add_using_helper<int32_t>(os::atomic_add_func, dest, add_value);
 }
 
 template<>
-template<typename I, typename D>
-inline D Atomic::PlatformAdd<8>::add_and_fetch(I add_value, D volatile* dest,
+template<typename D, typename I>
+inline D Atomic::PlatformAdd<8>::add_and_fetch(D volatile* dest, I add_value,
                                                atomic_memory_order order) const {
-  return add_using_helper<int64_t>(os::atomic_add_long_func, add_value, dest);
+  return add_using_helper<int64_t>(os::atomic_add_long_func, dest, add_value);
 }
 
 #define DEFINE_STUB_XCHG(ByteSize, StubType, StubName) \
@@ -111,8 +111,8 @@
 #else // !AMD64
 
 template<>
-template<typename I, typename D>
-inline D Atomic::PlatformAdd<4>::add_and_fetch(I add_value, D volatile* dest,
+template<typename D, typename I>
+inline D Atomic::PlatformAdd<4>::add_and_fetch(D volatile* dest, I add_value,
                                                atomic_memory_order order) const {
   STATIC_ASSERT(4 == sizeof(I));
   STATIC_ASSERT(4 == sizeof(D));