--- old/src/hotspot/os_cpu/bsd_x86/atomic_bsd_x86.hpp	2019-11-21 11:52:50.262608919 +0100
+++ new/src/hotspot/os_cpu/bsd_x86/atomic_bsd_x86.hpp	2019-11-21 11:52:50.006604737 +0100
@@ -31,13 +31,13 @@
 struct Atomic::PlatformAdd
   : Atomic::FetchAndAdd<Atomic::PlatformAdd<byte_size> >
 {
-  template<typename I, typename D>
-  D fetch_and_add(I add_value, D volatile* dest, atomic_memory_order /* order */) const;
+  template<typename D, typename I>
+  D fetch_and_add(D volatile* dest, I add_value, atomic_memory_order /* order */) const;
 };
 
 template<>
-template<typename I, typename D>
-inline D Atomic::PlatformAdd<4>::fetch_and_add(I add_value, D volatile* dest,
+template<typename D, typename I>
+inline D Atomic::PlatformAdd<4>::fetch_and_add(D volatile* dest, I add_value,
                                                atomic_memory_order /* order */) const {
   STATIC_ASSERT(4 == sizeof(I));
   STATIC_ASSERT(4 == sizeof(D));
@@ -92,8 +92,8 @@
 
 #ifdef AMD64
 template<>
-template<typename I, typename D>
-inline D Atomic::PlatformAdd<8>::fetch_and_add(I add_value, D volatile* dest,
+template<typename D, typename I>
+inline D Atomic::PlatformAdd<8>::fetch_and_add(D volatile* dest, I add_value,
                                                atomic_memory_order /* order */) const {
   STATIC_ASSERT(8 == sizeof(I));
   STATIC_ASSERT(8 == sizeof(D));