src/hotspot/os_cpu/solaris_x86/atomic_solaris_x86.hpp

@@ -42,30 +42,30 @@
 
 template<size_t byte_size>
 struct Atomic::PlatformAdd
   : Atomic::AddAndFetch<Atomic::PlatformAdd<byte_size> >
 {
-  template<typename I, typename D>
-  D add_and_fetch(I add_value, D volatile* dest, atomic_memory_order order) const;
+  template<typename D, typename I>
+  D add_and_fetch(D volatile* dest, I add_value, atomic_memory_order order) const;
 };
 
 // Not using add_using_helper; see comment for cmpxchg.
 template<>
-template<typename I, typename D>
-inline D Atomic::PlatformAdd<4>::add_and_fetch(I add_value, D volatile* dest,
+template<typename D, typename I>
+inline D Atomic::PlatformAdd<4>::add_and_fetch(D volatile* dest, I add_value,
                                                atomic_memory_order order) const {
   STATIC_ASSERT(4 == sizeof(I));
   STATIC_ASSERT(4 == sizeof(D));
   return PrimitiveConversions::cast<D>(
     _Atomic_add(PrimitiveConversions::cast<int32_t>(add_value),
                 reinterpret_cast<int32_t volatile*>(dest)));
 }
 
 // Not using add_using_helper; see comment for cmpxchg.
 template<>
-template<typename I, typename D>
-inline D Atomic::PlatformAdd<8>::add_and_fetch(I add_value, D volatile* dest,
+template<typename D, typename I>
+inline D Atomic::PlatformAdd<8>::add_and_fetch(D volatile* dest, I add_value,
                                                atomic_memory_order order) const {
   STATIC_ASSERT(8 == sizeof(I));
   STATIC_ASSERT(8 == sizeof(D));
   return PrimitiveConversions::cast<D>(
     _Atomic_add_long(PrimitiveConversions::cast<int64_t>(add_value),
                      reinterpret_cast<int64_t volatile*>(dest)));
 }
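
For context: this change swaps the parameter order of add_and_fetch so the
destination pointer comes first and the addend second. A minimal caller-side
sketch follows, assuming the public Atomic::add wrapper was reordered the same
way in the matching shared-code change; the variable and function names here
are illustrative only, not part of this patch:

  #include "runtime/atomic.hpp"

  static volatile int32_t counter  = 0;
  static volatile int64_t refcount = 0;

  void bump() {
    // Old order (before this patch): Atomic::add(1, &counter);
    // New order (after this patch): destination first, add_value second.
    // The byte-size template parameter is deduced from sizeof(D).
    int32_t c = Atomic::add(&counter,  (int32_t)1);  // 4 bytes -> PlatformAdd<4>
    int64_t r = Atomic::add(&refcount, (int64_t)1);  // 8 bytes -> PlatformAdd<8>
  }

Since PlatformAdd derives from Atomic::AddAndFetch, both calls return the
updated value (c == 1 and r == 1 on the first invocation here).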