< prev index next >

src/hotspot/os_cpu/linux_x86/atomic_linux_x86.hpp

Print this page




 116 }
 117 
 // 8-byte compare-and-swap, AMD64 path: cmpxchgq operates on 64 bits
 // natively.  Returns the value observed at *dest (equals compare_value
 // on success).  The memory-order argument is deliberately ignored: a
 // LOCK-prefixed instruction is a full fence on x86.
 118 template<>
 119 template<typename T>
 120 inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
 121                                                 T volatile* dest,
 122                                                 T compare_value,
 123                                                 cmpxchg_memory_order /* order */) const {
 124   STATIC_ASSERT(8 == sizeof(T));
       // cmpxchgq: compares RAX ("a" = compare_value) with (%3) = *dest;
       // if equal, stores %1 (exchange_value) to *dest, else loads *dest
       // into RAX.  Either way RAX ends up holding the old value, which
       // "=a" writes back into exchange_value for the return below.
       // "memory" clobber orders the asm against surrounding accesses.
 125   __asm__ __volatile__ ("lock cmpxchgq %1,(%3)"
 126                         : "=a" (exchange_value)
 127                         : "r" (exchange_value), "a" (compare_value), "r" (dest)
 128                         : "cc", "memory");
 129   return exchange_value;
 130 }
 131 
 132 #else // !AMD64
 133 
 // Hand-written assembly helpers (see linux_x86.s) used on 32-bit x86,
 // where 8-byte atomic operations cannot be expressed in plain C++.
 134 extern "C" {
 135   // defined in linux_x86.s
       // 64-bit CAS; returns the value previously held at the destination.
 136   jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong);
       // Atomic 64-bit copy from *src to *dst.
 137   void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst);
 138 }
 139 
 // 8-byte compare-and-swap, 32-bit x86 path: delegates to the
 // _Atomic_cmpxchg_long assembly stub via cmpxchg_using_helper, which
 // handles the T <-> jlong conversions.  Returns the old value at *dest.
 // The memory-order argument is ignored (commented out, matching the
 // AMD64 specialization above, and avoiding an unused-parameter warning):
 // the locked instruction in the stub is a full fence on x86.
 140 template<>
 141 template<typename T>
 142 inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
 143                                                 T volatile* dest,
 144                                                 T compare_value,
 145                                                 cmpxchg_memory_order /* order */) const {
 146   STATIC_ASSERT(8 == sizeof(T));
 147   return cmpxchg_using_helper<jlong>(_Atomic_cmpxchg_long, exchange_value, dest, compare_value);
 148 }
 149 
 // Atomic 8-byte load, 32-bit x86 path.  A plain 64-bit load is not a
 // single atomic instruction on 32-bit x86, so the value is copied into
 // a local through the _Atomic_move_long assembly helper and then
 // converted to T (PrimitiveConversions::cast handles e.g. double).
 150 template<>
 151 template<typename T>
 152 inline T Atomic::PlatformLoad<8>::operator()(T const volatile* src) const {
 153   STATIC_ASSERT(8 == sizeof(T));
 154   volatile jlong dest;
 155   _Atomic_move_long(reinterpret_cast<const volatile jlong*>(src), reinterpret_cast<volatile jlong*>(&dest));
 156   return PrimitiveConversions::cast<T>(dest);
 157 }
 158 
 // Atomic 8-byte store, 32-bit x86 path.  Mirrors PlatformLoad<8>: the
 // 64-bit value is written through the _Atomic_move_long assembly helper
 // because a plain 64-bit store is not atomic on 32-bit x86.  store_value
 // is reinterpreted in place; no PrimitiveConversions round-trip needed.
 159 template<>
 160 template<typename T>
 161 inline void Atomic::PlatformStore<8>::operator()(T store_value,
 162                                                  T volatile* dest) const {
 163   STATIC_ASSERT(8 == sizeof(T));
 164   _Atomic_move_long(reinterpret_cast<const volatile jlong*>(&store_value), reinterpret_cast<volatile jlong*>(dest));
 165 }
 166 
 167 #endif // AMD64
 168 
 169 #endif // OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP


 116 }
 117 
 // 8-byte compare-and-swap, AMD64 path: cmpxchgq operates on 64 bits
 // natively.  Returns the value observed at *dest (equals compare_value
 // on success).  The memory-order argument is deliberately ignored: a
 // LOCK-prefixed instruction is a full fence on x86.
 118 template<>
 119 template<typename T>
 120 inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
 121                                                 T volatile* dest,
 122                                                 T compare_value,
 123                                                 cmpxchg_memory_order /* order */) const {
 124   STATIC_ASSERT(8 == sizeof(T));
       // cmpxchgq: compares RAX ("a" = compare_value) with (%3) = *dest;
       // if equal, stores %1 (exchange_value) to *dest, else loads *dest
       // into RAX.  Either way RAX ends up holding the old value, which
       // "=a" writes back into exchange_value for the return below.
       // "memory" clobber orders the asm against surrounding accesses.
 125   __asm__ __volatile__ ("lock cmpxchgq %1,(%3)"
 126                         : "=a" (exchange_value)
 127                         : "r" (exchange_value), "a" (compare_value), "r" (dest)
 128                         : "cc", "memory");
 129   return exchange_value;
 130 }
 131 
 132 #else // !AMD64
 133 
 // Hand-written assembly helpers (see linux_x86.s) used on 32-bit x86,
 // where 8-byte atomic operations cannot be expressed in plain C++.
 134 extern "C" {
 135   // defined in linux_x86.s
       // 64-bit CAS; returns the value previously held at the destination.
 136   int64_t _Atomic_cmpxchg_long(int64_t, volatile int64_t*, int64_t);
       // Atomic 64-bit copy from *src to *dst.
 137   void _Atomic_move_long(const volatile int64_t* src, volatile int64_t* dst);
 138 }
 139 
 // 8-byte compare-and-swap, 32-bit x86 path: delegates to the
 // _Atomic_cmpxchg_long assembly stub via cmpxchg_using_helper, which
 // handles the T <-> int64_t conversions.  Returns the old value at *dest.
 // The memory-order argument is ignored (commented out, matching the
 // AMD64 specialization above, and avoiding an unused-parameter warning):
 // the locked instruction in the stub is a full fence on x86.
 140 template<>
 141 template<typename T>
 142 inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
 143                                                 T volatile* dest,
 144                                                 T compare_value,
 145                                                 cmpxchg_memory_order /* order */) const {
 146   STATIC_ASSERT(8 == sizeof(T));
 147   return cmpxchg_using_helper<int64_t>(_Atomic_cmpxchg_long, exchange_value, dest, compare_value);
 148 }
 149 
 // Atomic 8-byte load, 32-bit x86 path.  A plain 64-bit load is not a
 // single atomic instruction on 32-bit x86, so the value is copied into
 // a local through the _Atomic_move_long assembly helper and then
 // converted to T (PrimitiveConversions::cast handles e.g. double).
 150 template<>
 151 template<typename T>
 152 inline T Atomic::PlatformLoad<8>::operator()(T const volatile* src) const {
 153   STATIC_ASSERT(8 == sizeof(T));
 154   volatile int64_t dest;
 155   _Atomic_move_long(reinterpret_cast<const volatile int64_t*>(src), reinterpret_cast<volatile int64_t*>(&dest));
 156   return PrimitiveConversions::cast<T>(dest);
 157 }
 158 
 // Atomic 8-byte store, 32-bit x86 path.  Mirrors PlatformLoad<8>: the
 // 64-bit value is written through the _Atomic_move_long assembly helper
 // because a plain 64-bit store is not atomic on 32-bit x86.  store_value
 // is reinterpreted in place; no PrimitiveConversions round-trip needed.
 159 template<>
 160 template<typename T>
 161 inline void Atomic::PlatformStore<8>::operator()(T store_value,
 162                                                  T volatile* dest) const {
 163   STATIC_ASSERT(8 == sizeof(T));
 164   _Atomic_move_long(reinterpret_cast<const volatile int64_t*>(&store_value), reinterpret_cast<volatile int64_t*>(dest));
 165 }
 166 
 167 #endif // AMD64
 168 
 169 #endif // OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
< prev index next >