
src/hotspot/os_cpu/bsd_x86/atomic_bsd_x86.hpp

template<>
template<typename T>
inline T Atomic::PlatformLoad<8>::operator()(T const volatile* src) const {
  STATIC_ASSERT(8 == sizeof(T));
  volatile int64_t dest;
  // On 32-bit x86 a plain 64-bit load may tear; _Atomic_move_long (an
  // assembly helper) moves the value with a single atomic instruction.
  _Atomic_move_long(reinterpret_cast<const volatile int64_t*>(src),
                    reinterpret_cast<volatile int64_t*>(&dest));
  return PrimitiveConversions::cast<T>(dest);
}

template<>
template<typename T>
inline void Atomic::PlatformStore<8>::operator()(T store_value,
                                                 T volatile* dest) const {
  STATIC_ASSERT(8 == sizeof(T));
  // Same helper in the other direction: a single atomic 64-bit store.
  _Atomic_move_long(reinterpret_cast<const volatile int64_t*>(&store_value),
                    reinterpret_cast<volatile int64_t*>(dest));
}

#endif // AMD64
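
For context, a minimal usage sketch (not part of this file): a 64-bit access dispatched through the generic Atomic::load/Atomic::store wrappers in share/runtime/atomic.hpp selects the PlatformLoad<8>/PlatformStore<8> specializations above on 32-bit x86, so the access goes through _Atomic_move_long instead of a tearable pair of 32-bit moves. The counter name and the value-first store signature below are assumptions about the surrounding revision:

    // Hypothetical caller, for illustration only.
    static volatile int64_t _byte_count = 0;

    inline void add_sample(int64_t n) {
      int64_t cur = Atomic::load(&_byte_count);   // -> PlatformLoad<8>
      Atomic::store(cur + n, &_byte_count);       // -> PlatformStore<8>
    }

Note that only each individual load and store is atomic here; the read-modify-write as a whole is not. A caller needing an atomic update would use Atomic::add or a cmpxchg loop instead.
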
template<>
struct Atomic::PlatformOrderedStore<1, RELEASE_X_FENCE>
{
  template <typename T>
  void operator()(T v, volatile T* p) const {
    // xchg with a memory operand carries an implicit lock prefix, so the
    // store is both a release and a full fence in one instruction.  The
    // "q" constraint restricts v to a byte-addressable register.
    __asm__ volatile (  "xchgb (%2),%0"
                      : "=q" (v)
                      : "0" (v), "r" (p)
                      : "memory");
  }
};

template<>
struct Atomic::PlatformOrderedStore<2, RELEASE_X_FENCE>
{
  template <typename T>
  void operator()(T v, volatile T* p) const {
    __asm__ volatile (  "xchgw (%2),%0"
                      : "=r" (v)
                      : "0" (v), "r" (p)
                      : "memory");
  }
};

template<>
struct Atomic::PlatformOrderedStore<4, RELEASE_X_FENCE>
{
  template <typename T>
  void operator()(T v, volatile T* p) const {
    __asm__ volatile (  "xchgl (%2),%0"
                      : "=r" (v)
                      : "0" (v), "r" (p)
                      : "memory");
  }
};

#ifdef AMD64
template<>
struct Atomic::PlatformOrderedStore<8, RELEASE_X_FENCE>
{
  template <typename T>
  void operator()(T v, volatile T* p) const {
    __asm__ volatile (  "xchgq (%2), %0"
                      : "=r" (v)
                      : "0" (v), "r" (p)
                      : "memory");
  }
};
#endif // AMD64
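
As a sanity check on the inline assembly, here is a hedged equivalent using GCC/Clang atomic builtins rather than hand-written xchg; this is not part of the patch, just an illustration of why a single xchg satisfies the RELEASE_X_FENCE contract on x86:

    #include <stdint.h>

    // A sequentially consistent exchange compiles to xchg on x86.  Because
    // xchg with a memory operand has an implicit lock prefix, the store is
    // serializing: a release store followed by a full fence, which is
    // exactly what the specializations above provide.
    inline void release_store_fence_u32(volatile uint32_t* p, uint32_t v) {
      (void)__atomic_exchange_n(p, v, __ATOMIC_SEQ_CST);
    }
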
#endif // OS_CPU_BSD_X86_ATOMIC_BSD_X86_HPP
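
For orientation, a sketch of how a caller reaches these specializations. The wrapper name (Atomic::release_store_fence, following the move of the ordered-store operations from OrderAccess into Atomic) and its dest-first parameter order are assumptions about the surrounding revision:

    // Hypothetical publisher, for illustration only.
    static volatile uint32_t _flag = 0;

    void publish() {
      // ... write the data the flag guards ...
      // One xchgl via PlatformOrderedStore<4, RELEASE_X_FENCE>: the data
      // writes above become visible before the flag, and the fence keeps
      // later loads from moving ahead of the store.
      Atomic::release_store_fence(&_flag, 1u);
    }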