< prev index next >

src/hotspot/os_cpu/windows_x86/atomic_windows_x86.hpp

Print this page




 196   }
 197 }
 198 
// 64-bit atomic load for 32-bit x86 (this sits under the matching
// `#endif // AMD64` below; the opening `#ifndef AMD64` is outside this
// view -- confirm). Two 32-bit movs could tear, so the value is moved
// through the x87 FPU instead: fild/fistp each issue a single 64-bit
// memory access, and every 64-bit integer is exactly representable in
// the 80-bit x87 format, so the round-trip does not alter the bits.
template<>
template<typename T>
inline T Atomic::PlatformLoad<8>::operator()(T const volatile* src) const {
  STATIC_ASSERT(8 == sizeof(T));
  volatile T dest;
  volatile T* pdest = &dest;    // address of the local, for the asm block below
  __asm {
    mov eax, src                // eax = source address
    fild     qword ptr [eax]    // one 64-bit load onto the x87 stack
    mov eax, pdest              // eax = address of local 'dest'
    fistp    qword ptr [eax]    // one 64-bit store into 'dest'; pops st(0)
  }
  return dest;
}
 213 
 214 template<>
 215 template<typename T>
 216 inline void Atomic::PlatformStore<8>::operator()(T store_value,
 217                                                  T volatile* dest) const {
 218   STATIC_ASSERT(8 == sizeof(T));
 219   volatile T* src = &store_value;
 220   __asm {
 221     mov eax, src
 222     fild     qword ptr [eax]
 223     mov eax, dest
 224     fistp    qword ptr [eax]
 225   }
 226 }
 227 
 228 #endif // AMD64
 229 
 230 #pragma warning(default: 4035) // Enables warnings reporting missing return statement
 231 
 232 #ifndef AMD64
// Ordered 1-byte store (release + trailing fence), 32-bit x86 only.
// xchg with a memory operand carries an implicit lock prefix, so this
// single instruction is both the store and the full fence.
template<>
struct Atomic::PlatformOrderedStore<1, RELEASE_X_FENCE>
{
  template <typename T>
  void operator()(T v, volatile T* p) const {
    __asm {
      mov edx, p;               // edx = destination address
      mov al, v;                // al = value to store
      xchg al, byte ptr [edx];  // implicitly locked: store + full fence
    }
  }
};
 245 
// Ordered 2-byte store (release + trailing fence), 32-bit x86 only.
// The implicitly locked xchg serves as both the store and the fence.
template<>
struct Atomic::PlatformOrderedStore<2, RELEASE_X_FENCE>
{
  template <typename T>
  void operator()(T v, volatile T* p) const {
    __asm {
      mov edx, p;               // edx = destination address
      mov ax, v;                // ax = value to store
      xchg ax, word ptr [edx];  // implicitly locked: store + full fence
    }
  }
};
 258 
// Ordered 4-byte store (release + trailing fence), 32-bit x86 only.
// The implicitly locked xchg serves as both the store and the fence.
template<>
struct Atomic::PlatformOrderedStore<4, RELEASE_X_FENCE>
{
  template <typename T>
  void operator()(T v, volatile T* p) const {
    __asm {
      mov edx, p;                 // edx = destination address
      mov eax, v;                 // eax = value to store
      xchg eax, dword ptr [edx];  // implicitly locked: store + full fence
    }
  }
};
 271 #endif // AMD64
 272 
 273 #endif // OS_CPU_WINDOWS_X86_ATOMIC_WINDOWS_X86_HPP


 196   }
 197 }
 198 
// 64-bit atomic load for 32-bit x86 (this sits under the matching
// `#endif // AMD64` below; the opening `#ifndef AMD64` is outside this
// view -- confirm). Two 32-bit movs could tear, so the value is moved
// through the x87 FPU instead: fild/fistp each issue a single 64-bit
// memory access, and every 64-bit integer is exactly representable in
// the 80-bit x87 format, so the round-trip does not alter the bits.
template<>
template<typename T>
inline T Atomic::PlatformLoad<8>::operator()(T const volatile* src) const {
  STATIC_ASSERT(8 == sizeof(T));
  volatile T dest;
  volatile T* pdest = &dest;    // address of the local, for the asm block below
  __asm {
    mov eax, src                // eax = source address
    fild     qword ptr [eax]    // one 64-bit load onto the x87 stack
    mov eax, pdest              // eax = address of local 'dest'
    fistp    qword ptr [eax]    // one 64-bit store into 'dest'; pops st(0)
  }
  return dest;
}
 213 
// 64-bit atomic store for 32-bit x86. The value travels through the
// x87 FPU so the write to *dest is a single 64-bit memory access (a
// pair of 32-bit movs would let another thread observe a torn value).
// This revision takes (dest, store_value) -- the old revision of this
// file used the opposite order; callers were updated to match.
template<>
template<typename T>
inline void Atomic::PlatformStore<8>::operator()(T volatile* dest,
                                                 T store_value) const {
  STATIC_ASSERT(8 == sizeof(T));
  volatile T* src = &store_value;   // address of the value, for the asm block
  __asm {
    mov eax, src                // eax = address of 'store_value'
    fild     qword ptr [eax]    // one 64-bit load onto the x87 stack
    mov eax, dest               // eax = destination address
    fistp    qword ptr [eax]    // one 64-bit store into *dest; pops st(0)
  }
}
 227 
 228 #endif // AMD64
 229 
 230 #pragma warning(default: 4035) // Enables warnings reporting missing return statement
 231 
 232 #ifndef AMD64
// Ordered 1-byte store (release + trailing fence), 32-bit x86 only.
// xchg with a memory operand carries an implicit lock prefix, so this
// single instruction is both the store and the full fence.
template<>
struct Atomic::PlatformOrderedStore<1, RELEASE_X_FENCE>
{
  template <typename T>
  void operator()(volatile T* p, T v) const {
    __asm {
      mov edx, p;               // edx = destination address
      mov al, v;                // al = value to store
      xchg al, byte ptr [edx];  // implicitly locked: store + full fence
    }
  }
};
 245 
// Ordered 2-byte store (release + trailing fence), 32-bit x86 only.
// The implicitly locked xchg serves as both the store and the fence.
template<>
struct Atomic::PlatformOrderedStore<2, RELEASE_X_FENCE>
{
  template <typename T>
  void operator()(volatile T* p, T v) const {
    __asm {
      mov edx, p;               // edx = destination address
      mov ax, v;                // ax = value to store
      xchg ax, word ptr [edx];  // implicitly locked: store + full fence
    }
  }
};
 258 
// Ordered 4-byte store (release + trailing fence), 32-bit x86 only.
// The implicitly locked xchg serves as both the store and the fence.
template<>
struct Atomic::PlatformOrderedStore<4, RELEASE_X_FENCE>
{
  template <typename T>
  void operator()(volatile T* p, T v) const {
    __asm {
      mov edx, p;                 // edx = destination address
      mov eax, v;                 // eax = value to store
      xchg eax, dword ptr [edx];  // implicitly locked: store + full fence
    }
  }
};
 271 #endif // AMD64
 272 
 273 #endif // OS_CPU_WINDOWS_X86_ATOMIC_WINDOWS_X86_HPP
< prev index next >