56 inline D Atomic::PlatformAdd<4>::add_and_fetch(I add_value, D volatile* dest) const {
57 return add_using_helper<jint>(os::atomic_add_func, add_value, dest);
58 }
59
60 template<>
61 template<typename I, typename D>
62 inline D Atomic::PlatformAdd<8>::add_and_fetch(I add_value, D volatile* dest) const {
63 return add_using_helper<intptr_t>(os::atomic_add_ptr_func, add_value, dest);
64 }
65
// Generates the Atomic::PlatformXchg<ByteSize> specialization: it forwards
// the exchange to an OS-provided stub function (StubName) through
// xchg_using_helper, converting operands via StubType. The STATIC_ASSERT
// rejects instantiation with a T whose size does not match ByteSize.
#define DEFINE_STUB_XCHG(ByteSize, StubType, StubName)                  \
  template<>                                                            \
  template<typename T>                                                  \
  inline T Atomic::PlatformXchg<ByteSize>::operator()(T exchange_value, \
                                                      T volatile* dest) const { \
    STATIC_ASSERT(ByteSize == sizeof(T));                               \
    return xchg_using_helper<StubType>(StubName, exchange_value, dest); \
  }
74
75 DEFINE_STUB_XCHG(4, jint, os::atomic_xchg_func)
76 DEFINE_STUB_XCHG(8, jlong, os::atomic_xchg_ptr_func)
77
78 #undef DEFINE_STUB_XCHG
79
// Generates the Atomic::PlatformCmpxchg<ByteSize> specialization: it forwards
// the compare-and-exchange to an OS-provided stub (StubName) through
// cmpxchg_using_helper, converting operands via StubType. The STATIC_ASSERT
// rejects instantiation with a T whose size does not match ByteSize.
// NOTE(review): the 'order' argument is accepted but not forwarded to the
// stub -- presumably the stubs are conservatively strong; confirm.
#define DEFINE_STUB_CMPXCHG(ByteSize, StubType, StubName)                  \
  template<>                                                               \
  template<typename T>                                                     \
  inline T Atomic::PlatformCmpxchg<ByteSize>::operator()(T exchange_value, \
                                                         T volatile* dest, \
                                                         T compare_value,  \
                                                         cmpxchg_memory_order order) const { \
    STATIC_ASSERT(ByteSize == sizeof(T));                                  \
    return cmpxchg_using_helper<StubType>(StubName, exchange_value, dest, compare_value); \
  }
90
91 DEFINE_STUB_CMPXCHG(1, jbyte, os::atomic_cmpxchg_byte_func)
92 DEFINE_STUB_CMPXCHG(4, jint, os::atomic_cmpxchg_func)
93 DEFINE_STUB_CMPXCHG(8, jlong, os::atomic_cmpxchg_long_func)
94
95 #undef DEFINE_STUB_CMPXCHG
96
// ---- paste divider: a second copy of the same chunk follows ----
56 inline D Atomic::PlatformAdd<4>::add_and_fetch(I add_value, D volatile* dest) const {
57 return add_using_helper<jint>(os::atomic_add_func, add_value, dest);
58 }
59
60 template<>
61 template<typename I, typename D>
62 inline D Atomic::PlatformAdd<8>::add_and_fetch(I add_value, D volatile* dest) const {
63 return add_using_helper<intptr_t>(os::atomic_add_ptr_func, add_value, dest);
64 }
65
// Generates the Atomic::PlatformXchg<ByteSize> specialization: it forwards
// the exchange to an OS-provided stub function (StubName) through
// xchg_using_helper, converting operands via StubType. The STATIC_ASSERT
// rejects instantiation with a T whose size does not match ByteSize.
#define DEFINE_STUB_XCHG(ByteSize, StubType, StubName)                  \
  template<>                                                            \
  template<typename T>                                                  \
  inline T Atomic::PlatformXchg<ByteSize>::operator()(T exchange_value, \
                                                      T volatile* dest) const { \
    STATIC_ASSERT(ByteSize == sizeof(T));                               \
    return xchg_using_helper<StubType>(StubName, exchange_value, dest); \
  }
74
75 DEFINE_STUB_XCHG(4, jint, os::atomic_xchg_func)
76 DEFINE_STUB_XCHG(8, jlong, os::atomic_xchg_long_func)
77
78 #undef DEFINE_STUB_XCHG
79
// Generates the Atomic::PlatformCmpxchg<ByteSize> specialization: it forwards
// the compare-and-exchange to an OS-provided stub (StubName) through
// cmpxchg_using_helper, converting operands via StubType. The STATIC_ASSERT
// rejects instantiation with a T whose size does not match ByteSize.
// NOTE(review): the 'order' argument is accepted but not forwarded to the
// stub -- presumably the stubs are conservatively strong; confirm.
#define DEFINE_STUB_CMPXCHG(ByteSize, StubType, StubName)                  \
  template<>                                                               \
  template<typename T>                                                     \
  inline T Atomic::PlatformCmpxchg<ByteSize>::operator()(T exchange_value, \
                                                         T volatile* dest, \
                                                         T compare_value,  \
                                                         cmpxchg_memory_order order) const { \
    STATIC_ASSERT(ByteSize == sizeof(T));                                  \
    return cmpxchg_using_helper<StubType>(StubName, exchange_value, dest, compare_value); \
  }
90
91 DEFINE_STUB_CMPXCHG(1, jbyte, os::atomic_cmpxchg_byte_func)
92 DEFINE_STUB_CMPXCHG(4, jint, os::atomic_cmpxchg_func)
93 DEFINE_STUB_CMPXCHG(8, jlong, os::atomic_cmpxchg_long_func)
94
95 #undef DEFINE_STUB_CMPXCHG
96
// ---- end of pasted chunk ----