8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
26 #define OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
27
28 #include "runtime/os.hpp"
29
30 // Implementation of class atomic
31
// Plain (non-fenced) stores of 8/16/32-bit and pointer-sized values.
// These are simple single assignments; no memory-ordering barrier is
// implied by any of them.
inline void Atomic::store (jbyte store_value, jbyte* dest) { *dest = store_value; }
inline void Atomic::store (jshort store_value, jshort* dest) { *dest = store_value; }
inline void Atomic::store (jint store_value, jint* dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void* store_value, void* dest) { *(void**)dest = store_value; }

// Volatile-destination flavors of the stores above; the volatile
// qualifier only prevents compiler reordering/elision of the access.
inline void Atomic::store (jbyte store_value, volatile jbyte* dest) { *dest = store_value; }
inline void Atomic::store (jshort store_value, volatile jshort* dest) { *dest = store_value; }
inline void Atomic::store (jint store_value, volatile jint* dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; }
43
44
45 inline jint Atomic::add (jint add_value, volatile jint* dest) {
46 jint addend = add_value;
47 __asm__ volatile ( "lock xaddl %0,(%2)"
48 : "=r" (addend)
49 : "0" (addend), "r" (dest)
64 __asm__ volatile ( "lock subl $1,(%0)" :
65 : "r" (dest) : "cc", "memory");
66 }
67
68 inline void Atomic::dec_ptr(volatile void* dest) {
69 dec_ptr((volatile intptr_t*)dest);
70 }
71
// Atomically swap exchange_value into *dest, returning the previous
// contents of *dest.  xchg with a memory operand is implicitly locked
// on x86, so no explicit "lock" prefix is required.
inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) {
  // "0" ties the input to the "=r" output register, so after the swap
  // exchange_value holds the old value of *dest.
  __asm__ volatile ( "xchgl (%2),%0"
                    : "=r" (exchange_value)
                    : "0" (exchange_value), "r" (dest)
                    : "memory");
  return exchange_value;
}
79
80 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
81 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
82 }
83
84 #define VM_HAS_SPECIALIZED_CMPXCHG_BYTE
// Compare-and-swap on a byte: if *dest == compare_value, store
// exchange_value; always returns the value that was in *dest before the
// operation.  cmpxchgb keeps the comparand in AL ("a") and leaves the
// previous memory value in AL, which the "=a" output returns.  The lock
// prefix acts as a full barrier, so `order` is not consulted.
inline jbyte Atomic::cmpxchg (jbyte exchange_value, volatile jbyte* dest, jbyte compare_value, cmpxchg_memory_order order) {
  // "q": exchange_value must live in a byte-addressable register.
  __asm__ volatile ("lock cmpxchgb %1,(%3)"
                    : "=a" (exchange_value)
                    : "q" (exchange_value), "a" (compare_value), "r" (dest)
                    : "cc", "memory");
  return exchange_value;
}
92
// 32-bit compare-and-swap; same contract as the jbyte overload above:
// returns the previous contents of *dest, updating it to exchange_value
// only when it equaled compare_value.  Lock-prefixed cmpxchg is a full
// barrier, so `order` is not consulted.
inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, cmpxchg_memory_order order) {
  __asm__ volatile ("lock cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest)
                    : "cc", "memory");
  return exchange_value;
}
100
101 #ifdef AMD64
// On AMD64 an aligned 64-bit store is a single instruction, so plain
// assignment suffices (the 32-bit path below needs an assembly helper).
inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
104
// Atomically add add_value to *dest and return the NEW value.
// lock xaddq leaves the previous value of *dest in `addend` (the "0"
// constraint ties input and output registers), so new = addend + add_value.
inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  intptr_t addend = add_value;
  __asm__ __volatile__ ("lock xaddq %0,(%2)"
                        : "=r" (addend)
                        : "0" (addend), "r" (dest)
                        : "cc", "memory");
  return addend + add_value;
}
113
120 :
121 : "r" (dest)
122 : "cc", "memory");
123 }
124
// Atomically decrement *dest by one.  No result is needed, so a plain
// locked subtract is enough (no xadd required).
inline void Atomic::dec_ptr(volatile intptr_t* dest) {
  __asm__ __volatile__ ("lock subq $1,(%0)"
                        :
                        : "r" (dest)
                        : "cc", "memory");
}
131
// Atomically swap a pointer-sized value into *dest, returning the old
// value.  xchg with a memory operand is implicitly locked on x86.
inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  __asm__ __volatile__ ("xchgq (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
  return exchange_value;
}
139
// 64-bit compare-and-swap (AMD64 only): returns the previous value of
// *dest, storing exchange_value only when it equaled compare_value.
// The comparand travels in RAX ("a"), which also carries the old memory
// value back out; the lock prefix is a full barrier, so `order` is unused.
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
  __asm__ __volatile__ ("lock cmpxchgq %1,(%3)"
                        : "=a" (exchange_value)
                        : "r" (exchange_value), "a" (compare_value), "r" (dest)
                        : "cc", "memory");
  return exchange_value;
}
147
148 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
149 return (intptr_t)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
150 }
151
152 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) {
153 return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
154 }
155
156 inline jlong Atomic::load(const volatile jlong* src) { return *src; }
157
158 #else // !AMD64
159
160 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
161 return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest);
162 }
163
164 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) {
165 return (void*)Atomic::add((jint)add_value, (volatile jint*)dest);
166 }
167
168
169 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
170 inc((volatile jint*)dest);
171 }
172
173 inline void Atomic::dec_ptr(volatile intptr_t* dest) {
174 dec((volatile jint*)dest);
175 }
176
177 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
178 return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
179 }
180
extern "C" {
  // 64-bit atomic helpers for 32-bit x86, defined in assembly
  // (linux_x86.s): a 64-bit compare-exchange and a 64-bit move that
  // avoid reading/writing the two 32-bit halves separately.
  jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong);
  void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst);
}
186
// 64-bit CAS on 32-bit x86: routed through the assembly helper above.
// `order` is not forwarded; the helper's locked instruction is assumed
// to provide full ordering (see linux_x86.s).
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
  return _Atomic_cmpxchg_long(exchange_value, dest, compare_value);
}
190
191 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
192 return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
193 }
194
195 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) {
196 return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
197 }
198
// Atomic 64-bit load on 32-bit x86: copy through the assembly helper so
// the two 32-bit halves are not read by separate instructions (a torn
// read could otherwise see half of a concurrent store).
inline jlong Atomic::load(const volatile jlong* src) {
  volatile jlong dest;
  _Atomic_move_long(src, &dest);
  return dest;
}
204
// Atomic 64-bit stores on 32-bit x86: both flavors go through the
// assembly helper so the value is written in one 64-bit transfer.
inline void Atomic::store(jlong store_value, jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}

inline void Atomic::store(jlong store_value, volatile jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, dest);
}
212
213 #endif // AMD64
214
215 #endif // OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
|
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
26 #define OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
27
28 // Implementation of class atomic
29
// Plain (non-fenced) stores of 8/16/32-bit and pointer-sized values.
// These are simple single assignments; no memory-ordering barrier is
// implied by any of them.
inline void Atomic::store (jbyte store_value, jbyte* dest) { *dest = store_value; }
inline void Atomic::store (jshort store_value, jshort* dest) { *dest = store_value; }
inline void Atomic::store (jint store_value, jint* dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void* store_value, void* dest) { *(void**)dest = store_value; }

// Volatile-destination flavors of the stores above; the volatile
// qualifier only prevents compiler reordering/elision of the access.
inline void Atomic::store (jbyte store_value, volatile jbyte* dest) { *dest = store_value; }
inline void Atomic::store (jshort store_value, volatile jshort* dest) { *dest = store_value; }
inline void Atomic::store (jint store_value, volatile jint* dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; }
41
42
43 inline jint Atomic::add (jint add_value, volatile jint* dest) {
44 jint addend = add_value;
45 __asm__ volatile ( "lock xaddl %0,(%2)"
46 : "=r" (addend)
47 : "0" (addend), "r" (dest)
62 __asm__ volatile ( "lock subl $1,(%0)" :
63 : "r" (dest) : "cc", "memory");
64 }
65
66 inline void Atomic::dec_ptr(volatile void* dest) {
67 dec_ptr((volatile intptr_t*)dest);
68 }
69
// Atomically swap exchange_value into *dest, returning the previous
// contents of *dest.  xchg with a memory operand is implicitly locked
// on x86, so no explicit "lock" prefix is required.
inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) {
  // "0" ties the input to the "=r" output register, so after the swap
  // exchange_value holds the old value of *dest.
  __asm__ volatile ( "xchgl (%2),%0"
                    : "=r" (exchange_value)
                    : "0" (exchange_value), "r" (dest)
                    : "memory");
  return exchange_value;
}
77
78 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
79 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
80 }
81
// Compare-and-swap functor specialized for 1-byte operands.
// Returns the value found in *dest before the operation; *dest is
// updated to exchange_value only when it equaled compare_value.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<1>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order /* order */) const {
  STATIC_ASSERT(1 == sizeof(T));
  // cmpxchgb compares AL ("a") with *dest; either way AL ends up holding
  // the previous memory value, returned via "=a".  "q" forces
  // exchange_value into a byte-addressable register.  The lock prefix is
  // a full barrier, hence the order argument is ignored.
  __asm__ volatile ("lock cmpxchgb %1,(%3)"
                    : "=a" (exchange_value)
                    : "q" (exchange_value), "a" (compare_value), "r" (dest)
                    : "cc", "memory");
  return exchange_value;
}
95
// Compare-and-swap functor specialized for 4-byte operands; same
// contract as the 1-byte specialization above (returns the previous
// value of *dest).  The lock prefix makes this a full barrier, so the
// order argument is ignored.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<4>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order /* order */) const {
  STATIC_ASSERT(4 == sizeof(T));
  __asm__ volatile ("lock cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest)
                    : "cc", "memory");
  return exchange_value;
}
109
110 #ifdef AMD64
// On AMD64 an aligned 64-bit store is a single instruction, so plain
// assignment suffices (the 32-bit path below needs an assembly helper).
inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
113
// Atomically add add_value to *dest and return the NEW value.
// lock xaddq leaves the previous value of *dest in `addend` (the "0"
// constraint ties input and output registers), so new = addend + add_value.
inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  intptr_t addend = add_value;
  __asm__ __volatile__ ("lock xaddq %0,(%2)"
                        : "=r" (addend)
                        : "0" (addend), "r" (dest)
                        : "cc", "memory");
  return addend + add_value;
}
122
129 :
130 : "r" (dest)
131 : "cc", "memory");
132 }
133
// Atomically decrement *dest by one.  No result is needed, so a plain
// locked subtract is enough (no xadd required).
inline void Atomic::dec_ptr(volatile intptr_t* dest) {
  __asm__ __volatile__ ("lock subq $1,(%0)"
                        :
                        : "r" (dest)
                        : "cc", "memory");
}
140
// Atomically swap a pointer-sized value into *dest, returning the old
// value.  xchg with a memory operand is implicitly locked on x86.
inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  __asm__ __volatile__ ("xchgq (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
  return exchange_value;
}
148
// Compare-and-swap functor specialized for 8-byte operands (AMD64).
// Returns the previous value of *dest; the comparand travels in RAX
// ("a"), which also carries the old memory value back out.  The lock
// prefix is a full barrier, so the order argument is ignored.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order /* order */) const {
  STATIC_ASSERT(8 == sizeof(T));
  __asm__ __volatile__ ("lock cmpxchgq %1,(%3)"
                        : "=a" (exchange_value)
                        : "r" (exchange_value), "a" (compare_value), "r" (dest)
                        : "cc", "memory");
  return exchange_value;
}
162
163 inline jlong Atomic::load(const volatile jlong* src) { return *src; }
164
165 #else // !AMD64
166
167 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
168 return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest);
169 }
170
171 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) {
172 return (void*)Atomic::add((jint)add_value, (volatile jint*)dest);
173 }
174
175
176 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
177 inc((volatile jint*)dest);
178 }
179
180 inline void Atomic::dec_ptr(volatile intptr_t* dest) {
181 dec((volatile jint*)dest);
182 }
183
184 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
185 return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
186 }
187
extern "C" {
  // 64-bit atomic helpers for 32-bit x86, defined in assembly
  // (linux_x86.s): a 64-bit compare-exchange and a 64-bit move that
  // avoid reading/writing the two 32-bit halves separately.
  jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong);
  void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst);
}
193
// 8-byte compare-and-swap on 32-bit x86: C++ cannot express a 64-bit
// CAS here, so route through the assembly helper via
// cmpxchg_using_helper, which handles the T <-> jlong conversions.
// `order` is not forwarded; the helper is assumed to provide full
// ordering (see linux_x86.s).
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order order) const {
  STATIC_ASSERT(8 == sizeof(T));
  return cmpxchg_using_helper<jlong>(_Atomic_cmpxchg_long, exchange_value, dest, compare_value);
}
203
// Atomic 64-bit load on 32-bit x86: copy through the assembly helper so
// the two 32-bit halves are not read by separate instructions (a torn
// read could otherwise see half of a concurrent store).
inline jlong Atomic::load(const volatile jlong* src) {
  volatile jlong dest;
  _Atomic_move_long(src, &dest);
  return dest;
}

// Atomic 64-bit stores on 32-bit x86: both flavors go through the
// assembly helper so the value is written in one 64-bit transfer.
inline void Atomic::store(jlong store_value, jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}

inline void Atomic::store(jlong store_value, volatile jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, dest);
}
217
218 #endif // AMD64
219
220 #endif // OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
|