10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP
26 #define OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP
27
28 // Implementation of class Atomic
29
// Plain stores to non-volatile destinations.  Each is an ordinary C++
// assignment; atomicity relies on the compiler emitting a single naturally
// aligned store instruction for each width (NOTE(review): assumed, not
// enforced here — confirm alignment guarantees at call sites).
30 inline void Atomic::store (jbyte store_value, jbyte* dest) { *dest = store_value; }
31 inline void Atomic::store (jshort store_value, jshort* dest) { *dest = store_value; }
32 inline void Atomic::store (jint store_value, jint* dest) { *dest = store_value; }
33 inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
34 inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
35 inline void Atomic::store_ptr(void* store_value, void* dest) { *(void**)dest = store_value; }  // cast so we store the pointer value, not through it
36
// Same stores for volatile destinations.  The volatile qualifier keeps the
// compiler from caching or reordering the access at the C++ level; no
// hardware barrier is issued here.
37 inline void Atomic::store (jbyte store_value, volatile jbyte* dest) { *dest = store_value; }
38 inline void Atomic::store (jshort store_value, volatile jshort* dest) { *dest = store_value; }
39 inline void Atomic::store (jint store_value, volatile jint* dest) { *dest = store_value; }
40 inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
41 inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
42 inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; }  // reinterpret as pointer slot, preserving volatility
43
// Atomic increment/decrement, built on the atomic add primitives below.
// The fetched result of the add is deliberately discarded (cast to void).
44 inline void Atomic::inc (volatile jint* dest) { (void)add (1, dest); }
45 inline void Atomic::inc_ptr(volatile intptr_t* dest) { (void)add_ptr(1, dest); }
46 inline void Atomic::inc_ptr(volatile void* dest) { (void)add_ptr(1, dest); }
47
48 inline void Atomic::dec (volatile jint* dest) { (void)add (-1, dest); }
49 inline void Atomic::dec_ptr(volatile intptr_t* dest) { (void)add_ptr(-1, dest); }
50 inline void Atomic::dec_ptr(volatile void* dest) { (void)add_ptr(-1, dest); }
51
52 inline jlong Atomic::load(const volatile jlong* src) { return *src; }
53
// Atomically add add_value to *dest and return the NEW value.
// Classic load / compare-and-swap retry loop (32-bit cas).
54 inline jint Atomic::add (jint add_value, volatile jint* dest) {
55 intptr_t rv;
56 __asm__ volatile(
57 "1: \n\t"
58 " ld [%2], %%o2\n\t"  // o2 = current *dest
59 " add %1, %%o2, %%o3\n\t"  // o3 = o2 + add_value (proposed new value)
60 " cas [%2], %%o2, %%o3\n\t"  // if *dest == o2 swap in o3; o3 receives old *dest
61 " cmp %%o2, %%o3\n\t"  // cas succeeded iff the old value matched our snapshot
62 " bne 1b\n\t"  // lost a race: retry from the load
63 " nop\n\t"  // branch delay slot
64 " add %1, %%o2, %0\n\t"  // result = snapshot + add_value (the value we installed)
65 : "=r" (rv)
66 : "r" (add_value), "r" (dest)
67 : "memory", "o2", "o3");  // NOTE(review): cmp clobbers condition codes but "cc" is not listed — confirm gcc/SPARC treats asm as cc-clobbering
68 return rv;
69 }
70
// 64-bit flavor of the add loop: ldx/casx on an intptr_t, branch on the
// 64-bit condition codes (%xcc).  Returns the NEW value.
71 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
72 intptr_t rv;
73 __asm__ volatile(
74 "1: \n\t"
75 " ldx [%2], %%o2\n\t"  // o2 = current *dest (64-bit load)
76 " add %1, %%o2, %%o3\n\t"  // o3 = proposed new value
77 " casx [%2], %%o2, %%o3\n\t"  // 64-bit compare-and-swap; o3 gets old *dest
78 " cmp %%o2, %%o3\n\t"  // success iff snapshot still matched
79 " bne %%xcc, 1b\n\t"  // 64-bit compare: branch on xcc, retry on failure
80 " nop\n\t"  // delay slot
81 " add %1, %%o2, %0\n\t"  // result = snapshot + add_value
82 : "=r" (rv)
83 : "r" (add_value), "r" (dest)
84 : "memory", "o2", "o3");
85 return rv;
86 }
87
// void* convenience overload: forwards to the intptr_t implementation and
// casts the result back.
88 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) {
89 return (void*)add_ptr((intptr_t)add_value, (volatile intptr_t*)dest);
90 }
91
92
// Atomically exchange *dest with exchange_value; returns the OLD value.
// Uses the single swap instruction (NOTE(review): swap is deprecated on
// SPARC V9 in favor of cas — behavior kept as-is; confirm target support).
93 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) {
94 intptr_t rv = exchange_value;
95 __asm__ volatile(
96 " swap [%2],%1\n\t"  // %1 is tied to output %0 via the "0" constraint, so rv receives the old *dest
97 : "=r" (rv)
98 : "0" (exchange_value) /* we use same register as for return value */, "r" (dest)
99 : "memory");
100 return rv;
101 }
102
// 64-bit exchange via a casx retry loop (there is no 64-bit swap).
// Returns the OLD value of *dest.
103 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
104 intptr_t rv = exchange_value;
105 __asm__ volatile(
106 "1:\n\t"
107 " mov %1, %%o3\n\t"  // o3 = value to install (reloaded each iteration, casx clobbers it)
108 " ldx [%2], %%o2\n\t"  // o2 = snapshot of *dest
109 " casx [%2], %%o2, %%o3\n\t"  // swap in o3 iff *dest still == o2; o3 gets old *dest
110 " cmp %%o2, %%o3\n\t"  // success iff snapshot matched
111 " bne %%xcc, 1b\n\t"  // retry on failure (64-bit compare)
112 " nop\n\t"  // delay slot
113 " mov %%o2, %0\n\t"  // return the old value
114 : "=r" (rv)
115 : "r" (exchange_value), "r" (dest)
116 : "memory", "o2", "o3");
117 return rv;
118 }
119
// void* convenience overload: forwards to the intptr_t implementation.
120 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
121 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
122 }
123
124
// Compare-and-swap: if *dest == compare_value install exchange_value;
// always returns the prior contents of *dest.  The order parameter is
// ignored (NOTE(review): presumably relies on SPARC TSO plus the "memory"
// clobber for ordering — confirm against the shared Atomic contract).
125 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, cmpxchg_memory_order order) {
126 jint rv;
127 __asm__ volatile(
128 " cas [%2], %3, %0"  // %0 starts as exchange_value ("0" tie) and receives the old *dest
129 : "=r" (rv)
130 : "0" (exchange_value), "r" (dest), "r" (compare_value)
131 : "memory");
132 return rv;
133 }
134
// 64-bit compare-and-swap (casx); returns the prior *dest.  order is
// ignored, as in the 32-bit variant.
135 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
136 jlong rv;
137 __asm__ volatile(
138 " casx [%2], %3, %0"  // %0 tied to exchange_value on entry; holds old *dest on exit
139 : "=r" (rv)
140 : "0" (exchange_value), "r" (dest), "r" (compare_value)
141 : "memory");
142 return rv;
143 }
144
// Pointer-width compare-and-swap; identical codegen to the jlong variant
// (casx), kept separate for the intptr_t-typed interface.
145 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
146 intptr_t rv;
147 __asm__ volatile(
148 " casx [%2], %3, %0"  // %0 tied to exchange_value on entry; holds old *dest on exit
149 : "=r" (rv)
150 : "0" (exchange_value), "r" (dest), "r" (compare_value)
151 : "memory");
152 return rv;
153 }
154
// void* convenience overload: forwards to the intptr_t implementation,
// passing the (currently unused) order flag through.
155 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) {
156 return (void*)cmpxchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest, (intptr_t)compare_value, order);
157 }
158
159 #endif // OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP
|
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP
26 #define OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP
27
28 // Implementation of class Atomic
29
30
// Templated-API port of the 32-bit atomic add: same ld/cas retry loop as
// the legacy Atomic::add(jint).  Returns the NEW value.
31 template <>
32 inline int32_t Atomic::specialized_add<int32_t>(int32_t add_value, volatile int32_t* dest) {
33 intptr_t rv;
34 __asm__ volatile(
35 "1: \n\t"
36 " ld [%2], %%o2\n\t"  // o2 = current *dest
37 " add %1, %%o2, %%o3\n\t"  // o3 = proposed new value
38 " cas [%2], %%o2, %%o3\n\t"  // install o3 iff *dest still == o2; o3 gets old *dest
39 " cmp %%o2, %%o3\n\t"  // success iff snapshot matched
40 " bne 1b\n\t"  // retry on race
41 " nop\n\t"  // delay slot
42 " add %1, %%o2, %0\n\t"  // result = snapshot + add_value
43 : "=r" (rv)
44 : "r" (add_value), "r" (dest)
45 : "memory", "o2", "o3");  // NOTE(review): cmp sets condition codes; "cc" not listed — confirm harmless on gcc/SPARC
46 return rv;
47 }
48
49
// 64-bit atomic add: ldx/casx retry loop branching on %xcc.
// Returns the NEW value.
50 template <>
51 inline int64_t Atomic::specialized_add<int64_t>(int64_t add_value, volatile int64_t* dest) {
52 intptr_t rv;
53 __asm__ volatile(
54 "1: \n\t"
55 " ldx [%2], %%o2\n\t"  // o2 = current *dest (64-bit)
56 " add %1, %%o2, %%o3\n\t"  // o3 = proposed new value
57 " casx [%2], %%o2, %%o3\n\t"  // 64-bit CAS; o3 gets old *dest
58 " cmp %%o2, %%o3\n\t"  // success iff snapshot matched
59 " bne %%xcc, 1b\n\t"  // 64-bit compare, retry on failure
60 " nop\n\t"  // delay slot
61 " add %1, %%o2, %0\n\t"  // result = snapshot + add_value
62 : "=r" (rv)
63 : "r" (add_value), "r" (dest)
64 : "memory", "o2", "o3");
65 return rv;
66 }
67
68
// 32-bit atomic exchange via the swap instruction; returns the OLD value.
// NOTE(review): swap is deprecated on SPARC V9 — behavior kept as-is.
69 template <>
70 inline int32_t Atomic::specialized_xchg<int32_t>(int32_t exchange_value, volatile int32_t* dest) {
71 intptr_t rv = exchange_value;
72 __asm__ volatile(
73 " swap [%2],%1\n\t"  // %1 tied to output %0 ("0" constraint): rv receives old *dest
74 : "=r" (rv)
75 : "0" (exchange_value) /* we use same register as for return value */, "r" (dest)
76 : "memory");
77 return rv;
78 }
79
80
// 64-bit atomic exchange via a casx retry loop (no 64-bit swap exists).
// Returns the OLD value of *dest.
81 template <>
82 inline int64_t Atomic::specialized_xchg<int64_t>(int64_t exchange_value, volatile int64_t* dest) {
83 intptr_t rv = exchange_value;
84 __asm__ volatile(
85 "1:\n\t"
86 " mov %1, %%o3\n\t"  // refresh o3 each iteration — casx overwrote it on failure
87 " ldx [%2], %%o2\n\t"  // o2 = snapshot of *dest
88 " casx [%2], %%o2, %%o3\n\t"  // install o3 iff *dest still == o2; o3 gets old *dest
89 " cmp %%o2, %%o3\n\t"  // success iff snapshot matched
90 " bne %%xcc, 1b\n\t"  // retry on race (64-bit compare)
91 " nop\n\t"  // delay slot
92 " mov %%o2, %0\n\t"  // return old value
93 : "=r" (rv)
94 : "r" (exchange_value), "r" (dest)
95 : "memory", "o2", "o3");
96 return rv;
97 }
98
99
// 32-bit compare-and-swap; returns the prior *dest.  The order parameter
// is ignored (NOTE(review): presumably relies on SPARC TSO + the "memory"
// clobber — confirm against the shared Atomic contract).
100 template <>
101 inline int32_t Atomic::specialized_cmpxchg<int32_t>(int32_t exchange_value, volatile int32_t* dest, int32_t compare_value, cmpxchg_memory_order order) {
102 int32_t rv;
103 __asm__ volatile(
104 " cas [%2], %3, %0"  // %0 tied to exchange_value on entry; holds old *dest on exit
105 : "=r" (rv)
106 : "0" (exchange_value), "r" (dest), "r" (compare_value)
107 : "memory");
108 return rv;
109 }
110
111
// 64-bit compare-and-swap (casx); returns the prior *dest.  order is
// ignored, as in the 32-bit specialization.
112 template <>
113 inline int64_t Atomic::specialized_cmpxchg<int64_t>(int64_t exchange_value, volatile int64_t* dest, int64_t compare_value, cmpxchg_memory_order order) {
114 int64_t rv;
115 __asm__ volatile(
116 " casx [%2], %3, %0"  // %0 tied to exchange_value on entry; holds old *dest on exit
117 : "=r" (rv)
118 : "0" (exchange_value), "r" (dest), "r" (compare_value)
119 : "memory");
120 return rv;
121 }
122
123 #endif // OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP
|