/*
 * Copyright (c) 1999, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
22 * 23 */ 24 25 #ifndef OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP 26 #define OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP 27 28 #include "runtime/atomic.hpp" 29 #include "runtime/os.hpp" 30 31 // Implementation of class atomic 32 33 inline void Atomic::store (jbyte store_value, jbyte* dest) { *dest = store_value; } 34 inline void Atomic::store (jshort store_value, jshort* dest) { *dest = store_value; } 35 inline void Atomic::store (jint store_value, jint* dest) { *dest = store_value; } 36 inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; } 37 inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; } 38 inline void Atomic::store_ptr(void* store_value, void* dest) { *(void**)dest = store_value; } 39 40 inline void Atomic::store (jbyte store_value, volatile jbyte* dest) { *dest = store_value; } 41 inline void Atomic::store (jshort store_value, volatile jshort* dest) { *dest = store_value; } 42 inline void Atomic::store (jint store_value, volatile jint* dest) { *dest = store_value; } 43 inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; } 44 inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; } 45 inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; } 46 47 inline void Atomic::inc (volatile jint* dest) { (void)add (1, dest); } 48 inline void Atomic::inc_ptr(volatile intptr_t* dest) { (void)add_ptr(1, dest); } 49 inline void Atomic::inc_ptr(volatile void* dest) { (void)add_ptr(1, dest); } 50 51 inline void Atomic::dec (volatile jint* dest) { (void)add (-1, dest); } 52 inline void Atomic::dec_ptr(volatile intptr_t* dest) { (void)add_ptr(-1, dest); } 53 inline void Atomic::dec_ptr(volatile void* dest) { (void)add_ptr(-1, dest); } 54 55 inline jlong Atomic::load(volatile jlong* src) { return *src; } 56 57 inline jint Atomic::add (jint add_value, 
volatile jint* dest) { 58 intptr_t rv; 59 __asm__ volatile( 60 "1: \n\t" 61 " ld [%2], %%o2\n\t" 62 " add %1, %%o2, %%o3\n\t" 63 " cas [%2], %%o2, %%o3\n\t" 64 " cmp %%o2, %%o3\n\t" 65 " bne 1b\n\t" 66 " nop\n\t" 67 " add %1, %%o2, %0\n\t" 68 : "=r" (rv) 69 : "r" (add_value), "r" (dest) 70 : "memory", "o2", "o3"); 71 return rv; 72 } 73 74 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) { 75 intptr_t rv; 76 #ifdef _LP64 77 __asm__ volatile( 78 "1: \n\t" 79 " ldx [%2], %%o2\n\t" 80 " add %1, %%o2, %%o3\n\t" 81 " casx [%2], %%o2, %%o3\n\t" 82 " cmp %%o2, %%o3\n\t" 83 " bne %%xcc, 1b\n\t" 84 " nop\n\t" 85 " add %1, %%o2, %0\n\t" 86 : "=r" (rv) 87 : "r" (add_value), "r" (dest) 88 : "memory", "o2", "o3"); 89 #else 90 __asm__ volatile( 91 "1: \n\t" 92 " ld [%2], %%o2\n\t" 93 " add %1, %%o2, %%o3\n\t" 94 " cas [%2], %%o2, %%o3\n\t" 95 " cmp %%o2, %%o3\n\t" 96 " bne 1b\n\t" 97 " nop\n\t" 98 " add %1, %%o2, %0\n\t" 99 : "=r" (rv) 100 : "r" (add_value), "r" (dest) 101 : "memory", "o2", "o3"); 102 #endif // _LP64 103 return rv; 104 } 105 106 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) { 107 return (void*)add_ptr((intptr_t)add_value, (volatile intptr_t*)dest); 108 } 109 110 111 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) { 112 intptr_t rv = exchange_value; 113 __asm__ volatile( 114 " swap [%2],%1\n\t" 115 : "=r" (rv) 116 : "0" (exchange_value) /* we use same register as for return value */, "r" (dest) 117 : "memory"); 118 return rv; 119 } 120 121 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) { 122 intptr_t rv = exchange_value; 123 #ifdef _LP64 124 __asm__ volatile( 125 "1:\n\t" 126 " mov %1, %%o3\n\t" 127 " ldx [%2], %%o2\n\t" 128 " casx [%2], %%o2, %%o3\n\t" 129 " cmp %%o2, %%o3\n\t" 130 " bne %%xcc, 1b\n\t" 131 " nop\n\t" 132 " mov %%o2, %0\n\t" 133 : "=r" (rv) 134 : "r" (exchange_value), "r" (dest) 135 : "memory", "o2", "o3"); 136 #else 137 __asm__ volatile( 
138 "swap [%2],%1\n\t" 139 : "=r" (rv) 140 : "0" (exchange_value) /* we use same register as for return value */, "r" (dest) 141 : "memory"); 142 #endif // _LP64 143 return rv; 144 } 145 146 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) { 147 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest); 148 } 149 150 151 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, cmpxchg_memory_order order) { 152 jint rv; 153 __asm__ volatile( 154 " cas [%2], %3, %0" 155 : "=r" (rv) 156 : "0" (exchange_value), "r" (dest), "r" (compare_value) 157 : "memory"); 158 return rv; 159 } 160 161 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) { 162 #ifdef _LP64 163 jlong rv; 164 __asm__ volatile( 165 " casx [%2], %3, %0" 166 : "=r" (rv) 167 : "0" (exchange_value), "r" (dest), "r" (compare_value) 168 : "memory"); 169 return rv; 170 #else 171 volatile jlong_accessor evl, cvl, rv; 172 evl.long_value = exchange_value; 173 cvl.long_value = compare_value; 174 175 __asm__ volatile( 176 " sllx %2, 32, %2\n\t" 177 " srl %3, 0, %3\n\t" 178 " or %2, %3, %2\n\t" 179 " sllx %5, 32, %5\n\t" 180 " srl %6, 0, %6\n\t" 181 " or %5, %6, %5\n\t" 182 " casx [%4], %5, %2\n\t" 183 " srl %2, 0, %1\n\t" 184 " srlx %2, 32, %0\n\t" 185 : "=r" (rv.words[0]), "=r" (rv.words[1]) 186 : "r" (evl.words[0]), "r" (evl.words[1]), "r" (dest), "r" (cvl.words[0]), "r" (cvl.words[1]) 187 : "memory"); 188 189 return rv.long_value; 190 #endif 191 } 192 193 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) { 194 intptr_t rv; 195 #ifdef _LP64 196 __asm__ volatile( 197 " casx [%2], %3, %0" 198 : "=r" (rv) 199 : "0" (exchange_value), "r" (dest), "r" (compare_value) 200 : "memory"); 201 #else 202 __asm__ volatile( 203 " cas [%2], %3, %0" 204 : "=r" (rv) 205 : "0" (exchange_value), "r" 
(dest), "r" (compare_value) 206 : "memory"); 207 #endif // _LP64 208 return rv; 209 } 210 211 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) { 212 return (void*)cmpxchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest, (intptr_t)compare_value, order); 213 } 214 215 #endif // OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP