1 /* 2 * Copyright (c) 1999, 2014, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 * 23 */ 24 25 #ifndef OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_INLINE_HPP 26 #define OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_INLINE_HPP 27 28 #include "runtime/atomic.hpp" 29 #include "runtime/os.hpp" 30 31 inline void Atomic::store (jbyte store_value, jbyte* dest) { *dest = store_value; } 32 inline void Atomic::store (jshort store_value, jshort* dest) { *dest = store_value; } 33 inline void Atomic::store (jint store_value, jint* dest) { *dest = store_value; } 34 35 36 inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; } 37 inline void Atomic::store_ptr(void* store_value, void* dest) { *(void**)dest = store_value; } 38 39 inline void Atomic::store (jbyte store_value, volatile jbyte* dest) { *dest = store_value; } 40 inline void Atomic::store (jshort store_value, volatile jshort* dest) { *dest = store_value; } 41 inline void Atomic::store (jint store_value, volatile jint* dest) { *dest = store_value; } 42 inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; } 43 inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; } 44 45 inline void Atomic::inc (volatile jint* dest) { (void)add (1, dest); } 46 inline void Atomic::inc_ptr(volatile intptr_t* dest) { (void)add_ptr(1, dest); } 47 inline void Atomic::inc_ptr(volatile void* dest) { (void)add_ptr(1, dest); } 48 49 inline void Atomic::dec (volatile jint* dest) { (void)add (-1, dest); } 50 inline void Atomic::dec_ptr(volatile intptr_t* dest) { (void)add_ptr(-1, dest); } 51 inline void Atomic::dec_ptr(volatile void* dest) { (void)add_ptr(-1, dest); } 52 53 // For Sun Studio - implementation is in solaris_x86_[32/64].il. 54 // For gcc - implementation is just below. 55 56 // The lock prefix can be omitted for certain instructions on uniprocessors; to 57 // facilitate this, os::is_MP() is passed as an additional argument. 
64-bit 58 // processors are assumed to be multi-threaded and/or multi-core, so the extra 59 // argument is unnecessary. 60 #ifndef _LP64 61 #define IS_MP_DECL() , int is_mp 62 #define IS_MP_ARG() , (int) os::is_MP() 63 #else 64 #define IS_MP_DECL() 65 #define IS_MP_ARG() 66 #endif // _LP64 67 68 extern "C" { 69 jint _Atomic_add(jint add_value, volatile jint* dest IS_MP_DECL()); 70 jint _Atomic_xchg(jint exchange_value, volatile jint* dest); 71 jbyte _Atomic_cmpxchg_byte(jbyte exchange_value, volatile jbyte* dest, 72 jbyte compare_value IS_MP_DECL()); 73 jint _Atomic_cmpxchg(jint exchange_value, volatile jint* dest, 74 jint compare_value IS_MP_DECL()); 75 jlong _Atomic_cmpxchg_long(jlong exchange_value, volatile jlong* dest, 76 jlong compare_value IS_MP_DECL()); 77 } 78 79 inline jint Atomic::add (jint add_value, volatile jint* dest) { 80 return _Atomic_add(add_value, dest IS_MP_ARG()); 81 } 82 83 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) { 84 return _Atomic_xchg(exchange_value, dest); 85 } 86 87 #define VM_HAS_SPECIALIZED_CMPXCHG_BYTE 88 inline jbyte Atomic::cmpxchg (jbyte exchange_value, volatile jbyte* dest, jbyte compare_value, memory_order order) { 89 return _Atomic_cmpxchg_byte(exchange_value, dest, compare_value IS_MP_ARG()); 90 } 91 92 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, memory_order order) { 93 return _Atomic_cmpxchg(exchange_value, dest, compare_value IS_MP_ARG()); 94 } 95 96 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, memory_order order) { 97 return _Atomic_cmpxchg_long(exchange_value, dest, compare_value IS_MP_ARG()); 98 } 99 100 101 #ifdef AMD64 102 inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; } 103 inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; } 104 extern "C" jlong _Atomic_add_long(jlong add_value, volatile jlong* dest); 105 extern "C" jlong 
_Atomic_xchg_long(jlong exchange_value, volatile jlong* dest); 106 107 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) { 108 return (intptr_t)_Atomic_add_long((jlong)add_value, (volatile jlong*)dest); 109 } 110 111 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) { 112 return (void*)_Atomic_add_long((jlong)add_value, (volatile jlong*)dest); 113 } 114 115 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) { 116 return (intptr_t)_Atomic_xchg_long((jlong)exchange_value, (volatile jlong*)dest); 117 } 118 119 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) { 120 return (void*)_Atomic_xchg_long((jlong)exchange_value, (volatile jlong*)dest); 121 } 122 123 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, memory_order order) { 124 return (intptr_t)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order); 125 } 126 127 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, memory_order order) { 128 return (void*)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order); 129 } 130 131 inline jlong Atomic::load(volatile jlong* src) { return *src; } 132 133 #else // !AMD64 134 135 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) { 136 return (intptr_t)add((jint)add_value, (volatile jint*)dest); 137 } 138 139 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) { 140 return (void*)add((jint)add_value, (volatile jint*)dest); 141 } 142 143 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) { 144 return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest); 145 } 146 147 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) { 148 return (void*)xchg((jint)exchange_value, (volatile jint*)dest); 149 } 

// 32-bit cmpxchg_ptr delegates to the jint cmpxchg wrapper, which forwards
// 'order' (ultimately ignored on x86 — cmpxchg provides conservative ordering).
inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, memory_order order) {
  return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
}

inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, memory_order order) {
  return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
}

// Copies a jlong in one indivisible 64-bit transfer; implemented in the
// platform assembly (.il/.s) files, since a plain 64-bit access compiled
// for 32-bit x86 would be split into two 32-bit accesses.
extern "C" void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);

// jlong load/store on 32-bit go through _Atomic_move_long so the full
// 64 bits are read/written atomically.
inline jlong Atomic::load(volatile jlong* src) {
  volatile jlong dest;
  _Atomic_move_long(src, &dest);
  return dest;
}

inline void Atomic::store(jlong store_value, jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}

inline void Atomic::store(jlong store_value, volatile jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, dest);
}

#endif // AMD64

// gcc builds: inline-assembly implementations of the primitives declared
// above (Sun Studio builds get them from the .il files instead).
#ifdef _GNU_SOURCE
// Add a lock prefix to an instruction on an MP machine: compares the 'mp'
// operand with 0 and jumps over the 'lock' prefix when it is zero.
#define LOCK_IF_MP(mp) "cmp $0, " #mp "; je 1f; lock; 1: "

extern "C" {
  // Atomic fetch-and-add via (lock) xaddl; returns the NEW value.
  inline jint _Atomic_add(jint add_value, volatile jint* dest, int mp) {
    jint addend = add_value;
    // xaddl leaves the old *dest in 'addend'; add add_value back to
    // produce the updated value.
    __asm__ volatile (  LOCK_IF_MP(%3) "xaddl %0,(%2)"
                    : "=r" (addend)
                    : "0" (addend), "r" (dest), "r" (mp)
                    : "cc", "memory");
    return addend + add_value;
  }

#ifdef AMD64
  // 64-bit fetch-and-add via (lock) xaddq; returns the NEW value.
  inline jlong _Atomic_add_long(jlong add_value, volatile jlong* dest, int mp) {
    intptr_t addend = add_value;
    __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)"
                        : "=r" (addend)
                        : "0" (addend), "r" (dest), "r" (mp)
                        : "cc", "memory");
    return addend + add_value;
  }

  // 64-bit exchange; xchg with a memory operand is implicitly locked on
  // x86, so no lock prefix is required.  Returns the old *dest value.
  inline jlong _Atomic_xchg_long(jlong exchange_value, volatile jlong* dest) {
    __asm__ __volatile__ ("xchgq (%2),%0"
                          : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
    return exchange_value;
  }

#endif // AMD64

  // 32-bit exchange (implicitly locked, see above); returns the old value.
  inline jint _Atomic_xchg(jint exchange_value, volatile jint* dest) {
    __asm__ __volatile__ ("xchgl (%2),%0"
                          : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
    return exchange_value;
  }

  // Compare-and-exchange on a jint: cmpxchgl compares *dest with eax
  // (compare_value) and stores exchange_value on match; the old *dest
  // value is returned in eax either way.
  inline jint _Atomic_cmpxchg(jint exchange_value, volatile jint* dest, jint compare_value, int mp) {
    __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
    return exchange_value;
  }


  // Byte-sized compare-and-exchange.  The "q" constraint restricts the
  // source to a byte-addressable register, as required by cmpxchgb.
  inline jbyte _Atomic_cmpxchg_byte(jbyte exchange_value, volatile jbyte* dest, jbyte compare_value, int mp) {
    __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgb %1,(%3)"
                    : "=a" (exchange_value)
                    : "q" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
    return exchange_value;
  }

  // This is the interface to the atomic instruction in solaris_i486.s.
237 jlong _Atomic_cmpxchg_long_gcc(jlong exchange_value, volatile jlong* dest, jlong compare_value, int mp); 238 239 inline jlong _Atomic_cmpxchg_long(jlong exchange_value, volatile jlong* dest, jlong compare_value, int mp) { 240 #ifdef AMD64 241 __asm__ __volatile__ (LOCK_IF_MP(%4) "cmpxchgq %1,(%3)" 242 : "=a" (exchange_value) 243 : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp) 244 : "cc", "memory"); 245 return exchange_value; 246 #else 247 return _Atomic_cmpxchg_long_gcc(exchange_value, dest, compare_value, os::is_MP()); 248 249 #if 0 250 // The code below does not work presumably because of the bug in gcc 251 // The error message says: 252 // can't find a register in class BREG while reloading asm 253 // However I want to save this code and later replace _Atomic_cmpxchg_long_gcc 254 // with such inline asm code: 255 256 volatile jlong_accessor evl, cvl, rv; 257 evl.long_value = exchange_value; 258 cvl.long_value = compare_value; 259 int mp = os::is_MP(); 260 261 __asm__ volatile ("cmp $0, %%esi\n\t" 262 "je 1f \n\t" 263 "lock\n\t" 264 "1: cmpxchg8b (%%edi)\n\t" 265 : "=a"(cvl.words[0]), "=d"(cvl.words[1]) 266 : "a"(cvl.words[0]), "d"(cvl.words[1]), 267 "b"(evl.words[0]), "c"(evl.words[1]), 268 "D"(dest), "S"(mp) 269 : "cc", "memory"); 270 return cvl.long_value; 271 #endif // if 0 272 #endif // AMD64 273 } 274 } 275 #undef LOCK_IF_MP 276 277 #endif // _GNU_SOURCE 278 279 #endif // OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_INLINE_HPP