/*
 * Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
22 * 23 */ 24 25 #ifndef OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_INLINE_HPP 26 #define OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_INLINE_HPP 27 28 #include "orderAccess_solaris_x86.inline.hpp" 29 #include "runtime/atomic.hpp" 30 #include "runtime/os.hpp" 31 #include "vm_version_x86.hpp" 32 33 inline void Atomic::store (jbyte store_value, jbyte* dest) { *dest = store_value; } 34 inline void Atomic::store (jshort store_value, jshort* dest) { *dest = store_value; } 35 inline void Atomic::store (jint store_value, jint* dest) { *dest = store_value; } 36 37 38 inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; } 39 inline void Atomic::store_ptr(void* store_value, void* dest) { *(void**)dest = store_value; } 40 41 inline void Atomic::store (jbyte store_value, volatile jbyte* dest) { *dest = store_value; } 42 inline void Atomic::store (jshort store_value, volatile jshort* dest) { *dest = store_value; } 43 inline void Atomic::store (jint store_value, volatile jint* dest) { *dest = store_value; } 44 inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; } 45 inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; } 46 47 inline void Atomic::inc (volatile jint* dest) { (void)add (1, dest); } 48 inline void Atomic::inc_ptr(volatile intptr_t* dest) { (void)add_ptr(1, dest); } 49 inline void Atomic::inc_ptr(volatile void* dest) { (void)add_ptr(1, dest); } 50 51 inline void Atomic::dec (volatile jint* dest) { (void)add (-1, dest); } 52 inline void Atomic::dec_ptr(volatile intptr_t* dest) { (void)add_ptr(-1, dest); } 53 inline void Atomic::dec_ptr(volatile void* dest) { (void)add_ptr(-1, dest); } 54 55 // For Sun Studio - implementation is in solaris_x86_[32/64].il. 56 // For gcc - implementation is just below. 
57 58 // The lock prefix can be omitted for certain instructions on uniprocessors; to 59 // facilitate this, os::is_MP() is passed as an additional argument. 64-bit 60 // processors are assumed to be multi-threaded and/or multi-core, so the extra 61 // argument is unnecessary. 62 #ifndef _LP64 63 #define IS_MP_DECL() , int is_mp 64 #define IS_MP_ARG() , (int) os::is_MP() 65 #else 66 #define IS_MP_DECL() 67 #define IS_MP_ARG() 68 #endif // _LP64 69 70 extern "C" { 71 jint _Atomic_add(jint add_value, volatile jint* dest IS_MP_DECL()); 72 jint _Atomic_xchg(jint exchange_value, volatile jint* dest); 73 jint _Atomic_cmpxchg(jint exchange_value, volatile jint* dest, 74 jint compare_value IS_MP_DECL()); 75 jlong _Atomic_cmpxchg_long(jlong exchange_value, volatile jlong* dest, 76 jlong compare_value IS_MP_DECL()); 77 } 78 79 inline jint Atomic::add (jint add_value, volatile jint* dest) { 80 return _Atomic_add(add_value, dest IS_MP_ARG()); 81 } 82 83 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) { 84 return _Atomic_xchg(exchange_value, dest); 85 } 86 87 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value) { 88 return _Atomic_cmpxchg(exchange_value, dest, compare_value IS_MP_ARG()); 89 } 90 91 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value) { 92 return _Atomic_cmpxchg_long(exchange_value, dest, compare_value IS_MP_ARG()); 93 } 94 95 96 #ifdef AMD64 97 inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; } 98 inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; } 99 extern "C" jlong _Atomic_add_long(jlong add_value, volatile jlong* dest); 100 extern "C" jlong _Atomic_xchg_long(jlong exchange_value, volatile jlong* dest); 101 102 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) { 103 return (intptr_t)_Atomic_add_long((jlong)add_value, (volatile jlong*)dest); 104 } 105 106 inline 
void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) { 107 return (void*)_Atomic_add_long((jlong)add_value, (volatile jlong*)dest); 108 } 109 110 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) { 111 return (intptr_t)_Atomic_xchg_long((jlong)exchange_value, (volatile jlong*)dest); 112 } 113 114 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) { 115 return (void*)_Atomic_xchg_long((jlong)exchange_value, (volatile jlong*)dest); 116 } 117 118 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) { 119 return (intptr_t)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value); 120 } 121 122 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value) { 123 return (void*)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value); 124 } 125 126 inline jlong Atomic::load(volatile jlong* src) { return *src; } 127 128 #else // !AMD64 129 130 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) { 131 return (intptr_t)add((jint)add_value, (volatile jint*)dest); 132 } 133 134 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) { 135 return (void*)add((jint)add_value, (volatile jint*)dest); 136 } 137 138 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) { 139 return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest); 140 } 141 142 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) { 143 return (void*)xchg((jint)exchange_value, (volatile jint*)dest); 144 } 145 146 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) { 147 return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value); 148 } 149 150 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, 
void* compare_value) { 151 return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value); 152 } 153 154 extern "C" void _Atomic_load_long(volatile jlong* src, volatile jlong* dst); 155 156 inline jlong Atomic::load(volatile jlong* src) { 157 volatile jlong dest; 158 _Atomic_load_long(src, &dest); 159 return dest; 160 } 161 162 #endif // AMD64 163 164 #ifdef _GNU_SOURCE 165 // Add a lock prefix to an instruction on an MP machine 166 #define LOCK_IF_MP(mp) "cmp $0, " #mp "; je 1f; lock; 1: " 167 168 extern "C" { 169 inline jint _Atomic_add(jint add_value, volatile jint* dest, int mp) { 170 jint addend = add_value; 171 __asm__ volatile ( LOCK_IF_MP(%3) "xaddl %0,(%2)" 172 : "=r" (addend) 173 : "0" (addend), "r" (dest), "r" (mp) 174 : "cc", "memory"); 175 return addend + add_value; 176 } 177 178 #ifdef AMD64 179 inline jlong _Atomic_add_long(jlong add_value, volatile jlong* dest, int mp) { 180 intptr_t addend = add_value; 181 __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)" 182 : "=r" (addend) 183 : "0" (addend), "r" (dest), "r" (mp) 184 : "cc", "memory"); 185 return addend + add_value; 186 } 187 188 inline jlong _Atomic_xchg_long(jlong exchange_value, volatile jlong* dest) { 189 __asm__ __volatile__ ("xchgq (%2),%0" 190 : "=r" (exchange_value) 191 : "0" (exchange_value), "r" (dest) 192 : "memory"); 193 return exchange_value; 194 } 195 196 #endif // AMD64 197 198 inline jint _Atomic_xchg(jint exchange_value, volatile jint* dest) { 199 __asm__ __volatile__ ("xchgl (%2),%0" 200 : "=r" (exchange_value) 201 : "0" (exchange_value), "r" (dest) 202 : "memory"); 203 return exchange_value; 204 } 205 206 inline jint _Atomic_cmpxchg(jint exchange_value, volatile jint* dest, jint compare_value, int mp) { 207 __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)" 208 : "=a" (exchange_value) 209 : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp) 210 : "cc", "memory"); 211 return exchange_value; 212 } 213 214 // This is the interface to the 
atomic instruction in solaris_i486.s. 215 jlong _Atomic_cmpxchg_long_gcc(jlong exchange_value, volatile jlong* dest, jlong compare_value, int mp); 216 217 inline jlong _Atomic_cmpxchg_long(jlong exchange_value, volatile jlong* dest, jlong compare_value, int mp) { 218 #ifdef AMD64 219 __asm__ __volatile__ (LOCK_IF_MP(%4) "cmpxchgq %1,(%3)" 220 : "=a" (exchange_value) 221 : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp) 222 : "cc", "memory"); 223 return exchange_value; 224 #else 225 return _Atomic_cmpxchg_long_gcc(exchange_value, dest, compare_value, os::is_MP()); 226 227 #if 0 228 // The code below does not work presumably because of the bug in gcc 229 // The error message says: 230 // can't find a register in class BREG while reloading asm 231 // However I want to save this code and later replace _Atomic_cmpxchg_long_gcc 232 // with such inline asm code: 233 234 volatile jlong_accessor evl, cvl, rv; 235 evl.long_value = exchange_value; 236 cvl.long_value = compare_value; 237 int mp = os::is_MP(); 238 239 __asm__ volatile ("cmp $0, %%esi\n\t" 240 "je 1f \n\t" 241 "lock\n\t" 242 "1: cmpxchg8b (%%edi)\n\t" 243 : "=a"(cvl.words[0]), "=d"(cvl.words[1]) 244 : "a"(cvl.words[0]), "d"(cvl.words[1]), 245 "b"(evl.words[0]), "c"(evl.words[1]), 246 "D"(dest), "S"(mp) 247 : "cc", "memory"); 248 return cvl.long_value; 249 #endif // if 0 250 #endif // AMD64 251 } 252 } 253 #undef LOCK_IF_MP 254 255 #endif // _GNU_SOURCE 256 257 #endif // OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_INLINE_HPP