1 /* 2 * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 *
 */

#ifndef OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_HPP
#define OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_HPP

// Implementation of class atomic for BSD on x86/x86_64.
//
// The plain stores below are simple assignments: they rely on the x86
// guarantee that naturally aligned loads/stores up to the native word size
// are atomic, so no LOCK prefix is needed.  Any ordering beyond that is the
// caller's responsibility.

inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }


// Atomic add.  The FetchAndAdd CRTP base class synthesizes the
// add-and-fetch flavor on top of fetch_and_add(), which returns the OLD
// value of *dest (that is what LOCK XADD naturally produces).
template<size_t byte_size>
struct Atomic::PlatformAdd
  : Atomic::FetchAndAdd<Atomic::PlatformAdd<byte_size> >
{
  template<typename I, typename D>
  D fetch_and_add(I add_value, D volatile* dest) const;
};

// 4-byte fetch-and-add: LOCK XADDL swaps %0 with (%2) and adds, leaving the
// previous memory value in %0 ("0" ties add_value to the output register).
template<>
template<typename I, typename D>
inline D Atomic::PlatformAdd<4>::fetch_and_add(I add_value, D volatile* dest) const {
  STATIC_ASSERT(4 == sizeof(I));
  STATIC_ASSERT(4 == sizeof(D));
  D old_value;
  __asm__ volatile (  "lock xaddl %0,(%2)"
                    : "=r" (old_value)
                    : "0" (add_value), "r" (dest)
                    : "cc", "memory");
  return old_value;
}

// 2-byte add has no dedicated x86 implementation here; it is emulated on
// top of the 4-byte primitive by the shared AddShortUsingInt helper.
template<>
struct Atomic::PlatformAdd<2>: Atomic::AddShortUsingInt {};

// Atomic 32-bit increment; no value is returned.
inline void Atomic::inc    (volatile jint*     dest) {
  __asm__ volatile (  "lock addl $1,(%0)" :
                    : "r" (dest) : "cc", "memory");
}

// Pointer-sized increment on an untyped location: forward to the
// intptr_t overload (defined below, per-width under #ifdef AMD64).
inline void Atomic::inc_ptr(volatile void*     dest) {
  inc_ptr((volatile intptr_t*)dest);
}

// Atomic 32-bit decrement; no value is returned.
inline void Atomic::dec    (volatile jint*     dest) {
  __asm__ volatile (  "lock subl $1,(%0)" :
                    : "r" (dest) : "cc", "memory");
}

inline void Atomic::dec_ptr(volatile void*     dest) {
  dec_ptr((volatile intptr_t*)dest);
}

// Atomic 32-bit exchange; returns the previous value of *dest.  XCHG with
// a memory operand is implicitly locked, so no explicit LOCK prefix is
// written here.
inline jint Atomic::xchg    (jint     exchange_value, volatile jint*     dest) {
  __asm__ volatile (  "xchgl (%2),%0"
                    : "=r" (exchange_value)
                    : "0" (exchange_value), "r" (dest)
                    : "memory");
  return exchange_value;
}

inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
  return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
}

// 1-byte compare-and-exchange.  CMPXCHG implicitly uses the accumulator:
// compare_value goes in via the "a" constraint and the value actually
// found in memory comes back out in "=a" (aliased onto exchange_value for
// the return).  "q" forces exchange_value into a byte-addressable register
// as required by CMPXCHGB.  The memory_order parameter is ignored: the
// LOCK'ed instruction already provides full ordering on x86.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<1>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order /* order */) const {
  STATIC_ASSERT(1 == sizeof(T));
  __asm__ volatile (  "lock cmpxchgb %1,(%3)"
                    : "=a" (exchange_value)
                    : "q" (exchange_value), "a" (compare_value), "r" (dest)
                    : "cc", "memory");
  return exchange_value;
}

// 4-byte compare-and-exchange; same register protocol as the 1-byte form,
// but any general register is acceptable for the new value.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<4>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order /* order */) const {
  STATIC_ASSERT(4 == sizeof(T));
  __asm__ volatile (  "lock cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest)
                    : "cc", "memory");
  return exchange_value;
}

#ifdef AMD64
// On 64-bit, aligned 8-byte loads/stores are atomic, so jlong store/load
// are plain memory accesses.
inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }

// 8-byte fetch-and-add (64-bit only): same scheme as the 4-byte form.
template<>
template<typename I, typename D>
inline D Atomic::PlatformAdd<8>::fetch_and_add(I add_value, D volatile* dest) const {
  STATIC_ASSERT(8 == sizeof(I));
  STATIC_ASSERT(8 == sizeof(D));
  D old_value;
  __asm__ __volatile__ (  "lock xaddq %0,(%2)"
                        : "=r" (old_value)
                        : "0" (add_value), "r" (dest)
                        : "cc", "memory");
  return old_value;
}

// Pointer-sized (8-byte) increment/decrement.
inline void Atomic::inc_ptr(volatile intptr_t* dest) {
  __asm__ __volatile__ (  "lock addq $1,(%0)"
                        :
                        : "r" (dest)
                        : "cc", "memory");
}

inline void Atomic::dec_ptr(volatile intptr_t* dest) {
  __asm__ __volatile__ (  "lock subq $1,(%0)"
                        :
                        : "r" (dest)
                        : "cc", "memory");
}

// 8-byte exchange; returns the previous value of *dest (XCHG is
// implicitly locked).
inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  __asm__ __volatile__ ("xchgq (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
  return exchange_value;
}

// 8-byte compare-and-exchange (64-bit only); same accumulator protocol as
// the 4-byte form.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order /* order */) const {
  STATIC_ASSERT(8 == sizeof(T));
  __asm__ __volatile__ (  "lock cmpxchgq %1,(%3)"
                        : "=a" (exchange_value)
                        : "r" (exchange_value), "a" (compare_value), "r" (dest)
                        : "cc", "memory");
  return exchange_value;
}

inline jlong Atomic::load(const volatile jlong* src) { return *src; }

#else // !AMD64

// On 32-bit, pointer-sized operations are simply the 32-bit jint
// primitives under a cast.
inline void Atomic::inc_ptr(volatile intptr_t* dest) {
  inc((volatile jint*)dest);
}

inline void Atomic::dec_ptr(volatile intptr_t* dest) {
  dec((volatile jint*)dest);
}

inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
}

// 64-bit operations cannot be done with a single plain instruction on
// 32-bit x86; they are delegated to assembly helpers.
extern "C" {
  // defined in bsd_x86.s
  jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong, bool);
  void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst);
}

// 8-byte compare-and-exchange on 32-bit: route through the external
// assembly helper via cmpxchg_using_helper (which adapts T <-> jlong and
// supplies the helper's trailing bool argument).  The memory_order
// parameter is accepted but not used here.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order order) const {
  STATIC_ASSERT(8 == sizeof(T));
  return cmpxchg_using_helper<jlong>(_Atomic_cmpxchg_long, exchange_value, dest, compare_value);
}

// Atomic 64-bit load on 32-bit x86: performed as a single atomic move by
// the assembly helper (a plain C load could tear).
inline jlong Atomic::load(const volatile jlong* src) {
  volatile jlong dest;
  _Atomic_move_long(src, &dest);
  return dest;
}

// Atomic 64-bit stores on 32-bit x86, likewise via the helper.
inline void Atomic::store(jlong store_value, jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}

inline void Atomic::store(jlong store_value, volatile jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, dest);
}

#endif // AMD64

#endif // OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_HPP