/*
 * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
22 * 23 */ 24 25 #ifndef OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP 26 #define OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP 27 28 // Implementation of class atomic 29 30 inline void Atomic::store (jbyte store_value, jbyte* dest) { *dest = store_value; } 31 inline void Atomic::store (jshort store_value, jshort* dest) { *dest = store_value; } 32 inline void Atomic::store (jint store_value, jint* dest) { *dest = store_value; } 33 inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; } 34 inline void Atomic::store_ptr(void* store_value, void* dest) { *(void**)dest = store_value; } 35 36 inline void Atomic::store (jbyte store_value, volatile jbyte* dest) { *dest = store_value; } 37 inline void Atomic::store (jshort store_value, volatile jshort* dest) { *dest = store_value; } 38 inline void Atomic::store (jint store_value, volatile jint* dest) { *dest = store_value; } 39 inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; } 40 inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; } 41 42 43 template<size_t byte_size> 44 struct Atomic::PlatformAdd 45 : Atomic::FetchAndAdd<Atomic::PlatformAdd<byte_size> > 46 { 47 template<typename I, typename D> 48 D fetch_and_add(I add_value, D volatile* dest) const; 49 }; 50 51 template<> 52 template<typename I, typename D> 53 inline D Atomic::PlatformAdd<4>::fetch_and_add(I add_value, D volatile* dest) const { 54 STATIC_ASSERT(4 == sizeof(I)); 55 STATIC_ASSERT(4 == sizeof(D)); 56 D old_value; 57 __asm__ volatile ( "lock xaddl %0,(%2)" 58 : "=r" (old_value) 59 : "0" (add_value), "r" (dest) 60 : "cc", "memory"); 61 return old_value; 62 } 63 64 template<> 65 template<typename T> 66 inline T Atomic::PlatformXchg<4>::operator()(T exchange_value, 67 T volatile* dest) const { 68 STATIC_ASSERT(4 == sizeof(T)); 69 __asm__ volatile ( "xchgl (%2),%0" 70 : "=r" (exchange_value) 71 : "0" (exchange_value), "r" (dest) 
72 : "memory"); 73 return exchange_value; 74 } 75 76 template<> 77 template<typename T> 78 inline T Atomic::PlatformCmpxchg<1>::operator()(T exchange_value, 79 T volatile* dest, 80 T compare_value, 81 cmpxchg_memory_order /* order */) const { 82 STATIC_ASSERT(1 == sizeof(T)); 83 __asm__ volatile ("lock cmpxchgb %1,(%3)" 84 : "=a" (exchange_value) 85 : "q" (exchange_value), "a" (compare_value), "r" (dest) 86 : "cc", "memory"); 87 return exchange_value; 88 } 89 90 template<> 91 template<typename T> 92 inline T Atomic::PlatformCmpxchg<4>::operator()(T exchange_value, 93 T volatile* dest, 94 T compare_value, 95 cmpxchg_memory_order /* order */) const { 96 STATIC_ASSERT(4 == sizeof(T)); 97 __asm__ volatile ("lock cmpxchgl %1,(%3)" 98 : "=a" (exchange_value) 99 : "r" (exchange_value), "a" (compare_value), "r" (dest) 100 : "cc", "memory"); 101 return exchange_value; 102 } 103 104 #ifdef AMD64 105 inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; } 106 inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; } 107 108 template<> 109 template<typename I, typename D> 110 inline D Atomic::PlatformAdd<8>::fetch_and_add(I add_value, D volatile* dest) const { 111 STATIC_ASSERT(8 == sizeof(I)); 112 STATIC_ASSERT(8 == sizeof(D)); 113 D old_value; 114 __asm__ __volatile__ ("lock xaddq %0,(%2)" 115 : "=r" (old_value) 116 : "0" (add_value), "r" (dest) 117 : "cc", "memory"); 118 return old_value; 119 } 120 121 template<> 122 template<typename T> 123 inline T Atomic::PlatformXchg<8>::operator()(T exchange_value, 124 T volatile* dest) const { 125 STATIC_ASSERT(8 == sizeof(T)); 126 __asm__ __volatile__ ("xchgq (%2),%0" 127 : "=r" (exchange_value) 128 : "0" (exchange_value), "r" (dest) 129 : "memory"); 130 return exchange_value; 131 } 132 133 template<> 134 template<typename T> 135 inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value, 136 T volatile* dest, 137 T compare_value, 138 cmpxchg_memory_order /* order 
*/) const { 139 STATIC_ASSERT(8 == sizeof(T)); 140 __asm__ __volatile__ ("lock cmpxchgq %1,(%3)" 141 : "=a" (exchange_value) 142 : "r" (exchange_value), "a" (compare_value), "r" (dest) 143 : "cc", "memory"); 144 return exchange_value; 145 } 146 147 inline jlong Atomic::load(const volatile jlong* src) { return *src; } 148 149 #else // !AMD64 150 151 extern "C" { 152 // defined in linux_x86.s 153 jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong); 154 void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst); 155 } 156 157 template<> 158 template<typename T> 159 inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value, 160 T volatile* dest, 161 T compare_value, 162 cmpxchg_memory_order order) const { 163 STATIC_ASSERT(8 == sizeof(T)); 164 return cmpxchg_using_helper<jlong>(_Atomic_cmpxchg_long, exchange_value, dest, compare_value); 165 } 166 167 inline jlong Atomic::load(const volatile jlong* src) { 168 volatile jlong dest; 169 _Atomic_move_long(src, &dest); 170 return dest; 171 } 172 173 inline void Atomic::store(jlong store_value, jlong* dest) { 174 _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest); 175 } 176 177 inline void Atomic::store(jlong store_value, volatile jlong* dest) { 178 _Atomic_move_long((volatile jlong*)&store_value, dest); 179 } 180 181 #endif // AMD64 182 183 #endif // OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP