/*
 * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_HPP
#define OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_HPP

#include "runtime/os.hpp"

// Implementation of class atomic
//
// BSD/x86 port: atomic primitives implemented with GCC extended inline
// assembly. Aligned loads and stores up to the native word size are
// performed as plain memory accesses; read-modify-write operations use
// LOCK-prefixed instructions (or XCHG, which locks implicitly).
//
// Note: the cmpxchg_memory_order argument is accepted but ignored in
// every overload below — each compare-and-swap is a full-fence operation
// regardless of the requested ordering.

// Plain stores: a single aligned store is sufficient for atomicity on x86
// for operand sizes up to the native word; no barrier is emitted here.
inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

// Same stores, for volatile destinations.
inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }


// Atomically add add_value to *dest and return the NEW value.
// XADD leaves the previous value of *dest in %0 (addend), so the updated
// value is reconstructed as addend + add_value.
inline jint Atomic::add (jint add_value, volatile jint* dest) {
  jint addend = add_value;
  __asm__ volatile ("lock xaddl %0,(%2)"
                    : "=r" (addend)                 // out: old *dest
                    : "0" (addend), "r" (dest)      // in: tied to %0; dest addr
                    : "cc", "memory");
  return addend + add_value;
}

// Atomically increment *dest by one. No value is returned.
inline void Atomic::inc (volatile jint* dest) {
  __asm__ volatile ("lock addl $1,(%0)" :
                    : "r" (dest) : "cc", "memory");
}

// Pointer-width increment on an untyped address; forwards to the
// intptr_t overload (defined per-architecture below).
inline void Atomic::inc_ptr(volatile void* dest) {
  inc_ptr((volatile intptr_t*)dest);
}

// Atomically decrement *dest by one. No value is returned.
inline void Atomic::dec (volatile jint* dest) {
  __asm__ volatile ("lock subl $1,(%0)" :
                    : "r" (dest) : "cc", "memory");
}

// Pointer-width decrement on an untyped address; forwards to the
// intptr_t overload (defined per-architecture below).
inline void Atomic::dec_ptr(volatile void* dest) {
  dec_ptr((volatile intptr_t*)dest);
}

// Atomically exchange *dest with exchange_value and return the old value.
// XCHG with a memory operand asserts the bus lock implicitly, so no LOCK
// prefix is needed (Intel SDM).
inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) {
  __asm__ volatile ("xchgl (%2),%0"
                    : "=r" (exchange_value)               // out: old *dest
                    : "0" (exchange_value), "r" (dest)    // in: tied; dest addr
                    : "memory");
  return exchange_value;
}

// Untyped pointer exchange; forwards to the intptr_t overload.
inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
  return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
}

// Advertise that this platform has a native byte-wide CAS, so shared code
// does not have to emulate it with a word-wide CAS.
#define VM_HAS_SPECIALIZED_CMPXCHG_BYTE
// Byte compare-and-swap: if *dest == compare_value, store exchange_value;
// returns the value of *dest observed by the instruction (== compare_value
// on success). CMPXCHG takes its comparand in AL and leaves the old memory
// value there, hence the "a" constraints. "q" is required for the byte
// operand: on 32-bit x86 only a/b/c/d have byte sub-registers.
// The 'order' argument is ignored; the operation is a full fence.
inline jbyte Atomic::cmpxchg (jbyte exchange_value, volatile jbyte* dest, jbyte compare_value, cmpxchg_memory_order order) {
  __asm__ volatile ("lock cmpxchgb %1,(%3)"
                    : "=a" (exchange_value)
                    : "q" (exchange_value), "a" (compare_value), "r" (dest)
                    : "cc", "memory");
  return exchange_value;
}

// 32-bit compare-and-swap; same contract and register conventions as the
// byte variant above (comparand/result in EAX). 'order' is ignored.
inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, cmpxchg_memory_order order) {
  __asm__ volatile ("lock cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest)
                    : "cc", "memory");
  return exchange_value;
}

#ifdef AMD64
// On 64-bit, a jlong is word-sized, so plain aligned stores are atomic.
inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }

// 64-bit fetch-and-add; mirrors Atomic::add above but with XADDQ.
inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  intptr_t addend = add_value;
  __asm__ __volatile__ ("lock xaddq %0,(%2)"
                        : "=r" (addend)
                        : "0" (addend), "r" (dest)
                        : "cc", "memory");
  return addend + add_value;
}

// Untyped-pointer variant of add_ptr.
inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) {
  return (void*)add_ptr(add_value, (volatile intptr_t*)dest);
}

// 64-bit atomic increment.
inline void Atomic::inc_ptr(volatile intptr_t* dest) {
  __asm__ __volatile__ ("lock addq $1,(%0)"
                        :
                        : "r" (dest)
                        : "cc", "memory");
}

// 64-bit atomic decrement.
inline void Atomic::dec_ptr(volatile intptr_t* dest) {
  __asm__ __volatile__ ("lock subq $1,(%0)"
                        :
                        : "r" (dest)
                        : "cc", "memory");
}

// 64-bit atomic exchange; XCHGQ locks implicitly like XCHGL above.
inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  __asm__ __volatile__ ("xchgq (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
  return exchange_value;
}

// 64-bit compare-and-swap (comparand/result in RAX). 'order' is ignored.
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
  __asm__ __volatile__ ("lock cmpxchgq %1,(%3)"
                        : "=a" (exchange_value)
                        : "r" (exchange_value), "a" (compare_value), "r" (dest)
                        : "cc", "memory");
  return exchange_value;
}

// Pointer-width CAS: on AMD64 intptr_t and jlong are both 64 bits, so
// these simply reuse the jlong cmpxchg above.
inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
  return (intptr_t)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
}

inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) {
  return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
}

// 64-bit load is a plain aligned load on AMD64.
inline jlong Atomic::load(volatile jlong* src) { return *src; }

#else // !AMD64

// On 32-bit x86, intptr_t is 32 bits, so pointer-width operations reuse
// the jint primitives defined above.
inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest);
}

inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) {
  return (void*)Atomic::add((jint)add_value, (volatile jint*)dest);
}


inline void Atomic::inc_ptr(volatile intptr_t* dest) {
  inc((volatile jint*)dest);
}

inline void Atomic::dec_ptr(volatile intptr_t* dest) {
  dec((volatile jint*)dest);
}

inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
}

// 64-bit operations on 32-bit x86 cannot be expressed as single plain
// instructions here, so they are delegated to assembly stubs.
extern "C" {
  // defined in bsd_x86.s
  jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong, bool);
  void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
}

// 64-bit CAS via the external stub; os::is_MP() tells the stub whether a
// LOCK prefix is needed (uniprocessor can skip it). 'order' is ignored.
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
  return _Atomic_cmpxchg_long(exchange_value, dest, compare_value, os::is_MP());
}

// Pointer-width CAS on 32-bit reuses the jint cmpxchg.
inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
  return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
}

inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) {
  return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
}

// Atomic 64-bit load on 32-bit x86: routed through _Atomic_move_long,
// which performs a single 64-bit move (presumably via FP/SSE registers in
// bsd_x86.s — confirm there) so the two halves cannot be torn.
inline jlong Atomic::load(volatile jlong* src) {
  volatile jlong dest;
  _Atomic_move_long(src, &dest);
  return dest;
}

// Atomic 64-bit stores, likewise routed through the stub.
inline void Atomic::store(jlong store_value, jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}

inline void Atomic::store(jlong store_value, volatile jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, dest);
}

#endif // AMD64

#endif // OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_HPP