// Original version: each atomic read-modify-write takes a lock prefix only when
// os::is_MP() reports a multiprocessor (via the LOCK_IF_MP macro); the updated
// version of the header follows further below.

#ifndef OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_HPP
#define OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_HPP

#include "runtime/os.hpp"

// Implementation of class atomic

inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }


// Adding a lock prefix to an instruction on MP machine
// (compares the mp operand with 0; on a uniprocessor the je jumps forward to
// the local label 1 and skips the lock prefix, otherwise execution falls
// through and lock applies to the following instruction)
#define LOCK_IF_MP(mp) "cmp $0, " #mp "; je 1f; lock; 1: "

inline jint Atomic::add (jint add_value, volatile jint* dest) {
  jint addend = add_value;
  int mp = os::is_MP();
  __asm__ volatile (  LOCK_IF_MP(%3) "xaddl %0,(%2)"
                    : "=r" (addend)
                    : "0" (addend), "r" (dest), "r" (mp)
                    : "cc", "memory");
  return addend + add_value;
}

inline void Atomic::inc (volatile jint* dest) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%1) "addl $1,(%0)" :
                    : "r" (dest), "r" (mp) : "cc", "memory");
}

inline void Atomic::inc_ptr(volatile void* dest) {
  inc_ptr((volatile intptr_t*)dest);
}

inline void Atomic::dec (volatile jint* dest) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%1) "subl $1,(%0)" :
                    : "r" (dest), "r" (mp) : "cc", "memory");
}

inline void Atomic::dec_ptr(volatile void* dest) {
  dec_ptr((volatile intptr_t*)dest);
}

inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) {
  __asm__ volatile (  "xchgl (%2),%0"
                    : "=r" (exchange_value)
                    : "0" (exchange_value), "r" (dest)
                    : "memory");
  return exchange_value;
}

inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
  return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
}

#define VM_HAS_SPECIALIZED_CMPXCHG_BYTE
inline jbyte Atomic::cmpxchg (jbyte exchange_value, volatile jbyte* dest, jbyte compare_value, cmpxchg_memory_order order) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgb %1,(%3)"
                    : "=a" (exchange_value)
                    : "q" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
  return exchange_value;
}

inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, cmpxchg_memory_order order) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
  return exchange_value;
}

#ifdef AMD64
inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  intptr_t addend = add_value;
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)"
                        : "=r" (addend)
                        : "0" (addend), "r" (dest), "r" (mp)
                        : "cc", "memory");
  return addend + add_value;
}

inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) {
  return (void*)add_ptr(add_value, (volatile intptr_t*)dest);
}

inline void Atomic::inc_ptr(volatile intptr_t* dest) {
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%1) "addq $1,(%0)"
                        :
                        : "r" (dest), "r" (mp)
                        : "cc", "memory");
}

inline void Atomic::dec_ptr(volatile intptr_t* dest) {
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%1) "subq $1,(%0)"
                        :
                        : "r" (dest), "r" (mp)
                        : "cc", "memory");
}

inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  __asm__ __volatile__ ("xchgq (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
  return exchange_value;
}

inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) {
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%4) "cmpxchgq %1,(%3)"
                        : "=a" (exchange_value)
                        : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                        : "cc", "memory");
  return exchange_value;
}

inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
  return (intptr_t)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
}

inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) {
  return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
}

inline jlong Atomic::load(volatile jlong* src) { return *src; }

#else // !AMD64

inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest);
}

// ============================================================================
// Updated version of the same header: the LOCK_IF_MP macro and the
// os::is_MP() checks are removed, and the lock prefix is emitted
// unconditionally.
// ============================================================================

#ifndef OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_HPP
#define OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_HPP

#include "runtime/os.hpp"

// Implementation of class atomic

inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }

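// In this version the lock prefix is emitted unconditionally instead of being
// guarded by LOCK_IF_MP/os::is_MP(): lock is what makes the read-modify-write
// atomic on a multiprocessor, and on a uniprocessor it is merely redundant,
// which lets the conditional branch be dropped.  In add() below, xadd
// exchanges its operands before adding, so after the instruction `addend`
// holds the previous value of *dest and addend + add_value is the updated
// value that add() returns.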
"cc", "memory"); 51 return addend + add_value; 52 } 53 54 inline void Atomic::inc (volatile jint* dest) { 55 __asm__ volatile ( "lock addl $1,(%0)" : 56 : "r" (dest) : "cc", "memory"); 57 } 58 59 inline void Atomic::inc_ptr(volatile void* dest) { 60 inc_ptr((volatile intptr_t*)dest); 61 } 62 63 inline void Atomic::dec (volatile jint* dest) { 64 __asm__ volatile ( "lock subl $1,(%0)" : 65 : "r" (dest) : "cc", "memory"); 66 } 67 68 inline void Atomic::dec_ptr(volatile void* dest) { 69 dec_ptr((volatile intptr_t*)dest); 70 } 71 72 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) { 73 __asm__ volatile ( "xchgl (%2),%0" 74 : "=r" (exchange_value) 75 : "0" (exchange_value), "r" (dest) 76 : "memory"); 77 return exchange_value; 78 } 79 80 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) { 81 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest); 82 } 83 84 #define VM_HAS_SPECIALIZED_CMPXCHG_BYTE 85 inline jbyte Atomic::cmpxchg (jbyte exchange_value, volatile jbyte* dest, jbyte compare_value, cmpxchg_memory_order order) { 86 __asm__ volatile ( "lock cmpxchgb %1,(%3)" 87 : "=a" (exchange_value) 88 : "q" (exchange_value), "a" (compare_value), "r" (dest) 89 : "cc", "memory"); 90 return exchange_value; 91 } 92 93 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, cmpxchg_memory_order order) { 94 __asm__ volatile ( "lock cmpxchgl %1,(%3)" 95 : "=a" (exchange_value) 96 : "r" (exchange_value), "a" (compare_value), "r" (dest) 97 : "cc", "memory"); 98 return exchange_value; 99 } 100 101 #ifdef AMD64 102 inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; } 103 inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; } 104 105 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) { 106 intptr_t addend = add_value; 107 __asm__ __volatile__ ( "lock xaddq %0,(%2)" 108 : "=r" (addend) 109 : "0" (addend), "r" (dest) 110 : "cc", "memory"); 111 return addend + add_value; 112 } 113 114 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) { 115 return (void*)add_ptr(add_value, (volatile intptr_t*)dest); 116 } 117 118 inline void Atomic::inc_ptr(volatile intptr_t* dest) { 119 __asm__ __volatile__ ( "lock addq $1,(%0)" 120 : 121 : "r" (dest), "r" (mp) 122 : "cc", "memory"); 123 } 124 125 inline void Atomic::dec_ptr(volatile intptr_t* dest) { 126 __asm__ __volatile__ ( "lock subq $1,(%0)" 127 : 128 : "r" (dest) 129 : "cc", "memory"); 130 } 131 132 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) { 133 __asm__ __volatile__ ("xchgq (%2),%0" 134 : "=r" (exchange_value) 135 : "0" (exchange_value), "r" (dest) 136 : "memory"); 137 return exchange_value; 138 } 139 140 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, cmpxchg_memory_order order) { 141 __asm__ __volatile__ ( "lock cmpxchgq %1,(%3)" 142 : "=a" (exchange_value) 143 : "r" (exchange_value), "a" (compare_value), "r" (dest) 144 : "cc", "memory"); 145 return exchange_value; 146 } 147 148 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) { 149 return (intptr_t)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order); 150 } 151 152 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) { 153 return 
inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, cmpxchg_memory_order order) {
  return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
}

inline jlong Atomic::load(volatile jlong* src) { return *src; }

#else // !AMD64

inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest);
}
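The cmpxchg variants above return the value that was actually observed at *dest, which is what callers use to drive a retry loop. The following is a minimal standalone sketch (not part of this header) of that pattern; the names cas32 and cas_increment are illustrative only, and the inline assembly simply mirrors the 32-bit cmpxchg above.

#include <cstdint>

// Compare-and-swap sketch in the style of Atomic::cmpxchg(jint, ...): returns
// the value that was in *dest when the instruction executed.
inline int32_t cas32(int32_t exchange_value, volatile int32_t* dest, int32_t compare_value) {
  __asm__ volatile ("lock cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest)
                    : "cc", "memory");
  return exchange_value;
}

// Typical caller-side retry loop: keep attempting the swap until the value we
// compared against is the value we actually observed.
inline void cas_increment(volatile int32_t* counter) {
  int32_t old_value = *counter;
  while (true) {
    int32_t observed = cas32(old_value + 1, counter, old_value);
    if (observed == old_value) break;   // our value was installed
    old_value = observed;               // lost the race; retry against the new value
  }
}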