
src/os_cpu/linux_aarch64/vm/atomic_linux_aarch64.inline.hpp

rev 10933 : 8154736: enhancement of cmpxchg and copy_to_survivor for ppc64
Reviewed-by:
Contributed-by: HORII@jp.ibm.com, mdoerr

*** 86,96 ****
    return (void *) xchg_ptr((intptr_t) exchange_value, (volatile intptr_t*) dest);
  }
  
! inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value)
  {
   return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  }
  
  inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
--- 86,96 ----
    return (void *) xchg_ptr((intptr_t) exchange_value, (volatile intptr_t*) dest);
  }
  
! inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value, memory_order order)
  {
   return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  }
  
  inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
*** 121,145 ****
    intptr_t res = __sync_lock_test_and_set (dest, exchange_value);
    FULL_MEM_BARRIER;
    return res;
  }
  
! inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value)
  {
   return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  }
  
! inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value)
  {
   return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  }
  
! inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value)
  {
    return (void *) cmpxchg_ptr((intptr_t) exchange_value, (volatile intptr_t*) dest,
!                               (intptr_t) compare_value);
  }
  
  inline jlong Atomic::load(volatile jlong* src) { return *src; }
  
  #endif // OS_CPU_LINUX_AARCH64_VM_ATOMIC_LINUX_AARCH64_INLINE_HPP
--- 121,145 ----
    intptr_t res = __sync_lock_test_and_set (dest, exchange_value);
    FULL_MEM_BARRIER;
    return res;
  }
  
! inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value, memory_order order)
  {
   return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  }
  
! inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, memory_order order)
  {
   return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  }
  
! inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value, memory_order order)
  {
    return (void *) cmpxchg_ptr((intptr_t) exchange_value, (volatile intptr_t*) dest,
!                               (intptr_t) compare_value, order);
  }
  
  inline jlong Atomic::load(volatile jlong* src) { return *src; }
  
  #endif // OS_CPU_LINUX_AARCH64_VM_ATOMIC_LINUX_AARCH64_INLINE_HPP
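
Reviewer note (not part of the webrev): the new trailing memory_order argument is accepted but deliberately unused on linux_aarch64, because GCC's __sync_val_compare_and_swap builtin already acts as a full two-way barrier and therefore satisfies the strongest ordering a caller can request. Below is a minimal standalone sketch of that pattern; the enum name and values are illustrative assumptions only, since the actual definition lives in atomic.hpp, which is not part of this file's diff.

  // Standalone sketch (not HotSpot code): an implementation built on GCC's
  // __sync builtins can take an ordering argument and ignore it, because the
  // builtin is always a full barrier and thus never weaker than requested.
  #include <cstdint>

  enum memory_order_sketch {        // hypothetical stand-in for the patch's enum
    order_relaxed,                  // no ordering required by the caller
    order_conservative              // full two-way fence semantics
  };

  inline intptr_t cmpxchg_sketch(intptr_t exchange_value,
                                 volatile intptr_t* dest,
                                 intptr_t compare_value,
                                 memory_order_sketch order) {
    (void) order;                   // always served with full-barrier semantics
    return __sync_val_compare_and_swap(dest, compare_value, exchange_value);
  }

  int main() {
    volatile intptr_t v = 1;
    // Succeeds: v == 1, so v becomes 2 and the old value (1) is returned.
    intptr_t old = cmpxchg_sketch(2, &v, 1, order_conservative);
    return (old == 1 && v == 2) ? 0 : 1;
  }

Ports that can exploit a weaker ordering (ppc64, the motivation for 8154736) may drop surrounding barriers when order permits; this port's behaviour is unchanged.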