< prev index next >

src/os_cpu/linux_s390/vm/atomic_linux_s390.hpp

Print this page
rev 13452 : imported patch Atomic_cmpxchg
rev 13453 : imported patch Atomic_add
rev 13454 : [mq]: Atomic_add_v2

*** 80,91 **** // instruction is retried as often as required. // // The return value of the method is the value that was successfully stored. At the // time the caller receives back control, the value in memory may have changed already. ! inline jint Atomic::add(jint inc, volatile jint*dest) { ! unsigned int old, upd; if (VM_Version::has_LoadAndALUAtomicV1()) { __asm__ __volatile__ ( " LGFR 0,%[inc] \n\t" // save increment " LA 3,%[mem] \n\t" // force data address into ARG2 --- 80,104 ---- // instruction is retried as often as required. // // The return value of the method is the value that was successfully stored. At the // time the caller receives back control, the value in memory may have changed already. ! template<size_t byte_size> ! struct Atomic::PlatformAdd ! : Atomic::AddAndFetch<Atomic::PlatformAdd<byte_size> > ! { ! template<typename I, typename D> ! D add_and_fetch(I add_value, D volatile* dest) const; ! }; ! ! template<> ! template<typename I, typename D> ! inline D Atomic::PlatformAdd<4>::add_and_fetch(I inc, D volatile* dest) const { ! STATIC_ASSERT(4 == sizeof(I)); ! STATIC_ASSERT(4 == sizeof(D)); ! ! D old, upd; if (VM_Version::has_LoadAndALUAtomicV1()) { __asm__ __volatile__ ( " LGFR 0,%[inc] \n\t" // save increment " LA 3,%[mem] \n\t" // force data address into ARG2
*** 122,137 **** //---< clobbered >--- : "cc" ); } ! return (jint)upd; } ! inline intptr_t Atomic::add_ptr(intptr_t inc, volatile intptr_t* dest) { ! unsigned long old, upd; if (VM_Version::has_LoadAndALUAtomicV1()) { __asm__ __volatile__ ( " LGR 0,%[inc] \n\t" // save increment " LA 3,%[mem] \n\t" // force data address into ARG2 --- 135,155 ---- //---< clobbered >--- : "cc" ); } ! return upd; } ! template<> ! template<typename I, typename D> ! inline D Atomic::PlatformAdd<8>::add_and_fetch(I inc, D volatile* dest) const { ! STATIC_ASSERT(8 == sizeof(I)); ! STATIC_ASSERT(8 == sizeof(D)); ! ! D old, upd; if (VM_Version::has_LoadAndALUAtomicV1()) { __asm__ __volatile__ ( " LGR 0,%[inc] \n\t" // save increment " LA 3,%[mem] \n\t" // force data address into ARG2
*** 168,184 **** //---< clobbered >--- : "cc" ); } ! return (intptr_t)upd; ! } ! ! inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) { ! return (void*)add_ptr(add_value, (volatile intptr_t*)dest); } //------------ // Atomic::inc //------------ // These methods force the value in memory to be incremented (augmented by 1). --- 186,200 ---- //---< clobbered >--- : "cc" ); } ! return upd; } + template<> + struct Atomic::PlatformAdd<2>: Atomic::AddShortUsingInt {}; //------------ // Atomic::inc //------------ // These methods force the value in memory to be incremented (augmented by 1).
< prev index next >