
src/os_cpu/linux_x86/vm/atomic_linux_x86.hpp

rev 13487 : imported patch add_linux_x86


  23  */
  24 
  25 #ifndef OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
  26 #define OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
  27 
  28 // Implementation of class atomic
  29 
  30 inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
  31 inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
  32 inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
  33 inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
  34 inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }
  35 
  36 inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
  37 inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
  38 inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
  39 inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
  40 inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }
  41 
  42 
  43 inline jint     Atomic::add    (jint     add_value, volatile jint*     dest) {
  44   jint addend = add_value;
  45   __asm__ volatile (  "lock xaddl %0,(%2)"
  46                     : "=r" (addend)
  47                     : "0" (addend), "r" (dest)
  48                     : "cc", "memory");
  49   return addend + add_value;
  50 }
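
The lock xaddl above atomically adds add_value to *dest and leaves the previous contents of *dest in the addend register, so returning addend + add_value hands the caller the freshly stored value. A minimal sketch of the same add-and-fetch contract, written with the GCC/Clang __atomic builtins instead of inline assembly (illustrative only, not HotSpot code):

#include <stdint.h>

// Illustrative sketch: __atomic_fetch_add returns the value *dest held before
// the addition, just like the register result of xaddl, so adding the operand
// once more yields the updated value that Atomic::add is specified to return.
inline int32_t add_and_fetch_sketch(int32_t add_value, volatile int32_t* dest) {
  int32_t old_value = __atomic_fetch_add(dest, add_value, __ATOMIC_SEQ_CST);
  return old_value + add_value;  // the value now stored at *dest
}
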
  51 
  52 inline void Atomic::inc    (volatile jint*     dest) {
  53   __asm__ volatile (  "lock addl $1,(%0)" :
  54                     : "r" (dest) : "cc", "memory");
  55 }
  56 
  57 inline void Atomic::inc_ptr(volatile void*     dest) {
  58   inc_ptr((volatile intptr_t*)dest);
  59 }
  60 
  61 inline void Atomic::dec    (volatile jint*     dest) {
  62   __asm__ volatile (  "lock subl $1,(%0)" :
  63                     : "r" (dest) : "cc", "memory");
  64 }
  65 
  66 inline void Atomic::dec_ptr(volatile void*     dest) {
  67   dec_ptr((volatile intptr_t*)dest);
  68 }
  69 


  94 }
  95 
  96 template<>
  97 template<typename T>
  98 inline T Atomic::PlatformCmpxchg<4>::operator()(T exchange_value,
  99                                                 T volatile* dest,
 100                                                 T compare_value,
 101                                                 cmpxchg_memory_order /* order */) const {
 102   STATIC_ASSERT(4 == sizeof(T));
 103   __asm__ volatile ("lock cmpxchgl %1,(%3)"
 104                     : "=a" (exchange_value)
 105                     : "r" (exchange_value), "a" (compare_value), "r" (dest)
 106                     : "cc", "memory");
 107   return exchange_value;
 108 }
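
lock cmpxchgl compares EAX (loaded from compare_value through the "a" constraint) with *dest; on a match it stores exchange_value, otherwise it loads the current contents of *dest into EAX. EAX is returned either way, so the swap succeeded exactly when the returned value equals compare_value. Callers needing a read-modify-write other than add typically wrap this in a retry loop; a hedged sketch of such a loop, with the __atomic compare-exchange builtin standing in for the assembly above (illustrative only, the function name is made up):

#include <stdint.h>

// Illustrative CAS retry loop with the semantics of PlatformCmpxchg<4>:
// retry until the value observed before computing the update is still the
// value in memory at the moment of the swap.
inline void atomic_or_sketch(volatile int32_t* dest, int32_t bits) {
  int32_t observed = *dest;
  for (;;) {
    int32_t expected = observed;
    int32_t desired  = observed | bits;
    // Stores 'desired' and returns true iff *dest still equals 'expected';
    // otherwise writes the current value of *dest back into 'expected'.
    if (__atomic_compare_exchange_n(dest, &expected, desired,
                                    /*weak=*/false,
                                    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)) {
      return;
    }
    observed = expected;  // lost the race; retry with the fresh value
  }
}
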
 109 
 110 #ifdef AMD64
 111 inline void Atomic::store    (jlong    store_value, jlong*    dest) { *dest = store_value; }
 112 inline void Atomic::store    (jlong    store_value, volatile jlong*    dest) { *dest = store_value; }
 113 
 114 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
 115   intptr_t addend = add_value;
 116   __asm__ __volatile__ ("lock xaddq %0,(%2)"
 117                         : "=r" (addend)
 118                         : "0" (addend), "r" (dest)
 119                         : "cc", "memory");
 120   return addend + add_value;
 121 }
 122 
 123 inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
 124   return (void*)add_ptr(add_value, (volatile intptr_t*)dest);
 125 }
 126 
 127 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
 128   __asm__ __volatile__ ("lock addq $1,(%0)"
 129                         :
 130                         : "r" (dest)
 131                         : "cc", "memory");
 132 }
 133 
 134 inline void Atomic::dec_ptr(volatile intptr_t* dest) {
 135   __asm__ __volatile__ ("lock subq $1,(%0)"
 136                         :
 137                         : "r" (dest)
 138                         : "cc", "memory");
 139 }
 140 
 141 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
 142   __asm__ __volatile__ ("xchgq (%2),%0"
 143                         : "=r" (exchange_value)
 144                         : "0" (exchange_value), "r" (dest)
 145                         : "memory");
 146   return exchange_value;
 147 }
 148 
 149 template<>
 150 template<typename T>
 151 inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
 152                                                 T volatile* dest,
 153                                                 T compare_value,
 154                                                 cmpxchg_memory_order /* order */) const {
 155   STATIC_ASSERT(8 == sizeof(T));
 156   __asm__ __volatile__ ("lock cmpxchgq %1,(%3)"
 157                         : "=a" (exchange_value)
 158                         : "r" (exchange_value), "a" (compare_value), "r" (dest)
 159                         : "cc", "memory");
 160   return exchange_value;
 161 }
 162 
 163 inline jlong Atomic::load(const volatile jlong* src) { return *src; }
 164 
 165 #else // !AMD64
 166 
 167 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
 168   return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest);
 169 }
 170 
 171 inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
 172   return (void*)Atomic::add((jint)add_value, (volatile jint*)dest);
 173 }
 174 
 175 
 176 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
 177   inc((volatile jint*)dest);
 178 }
 179 
 180 inline void Atomic::dec_ptr(volatile intptr_t* dest) {
 181   dec((volatile jint*)dest);
 182 }
 183 
 184 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
 185   return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
 186 }
 187 
 188 extern "C" {
 189   // defined in linux_x86.s
 190   jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong);
 191   void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst);
 192 }
 193 
 194 template<>

src/os_cpu/linux_x86/vm/atomic_linux_x86.hpp (new version, rev 13487 : imported patch add_linux_x86)

  23  */
  24 
  25 #ifndef OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
  26 #define OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
  27 
  28 // Implementation of class atomic
  29 
  30 inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
  31 inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
  32 inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
  33 inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
  34 inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }
  35 
  36 inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
  37 inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
  38 inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
  39 inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
  40 inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }
  41 
  42 
  43 template<size_t byte_size>
  44 struct Atomic::PlatformAdd
  45   : Atomic::FetchAndAdd<Atomic::PlatformAdd<byte_size> >
  46 {
  47   template<typename I, typename D>
  48   D fetch_and_add(I add_value, D volatile* dest) const;
  49 };
  50 
  51 template<>
  52 template<typename I, typename D>
  53 inline D Atomic::PlatformAdd<4>::fetch_and_add(I add_value, D volatile* dest) const {
  54   STATIC_ASSERT(4 == sizeof(I));
  55   STATIC_ASSERT(4 == sizeof(D));
  56   D old_value;
  57   __asm__ volatile (  "lock xaddl %0,(%2)"
  58                     : "=r" (old_value)
  59                     : "0" (add_value), "r" (dest)
  60                     : "cc", "memory");
  61   return old_value;
  62 }
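
Unlike the old Atomic::add, this platform specialization returns only the old value; the shared FetchAndAdd base class named on line 45 (a CRTP wrapper defined in the shared atomic code) is what turns the fetch_and_add primitive into the add-and-fetch result that Atomic::add hands back to callers. A simplified sketch of that dispatch, assuming the wrapper simply re-adds the operand (not the actual shared-code implementation; the names below are made up):

#include <stdint.h>

// Sketch of the CRTP idea: the platform class only knows how to
// fetch-and-add, and the wrapper derives the add-and-fetch result.
template<typename Derived>
struct FetchAndAddSketch {
  template<typename I, typename D>
  D operator()(I add_value, D volatile* dest) const {
    const Derived& platform = static_cast<const Derived&>(*this);
    D old_value = platform.fetch_and_add(add_value, dest);
    return old_value + add_value;  // value now stored at *dest
  }
};

// Hypothetical platform class plugged into the wrapper, for illustration;
// the builtin stands in for the lock xaddl assembly.
struct PlatformAddSketch : FetchAndAddSketch<PlatformAddSketch> {
  template<typename I, typename D>
  D fetch_and_add(I add_value, D volatile* dest) const {
    return __atomic_fetch_add(dest, add_value, __ATOMIC_SEQ_CST);
  }
};
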
  63 
  64 inline void Atomic::inc    (volatile jint*     dest) {
  65   __asm__ volatile (  "lock addl $1,(%0)" :
  66                     : "r" (dest) : "cc", "memory");
  67 }
  68 
  69 inline void Atomic::inc_ptr(volatile void*     dest) {
  70   inc_ptr((volatile intptr_t*)dest);
  71 }
  72 
  73 inline void Atomic::dec    (volatile jint*     dest) {
  74   __asm__ volatile (  "lock subl $1,(%0)" :
  75                     : "r" (dest) : "cc", "memory");
  76 }
  77 
  78 inline void Atomic::dec_ptr(volatile void*     dest) {
  79   dec_ptr((volatile intptr_t*)dest);
  80 }
  81 


 106 }
 107 
 108 template<>
 109 template<typename T>
 110 inline T Atomic::PlatformCmpxchg<4>::operator()(T exchange_value,
 111                                                 T volatile* dest,
 112                                                 T compare_value,
 113                                                 cmpxchg_memory_order /* order */) const {
 114   STATIC_ASSERT(4 == sizeof(T));
 115   __asm__ volatile ("lock cmpxchgl %1,(%3)"
 116                     : "=a" (exchange_value)
 117                     : "r" (exchange_value), "a" (compare_value), "r" (dest)
 118                     : "cc", "memory");
 119   return exchange_value;
 120 }
 121 
 122 #ifdef AMD64
 123 inline void Atomic::store    (jlong    store_value, jlong*    dest) { *dest = store_value; }
 124 inline void Atomic::store    (jlong    store_value, volatile jlong*    dest) { *dest = store_value; }
 125 
 126 template<>
 127 template<typename I, typename D>
 128 inline D Atomic::PlatformAdd<8>::fetch_and_add(I add_value, D volatile* dest) const {
 129   STATIC_ASSERT(8 == sizeof(I));
 130   STATIC_ASSERT(8 == sizeof(D));
 131   D old_value;
 132   __asm__ __volatile__ ("lock xaddq %0,(%2)"
 133                         : "=r" (old_value)
 134                         : "0" (add_value), "r" (dest)
 135                         : "cc", "memory");
 136   return old_value;
 137 }
 138 
 139 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
 140   __asm__ __volatile__ ("lock addq $1,(%0)"
 141                         :
 142                         : "r" (dest)
 143                         : "cc", "memory");
 144 }
 145 
 146 inline void Atomic::dec_ptr(volatile intptr_t* dest) {
 147   __asm__ __volatile__ ("lock subq $1,(%0)"
 148                         :
 149                         : "r" (dest)
 150                         : "cc", "memory");
 151 }
 152 
 153 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
 154   __asm__ __volatile__ ("xchgq (%2),%0"
 155                         : "=r" (exchange_value)
 156                         : "0" (exchange_value), "r" (dest)
 157                         : "memory");
 158   return exchange_value;
 159 }
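
An xchg with a memory operand is implicitly locked on x86, so the exchange above needs no explicit lock prefix, only a compiler memory clobber. The same swap expressed with a builtin, as an illustrative equivalent (not HotSpot code):

#include <stdint.h>

// Illustrative sketch: atomically store exchange_value into *dest and
// return the value that was there before, like xchgq above.
inline intptr_t xchg_ptr_sketch(intptr_t exchange_value, volatile intptr_t* dest) {
  return __atomic_exchange_n(dest, exchange_value, __ATOMIC_SEQ_CST);
}
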
 160 
 161 template<>
 162 template<typename T>
 163 inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
 164                                                 T volatile* dest,
 165                                                 T compare_value,
 166                                                 cmpxchg_memory_order /* order */) const {
 167   STATIC_ASSERT(8 == sizeof(T));
 168   __asm__ __volatile__ ("lock cmpxchgq %1,(%3)"
 169                         : "=a" (exchange_value)
 170                         : "r" (exchange_value), "a" (compare_value), "r" (dest)
 171                         : "cc", "memory");
 172   return exchange_value;
 173 }
 174 
 175 inline jlong Atomic::load(const volatile jlong* src) { return *src; }
 176 
 177 #else // !AMD64
 178 
 179 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
 180   inc((volatile jint*)dest);
 181 }
 182 
 183 inline void Atomic::dec_ptr(volatile intptr_t* dest) {
 184   dec((volatile jint*)dest);
 185 }
 186 
 187 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
 188   return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
 189 }
 190 
 191 extern "C" {
 192   // defined in linux_x86.s
 193   jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong);
 194   void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst);
 195 }
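
IA-32 has no plain instruction that reads or writes a 64-bit value atomically through the integer registers, so the jlong compare-and-swap and 64-bit moves are delegated to hand-written assembly in linux_x86.s (built around cmpxchg8b and FPU moves). One portable way to get the same 64-bit atomicity on a 32-bit target is to compare-and-swap the value with itself, as in this hedged sketch using the __sync builtin (illustrative only; it assumes a CPU with cmpxchg8b and, unlike the FPU-based _Atomic_move_long, it requires the location to be writable):

#include <stdint.h>

// Illustrative sketch, not the linux_x86.s implementation: on IA-32 the
// 8-byte __sync builtin compiles to a locked cmpxchg8b, and a compare-and-swap
// of a value with itself performs an atomic 64-bit read.
inline int64_t load_long_atomically_sketch(const volatile int64_t* src) {
  volatile int64_t* p = const_cast<volatile int64_t*>(src);
  return __sync_val_compare_and_swap(p, (int64_t)0, (int64_t)0);
}
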
 196 
 197 template<>