
src/os_cpu/linux_ppc/vm/atomic_linux_ppc.hpp

rev 13452 : imported patch Atomic_cmpxchg
rev 13453 : imported patch Atomic_add
rev 13454 : [mq]: Atomic_add_v2


  76 //
  77 // - release         orders Store|Store,       (maps to lwsync)
  78 //                           Load|Store
  79 // - acquire         orders  Load|Store,       (maps to lwsync)
  80 //                           Load|Load
  81 // - fence           orders Store|Store,       (maps to sync)
  82 //                           Load|Store,
  83 //                           Load|Load,
  84 //                          Store|Load
  85 //
  86 
  87 #define strasm_sync                       "\n  sync    \n"
  88 #define strasm_lwsync                     "\n  lwsync  \n"
  89 #define strasm_isync                      "\n  isync   \n"
  90 #define strasm_release                    strasm_lwsync
  91 #define strasm_acquire                    strasm_lwsync
  92 #define strasm_fence                      strasm_sync
  93 #define strasm_nobarrier                  ""
  94 #define strasm_nobarrier_clobber_memory   ""
  95 
  96 inline jint     Atomic::add    (jint     add_value, volatile jint*     dest) {
  97 
  98   unsigned int result;
  99 
 100   __asm__ __volatile__ (
 101     strasm_lwsync
 102     "1: lwarx   %0,  0, %2    \n"
 103     "   add     %0, %0, %1    \n"
 104     "   stwcx.  %0,  0, %2    \n"
 105     "   bne-    1b            \n"
 106     strasm_isync
 107     : /*%0*/"=&r" (result)
 108     : /*%1*/"r" (add_value), /*%2*/"r" (dest)
 109     : "cc", "memory" );
 110 
 111   return (jint) result;
 112 }
 113 
 114 
 115 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
 116 
 117   long result;
 118 
 119   __asm__ __volatile__ (
 120     strasm_lwsync
 121     "1: ldarx   %0,  0, %2    \n"
 122     "   add     %0, %0, %1    \n"
 123     "   stdcx.  %0,  0, %2    \n"
 124     "   bne-    1b            \n"
 125     strasm_isync
 126     : /*%0*/"=&r" (result)
 127     : /*%1*/"r" (add_value), /*%2*/"r" (dest)
 128     : "cc", "memory" );
 129 
 130   return (intptr_t) result;
 131 }
 132 
 133 inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
 134   return (void*)add_ptr(add_value, (volatile intptr_t*)dest);
 135 }
 136
 137 
 138 inline void Atomic::inc    (volatile jint*     dest) {
 139 
 140   unsigned int temp;
 141 
 142   __asm__ __volatile__ (
 143     strasm_nobarrier
 144     "1: lwarx   %0,  0, %2    \n"
 145     "   addic   %0, %0,  1    \n"
 146     "   stwcx.  %0,  0, %2    \n"
 147     "   bne-    1b            \n"
 148     strasm_nobarrier
 149     : /*%0*/"=&r" (temp), "=m" (*dest)
 150     : /*%2*/"r" (dest), "m" (*dest)
 151     : "cc" strasm_nobarrier_clobber_memory);
 152 
 153 }
 154 
 155 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
 156 




  76 //
  77 // - release         orders Store|Store,       (maps to lwsync)
  78 //                           Load|Store
  79 // - acquire         orders  Load|Store,       (maps to lwsync)
  80 //                           Load|Load
  81 // - fence           orders Store|Store,       (maps to sync)
  82 //                           Load|Store,
  83 //                           Load|Load,
  84 //                          Store|Load
  85 //
  86 
  87 #define strasm_sync                       "\n  sync    \n"
  88 #define strasm_lwsync                     "\n  lwsync  \n"
  89 #define strasm_isync                      "\n  isync   \n"
  90 #define strasm_release                    strasm_lwsync
  91 #define strasm_acquire                    strasm_lwsync
  92 #define strasm_fence                      strasm_sync
  93 #define strasm_nobarrier                  ""
  94 #define strasm_nobarrier_clobber_memory   ""
  95 
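
A minimal sketch of how these string macros are meant to be used: they are spliced into inline-asm templates as adjacent string literals, so the chosen barrier is emitted in the same asm statement as the memory access it orders. The function name release_store_sketch and its signature are illustrative only, not part of this file or of the patch.

inline void release_store_sketch(int value, volatile int* dest) {
  // strasm_release expands to "\n  lwsync  \n", ordering all prior
  // loads and stores before the following store (Store|Store, Load|Store).
  __asm__ __volatile__ (
    strasm_release
    "  stw  %1, %0  \n"
    : "=m" (*dest)
    : "r" (value)
    : "memory");
}
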
  96 template<size_t byte_size>
  97 struct Atomic::PlatformAdd
  98   : Atomic::AddAndFetch<Atomic::PlatformAdd<byte_size> >
  99 {
 100   template<typename I, typename D>
 101   D add_and_fetch(I add_value, D volatile* dest) const;
 102 };
 103 
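
For orientation, here is a self-contained sketch, not the shared HotSpot source, of the dispatch this declaration presumably plugs into: a generic add entry point selects PlatformAdd<sizeof(D)> and reaches add_and_fetch through the AddAndFetch CRTP base. The names AddAndFetchSketch, PlatformAddSketch, and generic_add are illustrative, and __sync_add_and_fetch stands in for the lwarx/stwcx. loop below.

#include <cstddef>

template<typename Derived>
struct AddAndFetchSketch {
  template<typename I, typename D>
  D operator()(I add_value, D volatile* dest) const {
    // Forward to the size-specific platform implementation.
    return static_cast<const Derived*>(this)->add_and_fetch(add_value, dest);
  }
};

template<size_t byte_size>
struct PlatformAddSketch;   // specialized per operand size, as in this file

template<>
struct PlatformAddSketch<4> : AddAndFetchSketch<PlatformAddSketch<4> > {
  template<typename I, typename D>
  D add_and_fetch(I add_value, D volatile* dest) const {
    // Stand-in for the platform 4-byte atomic add.
    return __sync_add_and_fetch(dest, add_value);
  }
};

template<typename I, typename D>
inline D generic_add(I add_value, D volatile* dest) {
  return PlatformAddSketch<sizeof(D)>()(add_value, dest);  // size-based dispatch
}

With these sketch types, generic_add(1, &counter) on a 4-byte counter resolves to the <4> specialization at compile time and returns the updated value, which mirrors what the shared Atomic::add is expected to do with PlatformAdd.
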
 104 template<>
 105 template<typename I, typename D>
 106 inline D Atomic::PlatformAdd<4>::add_and_fetch(I add_value, D volatile* dest) const {
 107   STATIC_ASSERT(4 == sizeof(I));
 108   STATIC_ASSERT(4 == sizeof(D));
 109 
 110   D result;
 111 
 112   __asm__ __volatile__ (
 113     strasm_lwsync
 114     "1: lwarx   %0,  0, %2    \n"
 115     "   add     %0, %0, %1    \n"
 116     "   stwcx.  %0,  0, %2    \n"
 117     "   bne-    1b            \n"
 118     strasm_isync
 119     : /*%0*/"=&r" (result)
 120     : /*%1*/"r" (add_value), /*%2*/"r" (dest)
 121     : "cc", "memory" );
 122 
 123   return result;
 124 }
 125 
 126 
 127 template<>
 128 template<typename I, typename D>
 129 inline D Atomic::PlatformAdd<8>::add_and_fetch(I add_value, D volatile* dest) const {
 130   STATIC_ASSERT(8 == sizeof(I));
 131   STATIC_ASSERT(8 == sizeof(D));
 132 
 133   D result;
 134 
 135   __asm__ __volatile__ (
 136     strasm_lwsync
 137     "1: ldarx   %0,  0, %2    \n"
 138     "   add     %0, %0, %1    \n"
 139     "   stdcx.  %0,  0, %2    \n"
 140     "   bne-    1b            \n"
 141     strasm_isync
 142     : /*%0*/"=&r" (result)
 143     : /*%1*/"r" (add_value), /*%2*/"r" (dest)
 144     : "cc", "memory" );
 145 
 146   return result;
 147 }
 148 
 149 template<>
 150 struct Atomic::PlatformAdd<2>: Atomic::AddShortUsingInt {};
 151 
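
The PlatformAdd<2> specialization above delegates to Atomic::AddShortUsingInt from the shared code, which is not shown in this file. Presumably it uses the usual trick of performing a 4-byte atomic add on the aligned 32-bit word containing the 16-bit field; a rough, hedged sketch follows. The name add_short_using_int_sketch is hypothetical, VM_LITTLE_ENDIAN stands in for whatever endianness macro the build defines, and __sync_add_and_fetch stands in for the platform 4-byte add_and_fetch.

#include <stdint.h>
#include <assert.h>

inline int16_t add_short_using_int_sketch(int16_t add_value, volatile int16_t* dest) {
  // The 16-bit field must occupy the most significant half of a naturally
  // aligned 32-bit word, so a carry out of the field falls off the top of
  // the word and cannot corrupt the neighbouring 16 bits.
#ifdef VM_LITTLE_ENDIAN
  assert(((uintptr_t)dest & 0x3) == 0x2 && "short must be the high half of its word");
  volatile int32_t* word = (volatile int32_t*)(dest - 1);
#else
  assert(((uintptr_t)dest & 0x3) == 0x0 && "short must be the high half of its word");
  volatile int32_t* word = (volatile int32_t*)dest;
#endif
  // 4-byte atomic add with the addend shifted into the high half.
  int32_t new_word = __sync_add_and_fetch(word, (int32_t)add_value << 16);
  return (int16_t)(new_word >> 16);
}
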
 152 inline void Atomic::inc    (volatile jint*     dest) {
 153 
 154   unsigned int temp;
 155 
 156   __asm__ __volatile__ (
 157     strasm_nobarrier
 158     "1: lwarx   %0,  0, %2    \n"
 159     "   addic   %0, %0,  1    \n"
 160     "   stwcx.  %0,  0, %2    \n"
 161     "   bne-    1b            \n"
 162     strasm_nobarrier
 163     : /*%0*/"=&r" (temp), "=m" (*dest)
 164     : /*%2*/"r" (dest), "m" (*dest)
 165     : "cc" strasm_nobarrier_clobber_memory);
 166 
 167 }
 168 
 169 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
 170 

