< prev index next >

src/os_cpu/bsd_zero/vm/orderAccess_bsd_zero.inline.hpp

Print this page




  23  *
  24  */
  25 
  26 #ifndef OS_CPU_BSD_ZERO_VM_ORDERACCESS_BSD_ZERO_INLINE_HPP
  27 #define OS_CPU_BSD_ZERO_VM_ORDERACCESS_BSD_ZERO_INLINE_HPP
  28 
  29 #include "runtime/orderAccess.hpp"
  30 
// Per-platform memory-barrier macros used by the OrderAccess definitions
// below: FULL_MEM_BARRIER orders all prior accesses against all later ones;
// READ_MEM_BARRIER / WRITE_MEM_BARRIER are the (possibly cheaper) barriers
// used to implement acquire() / release() respectively.
  31 #ifdef ARM
  32 
  33 /*
  34  * ARM Kernel helper for memory barrier.
  35  * Using __asm __volatile ("":::"memory") does not work reliably on ARM
  36  * and the gcc __sync_synchronize(); implementation does not use the kernel
  37  * helper for all gcc versions, so it is likewise unreliable.
  38  */
// Call through the kernel-provided memory-barrier helper at its fixed
// address (see comment above); all three barrier flavors map to it on ARM.
  39 typedef void (__kernel_dmb_t) (void);
  40 #define __kernel_dmb (*(__kernel_dmb_t *) 0xffff0fa0)
  41 
  42 #define FULL_MEM_BARRIER __kernel_dmb()
  43 #define READ_MEM_BARRIER __kernel_dmb()
  44 #define WRITE_MEM_BARRIER __kernel_dmb()
  45 
  46 #else // ARM
  47 
  48 #define FULL_MEM_BARRIER __sync_synchronize()
  49 
  50 #ifdef PPC
  51 
// On PPC use the full "sync" instruction when the toolchain reports that
// lwsync is unavailable (__NO_LWSYNC__); otherwise the lighter "lwsync".
  52 #ifdef __NO_LWSYNC__
  53 #define READ_MEM_BARRIER __asm __volatile ("sync":::"memory")
  54 #define WRITE_MEM_BARRIER __asm __volatile ("sync":::"memory")
  55 #else
  56 #define READ_MEM_BARRIER __asm __volatile ("lwsync":::"memory")
  57 #define WRITE_MEM_BARRIER __asm __volatile ("lwsync":::"memory")
  58 #endif
  59 
  60 #else // PPC
  61 
// Empty asm with a "memory" clobber: a compiler-only barrier (no hardware
// fence). NOTE(review): this presumes the remaining targets are strongly
// ordered enough (TSO-like) for acquire/release -- confirm per platform.
  62 #define READ_MEM_BARRIER __asm __volatile ("":::"memory")
  63 #define WRITE_MEM_BARRIER __asm __volatile ("":::"memory")
  64 
  65 #endif // PPC
  66 
  67 #endif // ARM
  68 


  69 
// Ordering primitives. The three TSO-compatible orderings (loadload,
// storestore, loadstore) are expressed via acquire()/release(); only
// storeload needs the full fence.
  70 inline void OrderAccess::loadload()   { acquire(); }
  71 inline void OrderAccess::storestore() { release(); }
  72 inline void OrderAccess::loadstore()  { acquire(); }
  73 inline void OrderAccess::storeload()  { fence(); }
  74 
// acquire/release/fence map directly onto the platform barrier macros
// defined at the top of this file.
  75 inline void OrderAccess::acquire() {
  76   READ_MEM_BARRIER;
  77 }
  78 
  79 inline void OrderAccess::release() {
  80   WRITE_MEM_BARRIER;
  81 }
  82 
  83 inline void OrderAccess::fence() {
  84   FULL_MEM_BARRIER;
  85 }
  86 
// load_acquire: perform the load FIRST, then erect the acquire barrier, so
// the load cannot be reordered after subsequent memory accesses.
  87 inline jbyte    OrderAccess::load_acquire(volatile jbyte*   p) { jbyte data = *p; acquire(); return data; }
  88 inline jshort   OrderAccess::load_acquire(volatile jshort*  p) { jshort data = *p; acquire(); return data; }
  89 inline jint     OrderAccess::load_acquire(volatile jint*    p) { jint data = *p; acquire(); return data; }
// 64-bit values go through os::atomic_copy64 rather than a plain load --
// presumably to keep the read single-copy-atomic on 32-bit targets; TODO
// confirm against the Zero os::atomic_copy64 implementation.
  90 inline jlong    OrderAccess::load_acquire(volatile jlong*   p) {
  91   jlong tmp;
  92   os::atomic_copy64(p, &tmp);
  93   acquire();
  94   return tmp;
  95 }
  96 inline jubyte    OrderAccess::load_acquire(volatile jubyte*   p) { jubyte data = *p; acquire(); return data; }
  97 inline jushort   OrderAccess::load_acquire(volatile jushort*  p) { jushort data = *p; acquire(); return data; }
  98 inline juint     OrderAccess::load_acquire(volatile juint*    p) { juint data = *p; acquire(); return data; }
  99 inline julong   OrderAccess::load_acquire(volatile julong*  p) {
 100   julong tmp;
 101   os::atomic_copy64(p, &tmp);
 102   acquire();
 103   return tmp;
 104 }
 105 inline jfloat   OrderAccess::load_acquire(volatile jfloat*  p) { jfloat data = *p; acquire(); return data; }
// jdouble is 64-bit, so it takes the atomic_copy64 path like jlong/julong.
 106 inline jdouble  OrderAccess::load_acquire(volatile jdouble* p) {
 107   jdouble tmp;
 108   os::atomic_copy64(p, &tmp);
 109   acquire();
 110   return tmp;
 111 }
 112 
// Pointer-sized variants; the void* overloads cast the pointer-to-volatile
// into a loadable "pointer to volatile pointer" before dereferencing.
 113 inline intptr_t OrderAccess::load_ptr_acquire(volatile intptr_t*   p) {
 114   intptr_t data = *p;
 115   acquire();
 116   return data;
 117 }
 118 inline void*    OrderAccess::load_ptr_acquire(volatile void*       p) {
 119   void *data = *(void* volatile *)p;
 120   acquire();
 121   return data;
 122 }
 123 inline void*    OrderAccess::load_ptr_acquire(const volatile void* p) {
 124   void *data = *(void* const volatile *)p;
 125   acquire();
 126   return data;
 127 }
 128 
// release_store: erect the release barrier FIRST, then perform the store,
// so earlier memory accesses cannot be reordered after the store. 64-bit
// types (jlong/julong/jdouble) store via os::atomic_copy64 -- presumably to
// keep the write single-copy-atomic on 32-bit targets; TODO confirm.
 129 inline void     OrderAccess::release_store(volatile jbyte*   p, jbyte   v) { release(); *p = v; }
 130 inline void     OrderAccess::release_store(volatile jshort*  p, jshort  v) { release(); *p = v; }
 131 inline void     OrderAccess::release_store(volatile jint*    p, jint    v) { release(); *p = v; }
 132 inline void     OrderAccess::release_store(volatile jlong*   p, jlong   v)
 133 { release(); os::atomic_copy64(&v, p); }
 134 inline void     OrderAccess::release_store(volatile jubyte*  p, jubyte  v) { release(); *p = v; }
 135 inline void     OrderAccess::release_store(volatile jushort* p, jushort v) { release(); *p = v; }
 136 inline void     OrderAccess::release_store(volatile juint*   p, juint   v) { release(); *p = v; }
 137 inline void     OrderAccess::release_store(volatile julong*  p, julong  v)
 138 { release(); os::atomic_copy64(&v, p); }
 139 inline void     OrderAccess::release_store(volatile jfloat*  p, jfloat  v) { release(); *p = v; }
 140 inline void     OrderAccess::release_store(volatile jdouble* p, jdouble v)
 141 { release(); os::atomic_copy64(&v, p); }
 142 
// Pointer-sized variants; the void* overload casts to a storable
// "pointer to volatile pointer" before assigning.
 143 inline void     OrderAccess::release_store_ptr(volatile intptr_t* p, intptr_t v) { release(); *p = v; }
 144 inline void     OrderAccess::release_store_ptr(volatile void*     p, void*    v)
 145 { release(); *(void* volatile *)p = v; }
 146 
// store_fence: perform the store FIRST, then a full fence, so the store is
// globally visible before any subsequent memory access. 64-bit types use
// os::atomic_copy64 as in the load_acquire/release_store overloads above.
 147 inline void     OrderAccess::store_fence(jbyte*   p, jbyte   v) { *p = v; fence(); }
 148 inline void     OrderAccess::store_fence(jshort*  p, jshort  v) { *p = v; fence(); }
 149 inline void     OrderAccess::store_fence(jint*    p, jint    v) { *p = v; fence(); }
 150 inline void     OrderAccess::store_fence(jlong*   p, jlong   v) { os::atomic_copy64(&v, p); fence(); }
 151 inline void     OrderAccess::store_fence(jubyte*  p, jubyte  v) { *p = v; fence(); }
 152 inline void     OrderAccess::store_fence(jushort* p, jushort v) { *p = v; fence(); }
 153 inline void     OrderAccess::store_fence(juint*   p, juint   v) { *p = v; fence(); }
 154 inline void     OrderAccess::store_fence(julong*  p, julong  v) { os::atomic_copy64(&v, p); fence(); }
 155 inline void     OrderAccess::store_fence(jfloat*  p, jfloat  v) { *p = v; fence(); }
 156 inline void     OrderAccess::store_fence(jdouble* p, jdouble v) { os::atomic_copy64(&v, p); fence(); }
 157 
// Pointer-sized variants (note: non-volatile destination pointers here).
 158 inline void     OrderAccess::store_ptr_fence(intptr_t* p, intptr_t v) { *p = v; fence(); }
 159 inline void     OrderAccess::store_ptr_fence(void**    p, void*    v) { *p = v; fence(); }
 160 
// release_store_fence: composed as release_store(p, v) followed by a full
// fence -- i.e. barrier, store, barrier.
 161 inline void     OrderAccess::release_store_fence(volatile jbyte*   p, jbyte   v) { release_store(p, v); fence(); }
 162 inline void     OrderAccess::release_store_fence(volatile jshort*  p, jshort  v) { release_store(p, v); fence(); }
 163 inline void     OrderAccess::release_store_fence(volatile jint*    p, jint    v) { release_store(p, v); fence(); }
 164 inline void     OrderAccess::release_store_fence(volatile jlong*   p, jlong   v) { release_store(p, v); fence(); }
 165 inline void     OrderAccess::release_store_fence(volatile jubyte*  p, jubyte  v) { release_store(p, v); fence(); }
 166 inline void     OrderAccess::release_store_fence(volatile jushort* p, jushort v) { release_store(p, v); fence(); }
 167 inline void     OrderAccess::release_store_fence(volatile juint*   p, juint   v) { release_store(p, v); fence(); }
 168 inline void     OrderAccess::release_store_fence(volatile julong*  p, julong  v) { release_store(p, v); fence(); }
 169 inline void     OrderAccess::release_store_fence(volatile jfloat*  p, jfloat  v) { release_store(p, v); fence(); }
 170 inline void     OrderAccess::release_store_fence(volatile jdouble* p, jdouble v) { release_store(p, v); fence(); }
 171 
 172 inline void     OrderAccess::release_store_ptr_fence(volatile intptr_t* p, intptr_t v) { release_store_ptr(p, v); fence(); }
 173 inline void     OrderAccess::release_store_ptr_fence(volatile void*     p, void*    v) { release_store_ptr(p, v); fence(); }
 174 
 175 #endif // OS_CPU_BSD_ZERO_VM_ORDERACCESS_BSD_ZERO_INLINE_HPP


  23  *
  24  */
  25 
  26 #ifndef OS_CPU_BSD_ZERO_VM_ORDERACCESS_BSD_ZERO_INLINE_HPP
  27 #define OS_CPU_BSD_ZERO_VM_ORDERACCESS_BSD_ZERO_INLINE_HPP
  28 
  29 #include "runtime/orderAccess.hpp"
  30 
// Per-platform memory-barrier macros: FULL_MEM_BARRIER orders all memory
// accesses; LIGHT_MEM_BARRIER is the single weaker barrier used for the
// TSO-compatible orderings (see the note below the #endif block).
  31 #ifdef ARM
  32 
  33 /*
  34  * ARM Kernel helper for memory barrier.
  35  * Using __asm __volatile ("":::"memory") does not work reliably on ARM
  36  * and the gcc __sync_synchronize(); implementation does not use the kernel
  37  * helper for all gcc versions, so it is likewise unreliable.
  38  */
// Call through the kernel-provided memory-barrier helper at its fixed
// address (see comment above); both barrier flavors map to it on ARM.
  39 typedef void (__kernel_dmb_t) (void);
  40 #define __kernel_dmb (*(__kernel_dmb_t *) 0xffff0fa0)
  41 
  42 #define FULL_MEM_BARRIER __kernel_dmb()
  43 #define LIGHT_MEM_BARRIER __kernel_dmb()

  44 
  45 #else // ARM
  46 
  47 #define FULL_MEM_BARRIER __sync_synchronize()
  48 
  49 #ifdef PPC
  50 
// On PPC use the full "sync" instruction when the toolchain reports that
// lwsync is unavailable (__NO_LWSYNC__); otherwise the lighter "lwsync".
  51 #ifdef __NO_LWSYNC__
  52 #define LIGHT_MEM_BARRIER __asm __volatile ("sync":::"memory")

  53 #else
  54 #define LIGHT_MEM_BARRIER __asm __volatile ("lwsync":::"memory")

  55 #endif
  56 
  57 #else // PPC
  58 
// Empty asm with a "memory" clobber: a compiler-only barrier (no hardware
// fence). NOTE(review): presumes the remaining targets provide TSO
// semantics in hardware -- confirm per platform.
  59 #define LIGHT_MEM_BARRIER __asm __volatile ("":::"memory")

  60 
  61 #endif // PPC
  62 
  63 #endif // ARM
  64 
  65 // Note: What is meant by LIGHT_MEM_BARRIER is a barrier which is sufficient
  66 // to provide TSO semantics, i.e. StoreStore | LoadLoad | LoadStore.
  67 
// All ordering primitives map directly onto the two macros above: the three
// TSO-compatible orderings and acquire/release use LIGHT_MEM_BARRIER, while
// storeload and fence() require the full barrier.
  68 inline void OrderAccess::loadload()   { LIGHT_MEM_BARRIER; }
  69 inline void OrderAccess::storestore() { LIGHT_MEM_BARRIER; }
  70 inline void OrderAccess::loadstore()  { LIGHT_MEM_BARRIER; }
  71 inline void OrderAccess::storeload()  { FULL_MEM_BARRIER;  }
  72 
  73 inline void OrderAccess::acquire()    { LIGHT_MEM_BARRIER; }
  74 inline void OrderAccess::release()    { LIGHT_MEM_BARRIER; }
  75 inline void OrderAccess::fence()      { FULL_MEM_BARRIER;  }





























































































  76 
// Presumably signals shared code to supply the generalized load_acquire /
// release_store / *_fence implementations (which this version of the file
// no longer defines) in terms of the primitives above -- TODO confirm
// against the shared runtime/orderAccess definitions.
  77 #define VM_HAS_GENERALIZED_ORDER_ACCESS 1

  78 
  79 #endif // OS_CPU_BSD_ZERO_VM_ORDERACCESS_BSD_ZERO_INLINE_HPP
< prev index next >