/*
 * Copyright © 2007  Chris Wilson
 * Copyright © 2009,2010  Red Hat, Inc.
 * Copyright © 2011,2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Contributor(s):
 *      Chris Wilson <chris@chris-wilson.co.uk>
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_ATOMIC_PRIVATE_HH
#define HB_ATOMIC_PRIVATE_HH

#include "hb-private.hh"


/* atomic_int */

/* We need external help for these */

#if defined(hb_atomic_int_impl_add) \
 && defined(hb_atomic_ptr_impl_get) \
 && defined(hb_atomic_ptr_impl_cmpexch)

/* Defined externally, i.e. in config.h; must have typedef'ed hb_atomic_int_impl_t as well. */
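
/* As a non-authoritative illustration (kept in a comment, not compiled), a
 * config.h targeting a compiler with GCC-style __atomic builtins could supply
 * the expected pieces roughly as follows; the _hb_atomic_ptr_impl_cmpexch
 * helper is hypothetical:
 *
 *   typedef int hb_atomic_int_impl_t;
 *   #define HB_ATOMIC_INT_IMPL_INIT(V)        (V)
 *   #define hb_atomic_int_impl_add(AI, V)     __atomic_fetch_add (&(AI), (V), __ATOMIC_ACQ_REL)
 *
 *   #define hb_atomic_ptr_impl_get(P)         __atomic_load_n ((void **) (P), __ATOMIC_ACQUIRE)
 *   static inline bool _hb_atomic_ptr_impl_cmpexch (void **P, void *O, void *N)
 *   { return __atomic_compare_exchange_n (P, &O, N, false, __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE); }
 *   #define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_atomic_ptr_impl_cmpexch ((void **) (P), (void *) (O), (void *) (N))
 *
 * HB_ATOMIC_INT_IMPL_INIT is needed as well, since HB_ATOMIC_INT_INIT below
 * expands it unconditionally. */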


#elif !defined(HB_NO_MT) && (defined(_WIN32) || defined(__CYGWIN__))

#include <windows.h>

/* MinGW has a convoluted history of supporting MemoryBarrier
 * properly.  As such, define a function to wrap the whole
 * thing. */
static inline void _HBMemoryBarrier (void) {
#if !defined(MemoryBarrier)
  long dummy = 0;
  InterlockedExchange (&dummy, 1);
#else
  MemoryBarrier ();
#endif
}

typedef LONG hb_atomic_int_impl_t;
#define HB_ATOMIC_INT_IMPL_INIT(V) (V)
#define hb_atomic_int_impl_add(AI, V)           InterlockedExchangeAdd (&(AI), (V))

#define hb_atomic_ptr_impl_get(P)               (_HBMemoryBarrier (), (void *) *(P))
#define hb_atomic_ptr_impl_cmpexch(P,O,N)       (InterlockedCompareExchangePointer ((void **) (P), (void *) (N), (void *) (O)) == (void *) (O))


#elif !defined(HB_NO_MT) && defined(__APPLE__)

#include <libkern/OSAtomic.h>
#ifdef __MAC_OS_X_MIN_REQUIRED
#include <AvailabilityMacros.h>
#elif defined(__IPHONE_OS_MIN_REQUIRED)
#include <Availability.h>
#endif


typedef int32_t hb_atomic_int_impl_t;
#define HB_ATOMIC_INT_IMPL_INIT(V) (V)
#define hb_atomic_int_impl_add(AI, V)           (OSAtomicAdd32Barrier ((V), &(AI)) - (V))

#define hb_atomic_ptr_impl_get(P)               (OSMemoryBarrier (), (void *) *(P))
#if (MAC_OS_X_VERSION_MIN_REQUIRED > MAC_OS_X_VERSION_10_4 || __IPHONE_OS_VERSION_MIN_REQUIRED >= 20100)
#define hb_atomic_ptr_impl_cmpexch(P,O,N)       OSAtomicCompareAndSwapPtrBarrier ((void *) (O), (void *) (N), (void **) (P))
#else
#if __ppc64__ || __x86_64__ || __aarch64__
#define hb_atomic_ptr_impl_cmpexch(P,O,N)       OSAtomicCompareAndSwap64Barrier ((int64_t) (O), (int64_t) (N), (int64_t*) (P))
#else
#define hb_atomic_ptr_impl_cmpexch(P,O,N)       OSAtomicCompareAndSwap32Barrier ((int32_t) (O), (int32_t) (N), (int32_t*) (P))
#endif
#endif


#elif !defined(HB_NO_MT) && defined(HAVE_INTEL_ATOMIC_PRIMITIVES)

typedef int hb_atomic_int_impl_t;
#define HB_ATOMIC_INT_IMPL_INIT(V) (V)
#define hb_atomic_int_impl_add(AI, V)           __sync_fetch_and_add (&(AI), (V))

#define hb_atomic_ptr_impl_get(P)               (void *) (__sync_synchronize (), *(P))
#define hb_atomic_ptr_impl_cmpexch(P,O,N)       __sync_bool_compare_and_swap ((P), (O), (N))


#elif !defined(HB_NO_MT) && defined(HAVE_SOLARIS_ATOMIC_OPS)

#include <atomic.h>
#include <mbarrier.h>

typedef unsigned int hb_atomic_int_impl_t;
#define HB_ATOMIC_INT_IMPL_INIT(V) (V)
#define hb_atomic_int_impl_add(AI, V)           ( ({__machine_rw_barrier ();}), atomic_add_int_nv (&(AI), (V)) - (V))

#define hb_atomic_ptr_impl_get(P)               ( ({__machine_rw_barrier ();}), (void *) *(P))
#define hb_atomic_ptr_impl_cmpexch(P,O,N)       ( ({__machine_rw_barrier ();}), atomic_cas_ptr ((void **) (P), (void *) (O), (void *) (N)) == (void *) (O) ? true : false)



#elif !defined(HB_NO_MT) && defined(_AIX) && defined(__IBMCPP__)

#include <builtins.h>


/* Wrap the xlC builtins with explicit sync instructions so this backend gets
 * barrier semantics comparable to the barriered variants used above. */
static inline int hb_fetch_and_add(volatile int* AI, int V) {
  __lwsync();
  int result = __fetch_and_add(AI, V);
  __isync();
  return result;
}
static inline int hb_compare_and_swaplp(volatile long* P, long O, long N) {
  __sync();
  int result = __compare_and_swaplp (P, &O, N);
  __sync();
  return result;
}

typedef int hb_atomic_int_impl_t;
#define HB_ATOMIC_INT_IMPL_INIT(V) (V)
#define hb_atomic_int_impl_add(AI, V)           hb_fetch_and_add (&(AI), (V))

#define hb_atomic_ptr_impl_get(P)               (__sync(), (void *) *(P))
#define hb_atomic_ptr_impl_cmpexch(P,O,N)       hb_compare_and_swaplp ((long*)(P), (long)(O), (long)(N))

#elif !defined(HB_NO_MT)

#define HB_ATOMIC_INT_NIL 1 /* Warn that fallback implementation is in use. */

typedef volatile int hb_atomic_int_impl_t;
#define HB_ATOMIC_INT_IMPL_INIT(V) (V)
#define hb_atomic_int_impl_add(AI, V)           (((AI) += (V)) - (V))

#define hb_atomic_ptr_impl_get(P)               ((void *) *(P))
#define hb_atomic_ptr_impl_cmpexch(P,O,N)       (* (void * volatile *) (P) == (void *) (O) ? (* (void * volatile *) (P) = (void *) (N), true) : false)


#else /* HB_NO_MT */

typedef int hb_atomic_int_impl_t;
#define HB_ATOMIC_INT_IMPL_INIT(V)              (V)
#define hb_atomic_int_impl_add(AI, V)           (((AI) += (V)) - (V))

#define hb_atomic_ptr_impl_get(P)               ((void *) *(P))
#define hb_atomic_ptr_impl_cmpexch(P,O,N)       (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)


#endif


#define HB_ATOMIC_INT_INIT(V)           {HB_ATOMIC_INT_IMPL_INIT(V)}

struct hb_atomic_int_t
{
  hb_atomic_int_impl_t v;

  inline void set_unsafe (int v_) { v = v_; }
  inline int get_unsafe (void) const { return v; }
  inline int inc (void) { return hb_atomic_int_impl_add (const_cast<hb_atomic_int_impl_t &> (v),  1); }
  inline int dec (void) { return hb_atomic_int_impl_add (const_cast<hb_atomic_int_impl_t &> (v), -1); }
};
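
/* Illustrative usage sketch (hypothetical code, not part of this header):
 * a minimal reference count built on hb_atomic_int_t.  Every backend's
 * impl_add returns the value *before* the addition, so inc()/dec() do too,
 * and the final release is the call that observes dec () == 1.
 *
 *   struct my_refcount_t {
 *     hb_atomic_int_t ref_count;
 *
 *     inline void init (void)      { ref_count.set_unsafe (1); }
 *     inline void reference (void) { ref_count.inc (); }
 *     inline bool release (void)   { return ref_count.dec () == 1; }
 *   };
 */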


#define hb_atomic_ptr_get(P) hb_atomic_ptr_impl_get(P)
#define hb_atomic_ptr_cmpexch(P,O,N) hb_atomic_ptr_impl_cmpexch((P),(O),(N))
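
/* Illustrative usage sketch (hypothetical names, not part of this header):
 * lazily creating a shared object with the pointer primitives above.  The
 * cmpexch returns true only if it installed the new pointer; a loser of the
 * race destroys its copy and re-reads the winner's pointer.
 *
 *   static my_thing_t *my_thing;  // hypothetical global, NULL-initialized
 *
 *   static inline my_thing_t *
 *   get_my_thing (void)
 *   {
 *   retry:
 *     my_thing_t *thing = (my_thing_t *) hb_atomic_ptr_get (&my_thing);
 *     if (!thing) {
 *       thing = my_thing_create ();  // hypothetical constructor
 *       if (!hb_atomic_ptr_cmpexch (&my_thing, NULL, thing)) {
 *         my_thing_destroy (thing);  // lost the race; use the other thread's
 *         goto retry;
 *       }
 *     }
 *     return thing;
 *   }
 */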


#endif /* HB_ATOMIC_PRIVATE_HH */