98 #elif defined(__IPHONE_OS_MIN_REQUIRED)
99 #include <Availability.h>
100 #endif
101
102
103 typedef int32_t hb_atomic_int_impl_t;
104 #define hb_atomic_int_impl_add(AI, V) (OSAtomicAdd32Barrier ((V), &(AI)) - (V))
105
106 #define hb_atomic_ptr_impl_get(P) (OSMemoryBarrier (), (void *) *(P))
107 #if (MAC_OS_X_VERSION_MIN_REQUIRED > MAC_OS_X_VERSION_10_4 || __IPHONE_VERSION_MIN_REQUIRED >= 20100)
108 #define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwapPtrBarrier ((void *) (O), (void *) (N), (void **) (P))
109 #else
110 #if __ppc64__ || __x86_64__ || __aarch64__
111 #define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap64Barrier ((int64_t) (void *) (O), (int64_t) (void *) (N), (int64_t*) (P))
112 #else
113 #define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap32Barrier ((int32_t) (void *) (O), (int32_t) (void *) (N), (int32_t*) (P))
114 #endif
115 #endif
116
117
118 #elif !defined(HB_NO_MT) && defined(_AIX) && defined(__IBMCPP__)
119
120 #include <builtins.h>
121
122
/* Atomically add V to *AI and return the value *AI held BEFORE the add
 * (__fetch_and_add yields the old value).
 * NOTE(review): the __lwsync-before / __isync-after pairing looks like
 * release/acquire fencing on POWER — confirm against the IBM XL C/C++
 * builtins documentation before relying on stronger ordering. */
static inline int _hb_fetch_and_add (volatile int *AI, unsigned int V)
{
  __lwsync ();
  int old_value = __fetch_and_add (AI, V);
  __isync ();
  return old_value;
}
/* Atomic compare-and-swap on a long: if *P equals O, store N into *P.
 * Returns the builtin's success indicator — presumably non-zero on a
 * successful swap; verify against the IBM XL builtins documentation.
 * __sync () on both sides provides full memory barriers around the op.
 * Note O is passed by value here, so the builtin's write-back of the
 * observed value through &O is discarded. */
static inline int _hb_compare_and_swaplp (volatile long *P, long O, long N)
{
  __sync ();
  int did_swap = __compare_and_swaplp (P, &O, N);
  __sync ();
  return did_swap;
}
135
136 typedef int hb_atomic_int_impl_t;
137 #define hb_atomic_int_impl_add(AI, V) _hb_fetch_and_add (&(AI), (V))
138
|
98 #elif defined(__IPHONE_OS_MIN_REQUIRED)
99 #include <Availability.h>
100 #endif
101
102
103 typedef int32_t hb_atomic_int_impl_t;
104 #define hb_atomic_int_impl_add(AI, V) (OSAtomicAdd32Barrier ((V), &(AI)) - (V))
105
106 #define hb_atomic_ptr_impl_get(P) (OSMemoryBarrier (), (void *) *(P))
107 #if (MAC_OS_X_VERSION_MIN_REQUIRED > MAC_OS_X_VERSION_10_4 || __IPHONE_VERSION_MIN_REQUIRED >= 20100)
108 #define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwapPtrBarrier ((void *) (O), (void *) (N), (void **) (P))
109 #else
110 #if __ppc64__ || __x86_64__ || __aarch64__
111 #define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap64Barrier ((int64_t) (void *) (O), (int64_t) (void *) (N), (int64_t*) (P))
112 #else
113 #define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap32Barrier ((int32_t) (void *) (O), (int32_t) (void *) (N), (int32_t*) (P))
114 #endif
115 #endif
116
117
118 #elif !defined(HB_NO_MT) && defined(_AIX) && (defined(__IBMCPP__) || defined(__ibmxl__))
119
120 #include <builtins.h>
121
122
/* Fetch-and-add for the AIX/IBM XL path: adds V to *AI atomically and
 * returns the PREVIOUS value of *AI (that is what __fetch_and_add gives).
 * NOTE(review): __lwsync before and __isync after appear intended as
 * release/acquire barriers on POWER — confirm with the IBM XL C/C++
 * synchronization builtins reference. */
static inline int _hb_fetch_and_add (volatile int *AI, unsigned int V)
{
  __lwsync ();
  int prev = __fetch_and_add (AI, V);
  __isync ();
  return prev;
}
/* Compare-and-swap a long pointed to by P: when *P == O, write N.
 * The return is whatever __compare_and_swaplp reports — presumably a
 * non-zero success flag; confirm against the IBM XL builtins docs.
 * Full __sync () barriers bracket the operation on both sides.
 * O is a by-value copy, so the observed value the builtin writes back
 * through &O never escapes this helper. */
static inline int _hb_compare_and_swaplp (volatile long *P, long O, long N)
{
  __sync ();
  int ok = __compare_and_swaplp (P, &O, N);
  __sync ();
  return ok;
}
135
136 typedef int hb_atomic_int_impl_t;
137 #define hb_atomic_int_impl_add(AI, V) _hb_fetch_and_add (&(AI), (V))
138
|