@@ -119,7 +119,7 @@ static inline void _hb_memory_barrier (void)
 #define hb_atomic_int_impl_add(AI, V) InterlockedExchangeAdd ((LONG *) (AI), (V))
 static_assert ((sizeof (LONG) == sizeof (int)), "");
 
-#define hb_atomic_ptr_impl_cmpexch(P,O,N) (InterlockedCompareExchangePointer ((void **) (P), (void *) (N), (void *) (O)) == (void *) (O))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) (InterlockedCompareExchangePointer ((P), (N), (O)) == (O))
 
 
 #elif !defined(HB_NO_MT) && defined(HAVE_INTEL_ATOMIC_PRIMITIVES)
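
Context for the hunk above: on the MSVC path, InterlockedCompareExchangePointer returns the value the destination held before the call, so comparing that return against O tells the caller whether the swap happened. The casts can go because hb_atomic_ptr_t::cmpexch (last hunk below) now supplies them at the call site. A minimal sketch of that contract, assuming a Windows toolchain; cmpexch and cache are illustrative names, not HarfBuzz API:

#include <windows.h>
#include <cassert>

static void *cache = nullptr;

/* Mirrors the macro body: install N only if *P still equals O. */
static bool cmpexch (void **P, void *O, void *N)
{
  return InterlockedCompareExchangePointer (P, N, O) == O;
}

int main ()
{
  int payload = 0;
  assert (cmpexch (&cache, nullptr, &payload));  /* first caller installs  */
  assert (!cmpexch (&cache, nullptr, &payload)); /* later callers see it taken */
  return 0;
}
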
@@ -147,17 +147,17 @@ static inline int _hb_fetch_and_add (int *AI, int V)
   _hb_memory_r_barrier ();
   return result;
 }
-static inline bool _hb_compare_and_swap_ptr (const void **P, const void *O, const void *N)
+static inline bool _hb_compare_and_swap_ptr (void **P, void *O, void *N)
 {
   _hb_memory_w_barrier ();
-  int result = atomic_cas_ptr ((void **) P, (void *) O, (void *) N) == (void *) O;
+  bool result = atomic_cas_ptr (P, O, N) == O;
   _hb_memory_r_barrier ();
   return result;
 }
 
 #define hb_atomic_int_impl_add(AI, V) _hb_fetch_and_add ((AI), (V))
 
-#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_compare_and_swap_ptr ((const void **) (P), (O), (N))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_compare_and_swap_ptr ((P), (O), (N))
 
 
 #elif !defined(HB_NO_MT) && defined(__APPLE__)
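
The Solaris helper gets full-barrier semantics by bracketing a plain CAS with explicit write and read barriers; the hunk also tightens the result type (the comparison yields a bool, not an int) and moves the casts out. A portable sketch of the same fence/CAS/fence shape, using C++11 <atomic> purely as a stand-in for atomic_cas_ptr() and the _hb_memory_*_barrier() macros; illustrative only:

#include <atomic>
#include <cassert>

/* Same shape as _hb_compare_and_swap_ptr: fence, relaxed CAS, fence. */
static inline bool compare_and_swap_ptr (std::atomic<void *> *P, void *O, void *N)
{
  std::atomic_thread_fence (std::memory_order_release); /* _hb_memory_w_barrier () */
  bool result = P->compare_exchange_strong (O, N, std::memory_order_relaxed);
  std::atomic_thread_fence (std::memory_order_acquire); /* _hb_memory_r_barrier () */
  return result;
}

int main ()
{
  std::atomic<void *> p {nullptr};
  int x = 0;
  assert (compare_and_swap_ptr (&p, nullptr, &x));  /* expected value matched */
  assert (!compare_and_swap_ptr (&p, nullptr, &x)); /* p is no longer null */
  return 0;
}
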
@@ -174,12 +174,12 @@ static inline bool _hb_compare_and_swap_ptr (const void **P, const void *O, cons
 #define hb_atomic_int_impl_add(AI, V) (OSAtomicAdd32Barrier ((V), (AI)) - (V))
 
 #if (MAC_OS_X_VERSION_MIN_REQUIRED > MAC_OS_X_VERSION_10_4 || __IPHONE_VERSION_MIN_REQUIRED >= 20100)
-#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwapPtrBarrier ((void *) (O), (void *) (N), (void **) (P))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwapPtrBarrier ((O), (N), (P))
 #else
 #if __ppc64__ || __x86_64__ || __aarch64__
-#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap64Barrier ((int64_t) (void *) (O), (int64_t) (void *) (N), (int64_t*) (P))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap64Barrier ((int64_t) (O), (int64_t) (N), (int64_t*) (P))
 #else
-#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap32Barrier ((int32_t) (void *) (O), (int32_t) (void *) (N), (int32_t*) (P))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap32Barrier ((int32_t) (O), (int32_t) (N), (int32_t*) (P))
 #endif
 #endif
 
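
On the Apple path, OSAtomicCompareAndSwapPtrBarrier takes (old, new, address) and returns true iff the swap happened; the #else branch exists because the pointer-width variant is missing on the oldest supported SDKs, so the code falls back to a 64- or 32-bit CAS on the raw pointer bits. A sketch of the call, assuming a macOS toolchain where <libkern/OSAtomic.h> is still usable (the whole API is deprecated on modern SDKs; slot is an illustrative name):

#include <libkern/OSAtomic.h>
#include <cassert>

static void *slot = nullptr;

int main ()
{
  int v = 0;
  /* Argument order is (old, new, address), unlike the Windows variant. */
  assert (OSAtomicCompareAndSwapPtrBarrier (nullptr, &v, &slot));
  assert (slot == &v);
  return 0;
}
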
@@ -254,7 +254,7 @@ static_assert ((sizeof (long) == sizeof (void *)), "");
 #define hb_atomic_ptr_impl_get_relaxed(P) (*(P))
 #endif
 #ifndef hb_atomic_int_impl_set
-inline void hb_atomic_int_impl_get (int *AI, int v) { _hb_memory_w_barrier (); *AI = v; }
+inline void hb_atomic_int_impl_set (int *AI, int v) { _hb_memory_w_barrier (); *AI = v; }
 #endif
 #ifndef hb_atomic_int_impl_get
 inline int hb_atomic_int_impl_get (int *AI) { int v = *AI; _hb_memory_r_barrier (); return v; }
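
The hunk above fixes a naming bug: the fallback setter was declared as hb_atomic_int_impl_get even though its (int *, int) signature and store-only body make it the set implementation, so this path would have provided two get overloads and no set at all. The pattern itself is write-barrier-before-store, read-barrier-after-load. A portable sketch with std::atomic_thread_fence standing in for the _hb_memory_*_barrier() macros; illustrative only:

#include <atomic>
#include <cassert>

/* Same shape as the fallback pair above.  The plain int access mirrors the
   original's assumption that aligned int loads/stores are indivisible. */
static inline void impl_set (int *AI, int v)
{
  std::atomic_thread_fence (std::memory_order_release); /* _hb_memory_w_barrier () */
  *AI = v;
}

static inline int impl_get (int *AI)
{
  int v = *AI;
  std::atomic_thread_fence (std::memory_order_acquire); /* _hb_memory_r_barrier () */
  return v;
}

int main ()
{
  int i = 0;
  impl_set (&i, 5);
  assert (impl_get (&i) == 5);
  return 0;
}
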
@@ -291,7 +291,7 @@ struct hb_atomic_ptr_t
   inline void set_relaxed (T* v_) const { hb_atomic_ptr_impl_set_relaxed (&v, v_); }
   inline T *get_relaxed (void) const { return (T *) hb_atomic_ptr_impl_get_relaxed (&v); }
   inline T *get (void) const { return (T *) hb_atomic_ptr_impl_get ((void **) &v); }
-  inline bool cmpexch (const T *old, T *new_) const { return hb_atomic_ptr_impl_cmpexch (&v, old, new_); }
+  inline bool cmpexch (const T *old, T *new_) const { return hb_atomic_ptr_impl_cmpexch ((void **) &v, (void *) old, (void *) new_); }
 
   mutable T *v;
 };
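
After this change, all pointer casting is concentrated in hb_atomic_ptr_t::cmpexch and the per-platform macros deal in plain pointers. A self-contained sketch of the calling convention the template exposes, with std::atomic used as a stand-in backend (simplified; not the real hb_atomic_ptr_t):

#include <atomic>
#include <cassert>

template <typename T>
struct atomic_ptr_sketch
{
  /* cmpexch() takes typed pointers; the casting happens here, in one place. */
  inline bool cmpexch (const T *old, T *new_) const
  {
    T *expected = const_cast<T *> (old);
    return v.compare_exchange_strong (expected, new_);
  }
  inline T *get () const { return v.load (); }

  mutable std::atomic<T *> v {nullptr};
};

int main ()
{
  static atomic_ptr_sketch<int> cache;
  static int once = 1;
  /* Typical lazy init: install a value only if the slot is still null. */
  if (!cache.cmpexch (nullptr, &once))
    ; /* lost the race; another thread already installed a value */
  assert (cache.get () == &once);
  return 0;
}

The const_cast plays the role of the (void *) old cast in the real cmpexch: compare-exchange needs a mutable expected value even though the caller only promises not to write through old.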