lavu/atomic: add support for the new memory model aware gcc built-ins

The __sync built-ins are considered legacy and will be deprecated.
The new memory model aware __atomic built-ins have been available since GCC 4.7.0.

Use them by default when available, except for __atomic_compare_exchange_n(),
which is slower and is therefore only implemented as a fallback for when and if
gcc removes the legacy __sync built-ins.

Reviewed-by: Michael Niedermayer <michaelni@gmx.at>
Signed-off-by: James Almer <jamrial@gmail.com>
commit faa9d29829 (parent 931da6a5e9)
Author: James Almer
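For context, here is a minimal sketch (not part of the patch) of how the two compare-and-swap built-ins differ in shape; the wrapper names are illustrative only:

/* Legacy __sync form: returns the value *ptr held before the operation,
 * so the caller compares the result against oldval to tell whether the
 * swap happened. */
static int cas_sync(volatile int *ptr, int oldval, int newval)
{
    return __sync_val_compare_and_swap(ptr, oldval, newval);
}

/* Memory model aware form: returns a success flag and, on failure,
 * writes the current value of *ptr back into *expected; the extra
 * arguments select weak/strong CAS and the memory orderings. */
static int cas_atomic(volatile int *ptr, int *expected, int newval)
{
    return __atomic_compare_exchange_n(ptr, expected, newval, 0,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}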
 configure              |  4
 libavutil/atomic_gcc.h | 17

configure
@@ -1602,6 +1602,7 @@ ARCH_FEATURES="
 
 BUILTIN_LIST="
     atomic_cas_ptr
+    atomic_compare_exchange
     machine_rw_barrier
     MemoryBarrier
     mm_empty
@@ -2028,7 +2029,7 @@ simd_align_16_if_any="altivec neon sse"
 symver_if_any="symver_asm_label symver_gnu_asm"
 
 # threading support
-atomics_gcc_if="sync_val_compare_and_swap"
+atomics_gcc_if_any="sync_val_compare_and_swap atomic_compare_exchange"
 atomics_suncc_if="atomic_cas_ptr machine_rw_barrier"
 atomics_win32_if="MemoryBarrier"
 atomics_native_if_any="$ATOMICS_LIST"
@@ -4681,6 +4682,7 @@ if ! disabled network; then
 fi
 
 check_builtin atomic_cas_ptr atomic.h "void **ptr; void *oldval, *newval; atomic_cas_ptr(ptr, oldval, newval)"
+check_builtin atomic_compare_exchange "" "int *ptr, *oldval; int newval; __atomic_compare_exchange_n(ptr, oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)"
 check_builtin machine_rw_barrier mbarrier.h "__machine_rw_barrier()"
 check_builtin MemoryBarrier windows.h "MemoryBarrier()"
 check_builtin sarestart signal.h "SA_RESTART"
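Roughly speaking, and glossing over configure's test harness, the new check_builtin line tries to build something like the snippet below; when it succeeds, HAVE_ATOMIC_COMPARE_EXCHANGE is defined to 1 in config.h (which the #if blocks in atomic_gcc.h test), and the atomics_gcc_if_any line enables the gcc atomics backend if either built-in is present. Treat this as an approximation, not the literal probe:

int main(void)
{
    /* Mirrors the declarations in the check_builtin line above; the call
     * only needs to compile and link for the feature to be enabled. */
    int x = 0, expected = 0, newval = 1;
    int *ptr = &x, *oldval = &expected;
    __atomic_compare_exchange_n(ptr, oldval, newval, 0,
                                __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
    return 0;
}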

libavutil/atomic_gcc.h

@@ -28,27 +28,40 @@
 #define avpriv_atomic_int_get atomic_int_get_gcc
 static inline int atomic_int_get_gcc(volatile int *ptr)
 {
+#if HAVE_ATOMIC_COMPARE_EXCHANGE
+    return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
+#else
     __sync_synchronize();
     return *ptr;
+#endif
 }
 
 #define avpriv_atomic_int_set atomic_int_set_gcc
 static inline void atomic_int_set_gcc(volatile int *ptr, int val)
 {
+#if HAVE_ATOMIC_COMPARE_EXCHANGE
+    __atomic_store_n(ptr, val, __ATOMIC_SEQ_CST);
+#else
     *ptr = val;
     __sync_synchronize();
+#endif
 }
 
 #define avpriv_atomic_int_add_and_fetch atomic_int_add_and_fetch_gcc
 static inline int atomic_int_add_and_fetch_gcc(volatile int *ptr, int inc)
 {
+#if HAVE_ATOMIC_COMPARE_EXCHANGE
+    return __atomic_add_fetch(ptr, inc, __ATOMIC_SEQ_CST);
+#else
     return __sync_add_and_fetch(ptr, inc);
+#endif
 }
 
 #define avpriv_atomic_ptr_cas atomic_ptr_cas_gcc
 static inline void *atomic_ptr_cas_gcc(void * volatile *ptr,
                                        void *oldval, void *newval)
 {
+#if HAVE_SYNC_VAL_COMPARE_AND_SWAP
 #ifdef __ARMCC_VERSION
     // armcc will throw an error if ptr is not an integer type
     volatile uintptr_t *tmp = (volatile uintptr_t*)ptr;
@@ -56,6 +69,10 @@ static inline void *atomic_ptr_cas_gcc(void * volatile *ptr,
 #else
     return __sync_val_compare_and_swap(ptr, oldval, newval);
 #endif
+#else
+    __atomic_compare_exchange_n(ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
+    return oldval;
+#endif
 }
 
 #endif /* AVUTIL_ATOMIC_GCC_H */
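As a usage illustration (not part of the patch), these wrappers are the private atomics exposed through libavutil/atomic.h; a hypothetical reference counter built on them could look like this:

#include "libavutil/atomic.h"

typedef struct RefCounted {
    volatile int refcount;   /* only touched through avpriv_atomic_* */
} RefCounted;

static void obj_ref(RefCounted *obj)
{
    avpriv_atomic_int_add_and_fetch(&obj->refcount, 1);
}

/* Returns 1 when the last reference was just dropped. */
static int obj_unref(RefCounted *obj)
{
    return avpriv_atomic_int_add_and_fetch(&obj->refcount, -1) == 0;
}

Note that atomic_ptr_cas_gcc() keeps the __sync_val_compare_and_swap() convention of returning the previous value, which is why the new __atomic fallback returns oldval after the exchange rather than the built-in's boolean result.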
