Use #undef instead of changing the name of MemoryBarrier

pull/1/head
jieluo@google.com 10 years ago
parent 8d6f04ad8d
commit 9efebb3294
  1. src/google/protobuf/stubs/atomicops.h (7 changed lines)
  2. src/google/protobuf/stubs/atomicops_internals_arm64_gcc.h (26 changed lines)
  3. src/google/protobuf/stubs/atomicops_internals_arm_gcc.h (10 changed lines)
  4. src/google/protobuf/stubs/atomicops_internals_arm_qnx.h (10 changed lines)
  5. src/google/protobuf/stubs/atomicops_internals_macosx.h (18 changed lines)
  6. src/google/protobuf/stubs/atomicops_internals_mips_gcc.h (14 changed lines)
  7. src/google/protobuf/stubs/atomicops_internals_pnacl.h (8 changed lines)
  8. src/google/protobuf/stubs/atomicops_internals_tsan.h (2 changed lines)
  9. src/google/protobuf/stubs/atomicops_internals_x86_gcc.h (12 changed lines)
  10. src/google/protobuf/stubs/atomicops_internals_x86_msvc.cc (2 changed lines)
  11. src/google/protobuf/stubs/atomicops_internals_x86_msvc.h (4 changed lines)

src/google/protobuf/stubs/atomicops.h
@@ -111,8 +111,8 @@ Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
 // ensure that no later memory access can be reordered ahead of the operation.
 // "Release" operations ensure that no previous memory access can be reordered
 // after the operation. "Barrier" operations have both "Acquire" and "Release"
-// semantics. A MemoryBarrierInternal() has "Barrier" semantics, but does no
-// memory access.
+// semantics. A MemoryBarrier() has "Barrier" semantics, but does no memory
+// access.
 Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                 Atomic32 old_value,
                                 Atomic32 new_value);
@@ -120,7 +120,8 @@ Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                 Atomic32 old_value,
                                 Atomic32 new_value);
-void MemoryBarrierInternal();
+#undef MemoryBarrier
+void MemoryBarrier();
 void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value);
 void Acquire_Store(volatile Atomic32* ptr, Atomic32 value);
 void Release_Store(volatile Atomic32* ptr, Atomic32 value);
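
The atomicops.h hunk above is the heart of the change: instead of renaming the protobuf barrier function to MemoryBarrierInternal(), the header now undefines the MemoryBarrier macro that a platform header (WinNT.h via windows.h) may have introduced, and keeps the original function name. Below is a minimal, self-contained sketch of that pattern, not taken from the protobuf sources; SomePlatformFence is a hypothetical stand-in for whatever the platform macro expands to.

    // Simulate a platform header that defines MemoryBarrier as a macro.
    #define MemoryBarrier SomePlatformFence

    namespace google {
    namespace protobuf {
    namespace internal {

    // Without the #undef, the preprocessor would rewrite the definition below
    // into "inline void SomePlatformFence() {}", silently changing the name.
    #undef MemoryBarrier
    inline void MemoryBarrier() {}  // keeps the documented API name

    }  // namespace internal
    }  // namespace protobuf
    }  // namespace google

    int main() {
      google::protobuf::internal::MemoryBarrier();  // resolves to the function above
      return 0;
    }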

src/google/protobuf/stubs/atomicops_internals_arm64_gcc.h
@@ -37,7 +37,7 @@ namespace google {
 namespace protobuf {
 namespace internal {
-inline void MemoryBarrierInternal() {
+inline void MemoryBarrier() {
   __asm__ __volatile__ ("dmb ish" ::: "memory");  // NOLINT
 }
@@ -117,9 +117,9 @@ inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                         Atomic32 increment) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   Atomic32 result = NoBarrier_AtomicIncrement(ptr, increment);
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return result;
 }
@@ -128,7 +128,7 @@ inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                        Atomic32 old_value,
                                        Atomic32 new_value) {
   Atomic32 prev = NoBarrier_CompareAndSwap(ptr, old_value, new_value);
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return prev;
 }
@@ -136,7 +136,7 @@ inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                        Atomic32 old_value,
                                        Atomic32 new_value) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   Atomic32 prev = NoBarrier_CompareAndSwap(ptr, old_value, new_value);
   return prev;
@@ -148,7 +148,7 @@ inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
   *ptr = value;
-  MemoryBarrierInternal();
+  MemoryBarrier();
 }
 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
@@ -178,7 +178,7 @@ inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
 }
 inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return *ptr;
 }
@@ -253,9 +253,9 @@ inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
 inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                         Atomic64 increment) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   Atomic64 result = NoBarrier_AtomicIncrement(ptr, increment);
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return result;
 }
@@ -264,7 +264,7 @@ inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                        Atomic64 old_value,
                                        Atomic64 new_value) {
   Atomic64 prev = NoBarrier_CompareAndSwap(ptr, old_value, new_value);
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return prev;
 }
@@ -272,7 +272,7 @@ inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
 inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                        Atomic64 old_value,
                                        Atomic64 new_value) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   Atomic64 prev = NoBarrier_CompareAndSwap(ptr, old_value, new_value);
   return prev;
@@ -284,7 +284,7 @@ inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
 inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
   *ptr = value;
-  MemoryBarrierInternal();
+  MemoryBarrier();
 }
 inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
@@ -314,7 +314,7 @@ inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
 }
 inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return *ptr;
 }

src/google/protobuf/stubs/atomicops_internals_arm_gcc.h
@@ -115,17 +115,17 @@ inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
   *ptr = value;
 }
-inline void MemoryBarrierInternal() {
+inline void MemoryBarrier() {
   pLinuxKernelMemoryBarrier();
 }
 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
   *ptr = value;
-  MemoryBarrierInternal();
+  MemoryBarrier();
 }
 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   *ptr = value;
 }
@@ -135,12 +135,12 @@ inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
   Atomic32 value = *ptr;
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return value;
 }
 inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return *ptr;
 }

src/google/protobuf/stubs/atomicops_internals_arm_qnx.h
@@ -110,17 +110,17 @@ inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
   *ptr = value;
 }
-inline void MemoryBarrierInternal() {
+inline void MemoryBarrier() {
   __sync_synchronize();
 }
 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
   *ptr = value;
-  MemoryBarrierInternal();
+  MemoryBarrier();
 }
 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   *ptr = value;
 }
@@ -130,12 +130,12 @@ inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
   Atomic32 value = *ptr;
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return value;
 }
 inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return *ptr;
 }

src/google/protobuf/stubs/atomicops_internals_macosx.h
@@ -73,7 +73,7 @@ inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
   return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
 }
-inline void MemoryBarrierInternal() {
+inline void MemoryBarrier() {
   OSMemoryBarrier();
 }
@@ -103,11 +103,11 @@ inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
   *ptr = value;
-  MemoryBarrierInternal();
+  MemoryBarrier();
 }
 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   *ptr = value;
 }
@@ -117,12 +117,12 @@ inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
   Atomic32 value = *ptr;
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return value;
 }
 inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return *ptr;
 }
@@ -193,11 +193,11 @@ inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
 inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
   *ptr = value;
-  MemoryBarrierInternal();
+  MemoryBarrier();
 }
 inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   *ptr = value;
 }
@@ -207,12 +207,12 @@ inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
 inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
   Atomic64 value = *ptr;
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return value;
 }
 inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return *ptr;
 }

src/google/protobuf/stubs/atomicops_internals_mips_gcc.h
@@ -125,8 +125,8 @@ inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
 // ensure that no later memory access can be reordered ahead of the operation.
 // "Release" operations ensure that no previous memory access can be reordered
 // after the operation. "Barrier" operations have both "Acquire" and "Release"
-// semantics. A MemoryBarrierInternal() has "Barrier" semantics, but does no
-// memory access.
+// semantics. A MemoryBarrier() has "Barrier" semantics, but does no memory
+// access.
 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                        Atomic32 old_value,
                                        Atomic32 new_value) {
@@ -149,17 +149,17 @@ inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
   *ptr = value;
 }
-inline void MemoryBarrierInternal() {
+inline void MemoryBarrier() {
   __asm__ __volatile__("sync" : : : "memory");
 }
 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
   *ptr = value;
-  MemoryBarrierInternal();
+  MemoryBarrier();
 }
 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   *ptr = value;
 }
@@ -169,12 +169,12 @@ inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
   Atomic32 value = *ptr;
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return value;
 }
 inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return *ptr;
 }

src/google/protobuf/stubs/atomicops_internals_pnacl.h
@@ -43,7 +43,7 @@ inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
   return __sync_val_compare_and_swap(ptr, old_value, new_value);
 }
-inline void MemoryBarrierInternal() {
+inline void MemoryBarrier() {
   __sync_synchronize();
 }
@@ -51,18 +51,18 @@ inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                        Atomic32 old_value,
                                        Atomic32 new_value) {
   Atomic32 ret = NoBarrier_CompareAndSwap(ptr, old_value, new_value);
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return ret;
 }
 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   *ptr = value;
 }
 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
   Atomic32 value = *ptr;
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return value;
 }

src/google/protobuf/stubs/atomicops_internals_tsan.h
@@ -206,7 +206,7 @@ inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
   return cmp;
 }
-inline void MemoryBarrierInternal() {
+inline void MemoryBarrier() {
   __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
 }

src/google/protobuf/stubs/atomicops_internals_x86_gcc.h
@@ -119,18 +119,18 @@ inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
 // 64-bit implementations of memory barrier can be simpler, because it
 // "mfence" is guaranteed to exist.
-inline void MemoryBarrierInternal() {
+inline void MemoryBarrier() {
   __asm__ __volatile__("mfence" : : : "memory");
 }
 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
   *ptr = value;
-  MemoryBarrierInternal();
+  MemoryBarrier();
 }
 #else
-inline void MemoryBarrierInternal() {
+inline void MemoryBarrier() {
   if (AtomicOps_Internalx86CPUFeatures.has_sse2) {
     __asm__ __volatile__("mfence" : : : "memory");
   } else {  // mfence is faster but not present on PIII
@@ -168,7 +168,7 @@ inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
 }
 inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return *ptr;
 }
@@ -225,7 +225,7 @@ inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
 inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
   *ptr = value;
-  MemoryBarrierInternal();
+  MemoryBarrier();
 }
 inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
@@ -262,7 +262,7 @@ inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
 }
 inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return *ptr;
 }

src/google/protobuf/stubs/atomicops_internals_x86_msvc.cc
@@ -44,7 +44,7 @@ namespace google {
 namespace protobuf {
 namespace internal {
-inline void MemoryBarrierInternal() {
+inline void MemoryBarrier() {
   // We use MemoryBarrier from WinNT.h
   ::MemoryBarrier();
 }

src/google/protobuf/stubs/atomicops_internals_x86_msvc.h
@@ -82,7 +82,7 @@ inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
 }
 inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return *ptr;
 }
@@ -125,7 +125,7 @@ inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
 }
 inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
-  MemoryBarrierInternal();
+  MemoryBarrier();
   return *ptr;
 }
