From: David Majnemer
Date: Tue, 28 Jun 2016 02:54:43 +0000 (+0000)
Subject: [intrin.h] Certain _Interlocked intrinsics return the old value
X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=f524908a62d6ab0bb8e46476bbd1d31d7f8e3060;p=clang

[intrin.h] Certain _Interlocked intrinsics return the old value

This fixes PR28326.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@273986 91177308-0d34-0410-b5e6-96231b3b80d8
---

diff --git a/lib/Headers/intrin.h b/lib/Headers/intrin.h
index 3033fae1cf..f18711ad1e 100644
--- a/lib/Headers/intrin.h
+++ b/lib/Headers/intrin.h
@@ -666,20 +666,20 @@ _InterlockedDecrement64(__int64 volatile *_Value) {
 \*----------------------------------------------------------------------------*/
 static __inline__ char __DEFAULT_FN_ATTRS
 _InterlockedAnd8(char volatile *_Value, char _Mask) {
-  return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 static __inline__ short __DEFAULT_FN_ATTRS
 _InterlockedAnd16(short volatile *_Value, short _Mask) {
-  return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 static __inline__ long __DEFAULT_FN_ATTRS
 _InterlockedAnd(long volatile *_Value, long _Mask) {
-  return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 #ifdef __x86_64__
 static __inline__ __int64 __DEFAULT_FN_ATTRS
 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
-  return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 #endif
 /*----------------------------------------------------------------------------*\
@@ -687,20 +687,20 @@ _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
 \*----------------------------------------------------------------------------*/
 static __inline__ char __DEFAULT_FN_ATTRS
 _InterlockedOr8(char volatile *_Value, char _Mask) {
-  return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 static __inline__ short __DEFAULT_FN_ATTRS
 _InterlockedOr16(short volatile *_Value, short _Mask) {
-  return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 static __inline__ long __DEFAULT_FN_ATTRS
 _InterlockedOr(long volatile *_Value, long _Mask) {
-  return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 #ifdef __x86_64__
 static __inline__ __int64 __DEFAULT_FN_ATTRS
 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
-  return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 #endif
 /*----------------------------------------------------------------------------*\
@@ -708,20 +708,20 @@ _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
 \*----------------------------------------------------------------------------*/
 static __inline__ char __DEFAULT_FN_ATTRS
 _InterlockedXor8(char volatile *_Value, char _Mask) {
-  return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 static __inline__ short __DEFAULT_FN_ATTRS
 _InterlockedXor16(short volatile *_Value, short _Mask) {
-  return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 static __inline__ long __DEFAULT_FN_ATTRS
 _InterlockedXor(long volatile *_Value, long _Mask) {
-  return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 #ifdef __x86_64__
 static __inline__ __int64 __DEFAULT_FN_ATTRS
 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
-  return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
+  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
 }
 #endif
 /*----------------------------------------------------------------------------*\
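For reference, the bug fixed here is a return-value mismatch: the _Interlocked bitwise intrinsics return the value the destination held before the operation, but the __atomic_*_fetch builtins used previously return the updated value. The __atomic_fetch_* forms return the prior value and therefore match the _Interlocked contract. The standalone sketch below is not part of the patch; it assumes a Clang/GCC-style compiler that provides both builtin families, and the helper names are purely illustrative.

#include <stdio.h>

/* Hypothetical helpers for illustration only; the real definitions live in
 * lib/Headers/intrin.h. */
static long and_returning_old(long volatile *_Value, long _Mask) {
  /* Post-patch behavior: yields the value *before* the AND. */
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}

static long and_returning_new(long volatile *_Value, long _Mask) {
  /* Pre-patch behavior: yields the value *after* the AND. */
  return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}

int main(void) {
  long volatile a = 0xFF;
  long volatile b = 0xFF;
  /* Prints 0xff: the old value, which is what _InterlockedAnd must return. */
  printf("__atomic_fetch_and -> 0x%lx\n",
         (unsigned long)and_returning_old(&a, 0x0F));
  /* Prints 0xf: the new value, i.e. the pre-patch (incorrect) result. */
  printf("__atomic_and_fetch -> 0x%lx\n",
         (unsigned long)and_returning_new(&b, 0x0F));
  return 0;
}

The same old-value-versus-new-value distinction applies to the Or and Xor variants changed above.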