Work around a bug in double-wide intrinsics of Clang/x64 with ASan enabled
author    Ivan Maidanski <ivmai@mail.ru>
          Thu, 24 Nov 2016 17:13:06 +0000 (20:13 +0300)
committer Ivan Maidanski <ivmai@mail.ru>
          Thu, 24 Nov 2016 17:13:06 +0000 (20:13 +0300)
As of clang-3.8, double-wide arguments are incorrectly passed to
atomic intrinsic operations for the x64 target if Address Sanitizer is
enabled.
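
For illustration, the affected pattern is a double-wide (16-byte)
operation issued through the __atomic builtins on x86_64. The standalone
sketch below is not part of the commit; the file name, variable names
and build flags are hypothetical, and it assumes a compiler that lowers
16-byte __atomic builtins to cmpxchg16b (e.g. clang with -mcx16):

/* repro.c - hypothetical sketch of a double-wide CAS through the
 * __atomic intrinsics, the kind of call that clang-3.8 handles
 * incorrectly on x64 when compiled with -fsanitize=address.
 * Assumed build command: clang -O2 -mcx16 -fsanitize=address repro.c
 */
#include <stdio.h>

int main(void)
{
  __int128 val = 1;       /* 16-byte ("double-wide") location */
  __int128 expected = 1;
  __int128 desired = 2;

  /* Strong compare-and-swap: stores desired into val iff val still
   * equals expected; with ASan enabled, clang-3.8 passed such
   * double-wide arguments to the intrinsic incorrectly.          */
  int done = __atomic_compare_exchange_n(&val, &expected, desired,
                                         0 /* strong */,
                                         __ATOMIC_SEQ_CST,
                                         __ATOMIC_SEQ_CST);
  printf("CAS %s, value now %d\n", done ? "succeeded" : "failed",
         (int)val);
  return 0;
}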

* src/atomic_ops.h [__has_feature(address_sanitizer)]
(AO_ADDRESS_SANITIZER): New internal macro.
* src/atomic_ops/sysdeps/gcc/x86.h [AO_GCC_ATOMIC_TEST_AND_SET
&& __clang__ && __x86_64__ && !__ILP32__]
(AO_SKIPATOMIC_double_compare_and_swap_ANY, AO_SKIPATOMIC_double_load,
AO_SKIPATOMIC_double_load_acquire, AO_SKIPATOMIC_double_store,
AO_SKIPATOMIC_double_store_release): Define also if AO_ADDRESS_SANITIZER;
update comment.
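
The guard added to atomic_ops.h relies on Clang's __has_feature
extension, which must itself be tested for existence so that compilers
lacking it still preprocess the header cleanly. A minimal standalone
sketch of the same idiom follows; the macro name is taken from the
patch, while the usage site at the end is invented for illustration:

/* Sketch of the detection idiom from the patch: __has_feature is a
 * Clang extension, so test that it exists before invoking it.
 * Compilers that do not define it skip the whole block, leaving
 * AO_ADDRESS_SANITIZER undefined.                                */
#if defined(__has_feature)
# if __has_feature(address_sanitizer)
#   define AO_ADDRESS_SANITIZER
# endif
#endif

/* Hypothetical usage: steer clear of code paths known to misbehave
 * under Address Sanitizer.                                       */
#ifdef AO_ADDRESS_SANITIZER
# define MY_SKIP_DOUBLE_WIDE_INTRINSICS /* invented name */
#endif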

src/atomic_ops.h
src/atomic_ops/sysdeps/gcc/x86.h

index a37ea71b8a9da575e3189d73d0749f3512fc80d1..9e054c284e8cb84aed4e22cbfb56e825fc56ab6f 100644
--- a/src/atomic_ops.h
+++ b/src/atomic_ops.h
 # define AO_EXPECT_FALSE(expr) (expr)
 #endif /* !__GNUC__ */
 
+#if defined(__has_feature)
+  /* __has_feature() is supported.      */
+# if __has_feature(address_sanitizer)
+#   define AO_ADDRESS_SANITIZER
+# endif
+#endif
+
 #if defined(__GNUC__) && !defined(__INTEL_COMPILER)
 # define AO_compiler_barrier() __asm__ __volatile__("" : : : "memory")
 #elif defined(_MSC_VER) || defined(__DMC__) || defined(__BORLANDC__) \
index 166e353fdb12ca09be0dc0ddc2247634339e609d..ded34ac83b1c0206b47ae10dd48f1a6cefcf8f47 100644
--- a/src/atomic_ops/sysdeps/gcc/x86.h
+++ b/src/atomic_ops/sysdeps/gcc/x86.h
 
 /* TODO: Refine for newer clang releases. */
 # if defined(__clang__) \
-     && !(defined(__x86_64__) || defined(__APPLE_CC__) \
-          || defined(__CYGWIN__) || defined(AO_PREFER_BUILTIN_ATOMICS))
+     && (!(defined(__x86_64__) || defined(__APPLE_CC__) \
+           || defined(__CYGWIN__) || defined(AO_PREFER_BUILTIN_ATOMICS)) \
+         || (defined(AO_ADDRESS_SANITIZER) && defined(__x86_64__) \
+             && !defined(__ILP32__)))
     /* As of clang-3.8 i686 (NDK r11c), it requires -latomic for all    */
     /* the double-wide operations.  For now, we fall back to the        */
     /* non-intrinsic implementation by default.                         */
+    /* As of clang-3.8, double-wide arguments are incorrectly passed to */
+    /* atomic intrinsic operations for x64 target if ASan is enabled.   */
 #   define AO_SKIPATOMIC_double_compare_and_swap_ANY
 #   define AO_SKIPATOMIC_double_load
 #   define AO_SKIPATOMIC_double_load_acquire