# define AO_EXPECT_FALSE(expr) (expr)
#endif /* !__GNUC__ */
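/* Usage sketch (illustrative only, not part of the change): the macro */
/* marks a condition as unlikely so compilers that support hints can   */
/* place the cold path out of line; this fallback is a no-op, e.g.:    */
/*                                                                     */
/*   if (AO_EXPECT_FALSE(!AO_compare_and_swap(&lock, 0, 1)))           */
/*     slow_path();    hypothetical contended-lock handler             */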
+#if defined(__has_feature)
+ /* __has_feature() is supported. */
+# if __has_feature(address_sanitizer)
+# define AO_ADDRESS_SANITIZER
+# endif
+#endif
+
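/* Hedged sketch (illustrative, not part of the patch): with the       */
/* detection above, later code can branch on AO_ADDRESS_SANITIZER to   */
/* steer around paths known to misbehave under ASan, as the clang      */
/* condition below does:                                               */
/*                                                                     */
/*   #if defined(AO_ADDRESS_SANITIZER) && defined(__x86_64__)          */
/*     (skip the double-wide intrinsics; see the hunk below)           */
/*   #endif                                                            */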
#if defined(__GNUC__) && !defined(__INTEL_COMPILER)
# define AO_compiler_barrier() __asm__ __volatile__("" : : : "memory")
#elif defined(_MSC_VER) || defined(__DMC__) || defined(__BORLANDC__) \
/* TODO: Refine for newer clang releases. */
# if defined(__clang__) \
- && !(defined(__x86_64__) || defined(__APPLE_CC__) \
- || defined(__CYGWIN__) || defined(AO_PREFER_BUILTIN_ATOMICS))
+ && (!(defined(__x86_64__) || defined(__APPLE_CC__) \
+ || defined(__CYGWIN__) || defined(AO_PREFER_BUILTIN_ATOMICS)) \
+ || (defined(AO_ADDRESS_SANITIZER) && defined(__x86_64__) \
+ && !defined(__ILP32__)))
  /* As of clang-3.8 targeting i686 (NDK r11c), -latomic is required   */
  /* for all the double-wide operations.  For now, we fall back to the */
  /* non-intrinsic implementation by default.                          */
+ /* As of clang-3.8, double-wide arguments are passed incorrectly to  */
+ /* the atomic intrinsic operations for the x86_64 target if ASan is  */
+ /* enabled, so the intrinsics are skipped in that case as well.      */
# define AO_SKIPATOMIC_double_compare_and_swap_ANY
# define AO_SKIPATOMIC_double_load
# define AO_SKIPATOMIC_double_load_acquire
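/* Hedged sketch (an assumption about the consuming header): each      */
/* AO_SKIPATOMIC_* macro is expected to suppress the corresponding     */
/* intrinsic-based primitive elsewhere, roughly:                       */
/*                                                                     */
/*   #ifndef AO_SKIPATOMIC_double_load                                 */
/*     AO_INLINE AO_double_t                                           */
/*     AO_double_load(const volatile AO_double_t *addr)                */
/*     { ... intrinsic-based implementation ... }                      */
/*   #endif                                                            */
/*                                                                     */
/* With the macros defined above, that branch is skipped and the       */
/* explicit non-intrinsic implementation is used instead.              */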