add_chunk_as(chunk, log_sz);
result = AO_stack_pop(AO_free_list+log_sz);
}
-# if defined(AO_THREAD_SANITIZER) && defined(AO_USE_ALMOST_LOCK_FREE)
- /* A data race with AO_stack_pop() called above is a false positive. */
- AO_store(result, log_sz);
-# else
- *result = log_sz;
-# endif
+ *result = log_sz;
# ifdef AO_TRACE_MALLOC
fprintf(stderr, "%p: AO_malloc(%lu) = %p\n",
(void *)pthread_self(), (unsigned long)sz, (void *)(result + 1));
# define PRECHECK(a)
#endif
+/* This function is called just before the CAS in AO_stack_pop() below.  */
+/* The data race reported by TSan is benign: a stale value read here     */
+/* only makes the CAS fail, and the pop operation is then retried.       */
+#ifdef AO_THREAD_SANITIZER
+ AO_ATTR_NO_SANITIZE_THREAD
+ static AO_t AO_load_next(volatile AO_t *first_ptr)
+ {
+ /* Assuming an architecture on which loads of word type are atomic. */
+ /* AO_load cannot be used here because it cannot be instructed to */
+ /* suppress the warning about the race. */
+ return *first_ptr;
+ }
+#else
+# define AO_load_next AO_load
+#endif
+
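To illustrate why the unsanitized load is harmless, here is a minimal sketch of the same load-then-CAS retry pattern, written with C11 atomics rather than the libatomic_ops primitives used above. The names (sketch_pop, struct node) are illustrative only, and the sketch assumes nodes are never unmapped while a pop is in flight and ignores the node-reuse/ABA handling that the real AO_stack_pop() performs via AO_stack_aux.

#include <stdatomic.h>
#include <stddef.h>

struct node { struct node *next; };

static struct node *sketch_pop(struct node *_Atomic *head)
{
  struct node *first, *next;

  do {
    first = atomic_load(head);
    if (first == NULL)
      return NULL;
    /* This plain read is the access a sanitizer would flag: a        */
    /* concurrent pop may detach "first" between this read and the    */
    /* CAS below, making "next" stale.  In that case *head no longer  */
    /* equals "first", the CAS fails, and the loop retries with a     */
    /* fresh head, so the stale value is never published.             */
    next = first->next;
  } while (!atomic_compare_exchange_weak(head, &first, next));
  return first;
}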
AO_t *
AO_stack_pop_explicit_aux_acquire(volatile AO_t *list, AO_stack_aux * a)
{
goto retry;
}
first_ptr = AO_REAL_NEXT_PTR(first);
- next = AO_load(first_ptr);
+ next = AO_load_next(first_ptr);
# if defined(__alpha__) && (__GNUC__ == 4)
if (!AO_compare_and_swap_release(list, first, next))
# else