# define GC_malloc_kind_global GC_malloc_kind
#endif
+/* An internal macro to update the free list pointer atomically (if  */
+/* the AO primitives are available) to avoid a race with the marker. */
+#ifdef AO_HAVE_store
+# define GC_FAST_M_AO_STORE(my_fl, next) \
+ AO_store((volatile AO_t *)(my_fl), (AO_t)(next))
+#else
+# define GC_FAST_M_AO_STORE(my_fl, next) (void)(*(my_fl) = (next))
+#endif
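+/* E.g., GC_FAST_MALLOC_GRANS below publishes the new free-list head */
+/* with GC_FAST_M_AO_STORE(my_fl, next) instead of a plain store, so */
+/* that a marker thread reading the same slot with AO_load observes  */
+/* either the old or the new pointer, never a torn value.            */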
+
/* The ultimately general inline allocation macro. Allocate an object */
/* of size granules, putting the resulting pointer in result. Tiny_fl */
/* is a "tiny" free list array, which will be used first, if the size */
> (num_direct) + GC_TINY_FREELISTS + 1, 1)) { \
next = *(void **)(my_entry); \
result = (void *)my_entry; \
- *my_fl = next; \
+ GC_FAST_M_AO_STORE(my_fl, next); \
init; \
GC_PREFETCH_FOR_WRITE(next); \
GC_ASSERT(GC_size(result) >= (granules)*GC_GRANULE_BYTES); \
/* (GC_word)my_entry <= (num_direct) */ \
&& my_entry != NULL) { \
/* Small counter value, not NULL */ \
- *my_fl = (char *)my_entry + (granules) + 1; \
+ GC_FAST_M_AO_STORE(my_fl, (char *)my_entry \
+ + (granules) + 1); \
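+            /* Here the slot holds a small allocation counter, not  */ \
+            /* a pointer: bump it by (granules)+1 and take the      */ \
+            /* default (slower) allocation path until the counter   */ \
+            /* grows past num_direct.                                */ \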
result = (default_expr); \
break; \
} else { \
for (j = 0; j < TINY_FREELISTS; ++j) {
for (i = 0; i < THREAD_FREELISTS_KINDS; ++i) {
- q = (ptr_t)p->_freelists[i][j];
+ /* Load the pointer atomically as it might be updated */
+ /* concurrently by GC_FAST_MALLOC_GRANS. */
+ q = (ptr_t)AO_load((volatile AO_t *)&p->_freelists[i][j]);
if ((word)q > HBLKSIZE)
GC_set_fl_marks(q);
}
# ifdef GC_GCJ_SUPPORT
if (EXPECT(j > 0, TRUE)) {
- q = (ptr_t)p->gcj_freelists[j];
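+ /* Load atomically for the same reason as above. */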
+ q = (ptr_t)AO_load((volatile AO_t *)&p->gcj_freelists[j]);
if ((word)q > HBLKSIZE)
GC_set_fl_marks(q);
}