Avoid potential race between malloc_kind and mark_thread_local_fls_for
author    Ivan Maidanski <ivmai@mail.ru>
          Fri, 23 Mar 2018 18:29:28 +0000 (21:29 +0300)
committer Ivan Maidanski <ivmai@mail.ru>
          Fri, 23 Mar 2018 18:32:44 +0000 (21:32 +0300)
Issue #214 (bdwgc).

* include/gc_inline.h (GC_FAST_M_AO_STORE): New internal macro.
* include/gc_inline.h (GC_FAST_MALLOC_GRANS): Use GC_FAST_M_AO_STORE.
* thread_local_alloc.c [THREAD_LOCAL_ALLOC]
(GC_mark_thread_local_fls_for): Use AO_load to get _freelists[i][j]
value; add comment.
* thread_local_alloc.c [THREAD_LOCAL_ALLOC && GC_GCJ_SUPPORT]
(GC_mark_thread_local_fls_for): Use AO_load to get gcj_freelists[j].
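
The race being closed: in the allocation fast path the owning thread is the only
writer of its free-list head, but the marker may read the same word concurrently,
and a plain pointer store could be split or reordered so that the marker observes
a torn value. A minimal sketch of the store-side change, assuming libatomic_ops
is available; tl_freelist and tl_pop are hypothetical names, not the collector's
actual fast path:

#include "atomic_ops.h"

static volatile AO_t tl_freelist;   /* head of one thread-local free list */

static void *tl_pop(void)
{
    /* The owning thread is the only writer, so its own read of the    */
    /* head need not be atomic.                                        */
    void *head = (void *)tl_freelist;
    void *next;

    if (head == NULL)
        return NULL;                /* empty list: take the slow path */

    /* The link to the next free object is stored in the object itself. */
    next = *(void **)head;

    /* Previously a plain "*my_fl = next"; GC_FAST_M_AO_STORE now does  */
    /* this through AO_store so the concurrently reading marker cannot  */
    /* see a half-written pointer.                                      */
    AO_store(&tl_freelist, (AO_t)next);
    return head;
}

When AO_HAVE_store is not defined, GC_FAST_M_AO_STORE degrades to the plain
assignment (the #else branch in the gc_inline.h hunk below), so builds without
the AO primitives keep the previous behaviour.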

include/gc_inline.h
thread_local_alloc.c

diff --git a/include/gc_inline.h b/include/gc_inline.h
index cafb71a5c77b66e04bea41a1fbae00865f9cf798..f28dcfcc680f7bbf13eeb0b7db56339faab66b1b 100644
--- a/include/gc_inline.h
+++ b/include/gc_inline.h
@@ -83,6 +83,15 @@ GC_API GC_ATTR_MALLOC GC_ATTR_ALLOC_SIZE(1) void * GC_CALL
 # define GC_malloc_kind_global GC_malloc_kind
 #endif
 
+/* An internal macro to update the free list pointer atomically (if     */
+/* the AO primitives are available) to avoid race with the marker.      */
+#ifdef AO_HAVE_store
+# define GC_FAST_M_AO_STORE(my_fl, next) \
+                AO_store((volatile AO_t *)(my_fl), (AO_t)(next))
+#else
+# define GC_FAST_M_AO_STORE(my_fl, next) (void)(*(my_fl) = (next))
+#endif
+
 /* The ultimately general inline allocation macro.  Allocate an object  */
 /* of size granules, putting the resulting pointer in result.  Tiny_fl  */
 /* is a "tiny" free list array, which will be used first, if the size   */
@@ -116,7 +125,7 @@ GC_API GC_ATTR_MALLOC GC_ATTR_ALLOC_SIZE(1) void * GC_CALL
                           > (num_direct) + GC_TINY_FREELISTS + 1, 1)) { \
                 next = *(void **)(my_entry); \
                 result = (void *)my_entry; \
-                *my_fl = next; \
+                GC_FAST_M_AO_STORE(my_fl, next); \
                 init; \
                 GC_PREFETCH_FOR_WRITE(next); \
                 GC_ASSERT(GC_size(result) >= (granules)*GC_GRANULE_BYTES); \
@@ -129,7 +138,8 @@ GC_API GC_ATTR_MALLOC GC_ATTR_ALLOC_SIZE(1) void * GC_CALL
                     /* (GC_word)my_entry <= (num_direct) */ \
                     && my_entry != NULL) { \
                 /* Small counter value, not NULL */ \
-                *my_fl = (char *)my_entry + (granules) + 1; \
+                GC_FAST_M_AO_STORE(my_fl, (char *)my_entry \
+                                          + (granules) + 1); \
                 result = (default_expr); \
                 break; \
             } else { \
diff --git a/thread_local_alloc.c b/thread_local_alloc.c
index aaebd3a8800ee57361b7ca27da7a539feee16b16..3a04fbe8db792da0249c33a7a40277252239ccbc 100644
--- a/thread_local_alloc.c
+++ b/thread_local_alloc.c
@@ -266,13 +266,15 @@ GC_INNER void GC_mark_thread_local_fls_for(GC_tlfs p)
 
     for (j = 0; j < TINY_FREELISTS; ++j) {
       for (i = 0; i < THREAD_FREELISTS_KINDS; ++i) {
-        q = (ptr_t)p->_freelists[i][j];
+        /* Load the pointer atomically as it might be updated   */
+        /* concurrently by GC_FAST_MALLOC_GRANS.                */
+        q = (ptr_t)AO_load((volatile AO_t *)&p->_freelists[i][j]);
         if ((word)q > HBLKSIZE)
           GC_set_fl_marks(q);
       }
 #     ifdef GC_GCJ_SUPPORT
         if (EXPECT(j > 0, TRUE)) {
-          q = (ptr_t)p->gcj_freelists[j];
+          q = (ptr_t)AO_load((volatile AO_t *)&p->gcj_freelists[j]);
           if ((word)q > HBLKSIZE)
             GC_set_fl_marks(q);
         }
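
On the marker side, the matching change is the AO_load above: each slot is read
once, atomically, even while the owning thread may be storing to it from
GC_FAST_MALLOC_GRANS. A slot holds either a real free-list pointer or a small
allocation counter (the fast path counts the first num_direct allocations, as the
gc_inline.h hunk shows), which is why values not above HBLKSIZE are skipped. A
rough sketch with hypothetical names (scan_slot, MY_HBLKSIZE):

#include "atomic_ops.h"

#define MY_HBLKSIZE 4096   /* stand-in for the collector's HBLKSIZE */

static void scan_slot(volatile AO_t *slot)
{
    /* Mirrors the new AO_load in GC_mark_thread_local_fls_for: read   */
    /* the slot once, atomically, even if the owner is updating it.    */
    AO_t q = AO_load(slot);

    if (q > (AO_t)MY_HBLKSIZE) {
        /* A real free-list head: walk it and set the mark bits        */
        /* (GC_set_fl_marks in the actual code).                       */
    } else {
        /* A small counter left by the fast path's "count up to        */
        /* num_direct" phase; nothing to mark.                         */
    }
}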