From 2178db2b67b9313b7f59d1f5f1aca743ad9b7df6 Mon Sep 17 00:00:00 2001
From: Ivan Maidanski
Date: Mon, 9 Oct 2017 22:07:24 +0300
Subject: [PATCH] Eliminate TSan warnings for all counters and A.aa variable
 (gctest)

Now most of the counters in gctest are updated using atomic increments,
and the values of the gctest dropped_something and A.aa variables are
updated and fetched using atomic primitives.

* tests/test.c [!AO_CLEAR] (AO_t): Define.
* tests/test.c [!AO_HAVE_load] (AO_load): Define (as a non-atomic
operation).
* tests/test.c [!AO_HAVE_store] (AO_store): Likewise.
* tests/test.c [!AO_HAVE_fetch_and_add1] (AO_fetch_and_add1): Likewise.
* tests/test.c (stubborn_count, uncollectable_count, collectable_count,
atomic_count, realloc_count): Change type from int to AO_t; add
volatile qualifier; update comment; remove FIXME.
* tests/test.c (extra_count): Define unconditionally; change type from
int to AO_t; add volatile qualifier; update comment.
* tests/test.c [!VERY_SMALL_CONFIG] (cons): Use AO_fetch_and_add1 to
get and update the values of extra_count and stubborn_count.
* tests/test.c (small_cons_uncollectable): Use AO_fetch_and_add1 to
update the value of uncollectable_count.
* tests/test.c [!DBG_HDRS_ALL] (run_one_test): Likewise.
* tests/test.c [GC_GCJ_SUPPORT] (gcj_cons): Remove obj_cnt static
variable; use extra_count instead of obj_cnt; use AO_fetch_and_add1 to
get and update the value of extra_count.
* tests/test.c (A): Add volatile qualifier; change the type of aa field
from sexpr to AO_t.
* tests/test.c (a): Do not define (and undefine).
* tests/test.c (a_set, a_get): New macros (based on AO_store and
AO_load, respectively).
* tests/test.c (reverse_test_inner): Use a_set() and a_get() to store
and fetch the value of a, respectively; use AO_fetch_and_add1 to update
the value of realloc_count.
* tests/test.c (dropped_something): Change type from int to AO_t.
* tests/test.c (counter): Remove.
* tests/test.c (mktree, chktree): Use extra_count instead of counter;
use AO_fetch_and_add1 to get and update the value of extra_count.
* tests/test.c [GC_PTHREADS && !SMALL_CONFIG && !GC_DEBUG]
(alloc8bytes): Use AO_fetch_and_add1 to update the value of
uncollectable_count.
* tests/test.c (alloc_small, run_one_test): Use AO_fetch_and_add1 to
update the value of atomic_count.
* tests/test.c (tree_test): Use AO_load and AO_store to get and set the
value of dropped_something, respectively.
* tests/test.c (check_heap_stats): Cast uncollectable_count,
atomic_count, stubborn_count and realloc_count to int in printf()
calls.
---
 tests/test.c | 146 ++++++++++++++++++++++++---------------------------
 1 file changed, 69 insertions(+), 77 deletions(-)

diff --git a/tests/test.c b/tests/test.c
index 11f2d83a..0489a6bb 100644
--- a/tests/test.c
+++ b/tests/test.c
@@ -158,20 +158,32 @@
 # include "private/gc_atomic_ops.h" /* for counters */
 #endif
 
-/* Allocation Statistics. Incremented without synchronization. */
-/* FIXME: We should be using synchronization. */
-int stubborn_count = 0;
-int uncollectable_count = 0;
-int atomic_count = 0;
-int realloc_count = 0;
-
-#ifdef AO_HAVE_fetch_and_add1
-  static volatile AO_t collectable_count;
-#else
-  int collectable_count = 0;
+/* Define AO primitives for a single-threaded mode. */
+#ifndef AO_CLEAR
+  /* AO_t not defined. */
+# define AO_t GC_word
+#endif
+#ifndef AO_HAVE_load
+# define AO_load(p) (*(p))
+#endif
+#ifndef AO_HAVE_store
+# define AO_store(p, v) (void)(*(p) = (v))
+#endif
+#ifndef AO_HAVE_fetch_and_add1
 # define AO_fetch_and_add1(p) ((*(p))++)
 #endif
 
+/* Allocation Statistics. Synchronization is not strictly necessary. */
+volatile AO_t stubborn_count = 0;
+volatile AO_t uncollectable_count = 0;
+volatile AO_t collectable_count = 0;
+volatile AO_t atomic_count = 0;
+volatile AO_t realloc_count = 0;
+
+volatile AO_t extra_count = 0; /* Amount of space wasted in cons node; */
+                               /* also used in gcj_cons, mktree and */
+                               /* chktree (for other purposes). */
+
 #if defined(GC_AMIGA_FASTALLOC) && defined(AMIGA)
 
   void GC_amiga_free_all_mem(void);
@@ -239,9 +251,6 @@ typedef struct SEXPR * sexpr;
 # define cdr(x) ((x) -> sexpr_cdr)
 # define is_nil(x) ((x) == nil)
 
-
-int extra_count = 0; /* Amount of space wasted in cons node */
-
 /* Silly implementation of Lisp cons. Intentionally wastes lots of space */
 /* to test collector. */
 # ifdef VERY_SMALL_CONFIG
@@ -251,11 +260,11 @@ sexpr cons (sexpr x, sexpr y)
 {
     sexpr r;
     int *p;
-    int my_extra = extra_count;
+    unsigned my_extra = (unsigned)AO_fetch_and_add1(&extra_count) % 5000;
 
-    stubborn_count++;
     r = (sexpr) GC_MALLOC_STUBBORN(sizeof(struct SEXPR) + my_extra);
     CHECK_OUT_OF_MEMORY(r);
+    AO_fetch_and_add1(&stubborn_count);
     for (p = (int *)r;
          (word)p < (word)r + my_extra + sizeof(struct SEXPR); p++) {
         if (*p) {
@@ -270,12 +279,6 @@ sexpr cons (sexpr x, sexpr y)
 # endif
     r -> sexpr_car = x;
     r -> sexpr_cdr = y;
-    my_extra++;
-    if ( my_extra >= 5000 ) {
-        extra_count = 0;
-    } else {
-        extra_count = my_extra;
-    }
     GC_END_STUBBORN_CHANGE(r);
     return(r);
 }
@@ -337,36 +340,32 @@ sexpr small_cons (sexpr x, sexpr y)
 
 sexpr small_cons_uncollectable (sexpr x, sexpr y)
 {
-    sexpr r;
+    sexpr r = (sexpr)GC_MALLOC_UNCOLLECTABLE(sizeof(struct SEXPR));
 
-    uncollectable_count++;
-    r = (sexpr) GC_MALLOC_UNCOLLECTABLE(sizeof(struct SEXPR));
     CHECK_OUT_OF_MEMORY(r);
+    AO_fetch_and_add1(&uncollectable_count);
     r -> sexpr_car = x;
     r -> sexpr_cdr = (sexpr)(~(GC_word)y);
     return(r);
 }
 
 #ifdef GC_GCJ_SUPPORT
-
-
-sexpr gcj_cons(sexpr x, sexpr y)
-{
-    GC_word * r;
+  sexpr gcj_cons(sexpr x, sexpr y)
+  {
     sexpr result;
-    static int obj_cnt = 0;
+    GC_word * r = (GC_word *)GC_GCJ_MALLOC(
+                        sizeof(struct SEXPR) + sizeof(struct fake_vtable*),
+                        (AO_fetch_and_add1(&extra_count) & 1) != 0
+                                ? &gcj_class_struct1
+                                : &gcj_class_struct2);
 
-    r = (GC_word *) GC_GCJ_MALLOC(sizeof(struct SEXPR)
-                                  + sizeof(struct fake_vtable*),
-                                  (++obj_cnt & 1) != 0 ? &gcj_class_struct1
-                                                       : &gcj_class_struct2);
     CHECK_OUT_OF_MEMORY(r);
     result = (sexpr)(r + 1);
     result -> sexpr_car = x;
     result -> sexpr_cdr = y;
     return(result);
-}
-#endif
+  }
+#endif /* GC_GCJ_SUPPORT */
 
 /* Return reverse(x) concatenated with y */
 sexpr reverse1(sexpr x, sexpr y)
@@ -626,11 +625,12 @@ void test_generic_malloc_or_special(void *p) {
 }
 
 /* Try to force a to be strangely aligned */
-struct {
+volatile struct {
   char dummy;
-  sexpr aa;
+  AO_t aa;
 } A;
-#define a A.aa
+#define a_set(p) AO_store(&A.aa, (AO_t)(p))
+#define a_get() (sexpr)AO_load(&A.aa)
 
 /*
  * Repeatedly reverse lists built out of very different sized cons cells.
@@ -670,7 +670,7 @@ void *GC_CALLBACK reverse_test_inner(void *data)
 # endif
 
     A.dummy = 17;
-    a = ints(1, 49);
+    a_set(ints(1, 49));
     b = ints(1, 50);
     c = ints(1, BIG);
     d = uncollectable_ints(1, 100);
@@ -679,22 +679,22 @@ void *GC_CALLBACK reverse_test_inner(void *data)
     /* Check that realloc updates object descriptors correctly */
     AO_fetch_and_add1(&collectable_count);
     f = (sexpr *)GC_MALLOC(4 * sizeof(sexpr));
-    realloc_count++;
     f = (sexpr *)GC_REALLOC((void *)f, 6 * sizeof(sexpr));
     CHECK_OUT_OF_MEMORY(f);
+    AO_fetch_and_add1(&realloc_count);
     f[5] = ints(1,17);
     AO_fetch_and_add1(&collectable_count);
     g = (sexpr *)GC_MALLOC(513 * sizeof(sexpr));
     test_generic_malloc_or_special(g);
-    realloc_count++;
     g = (sexpr *)GC_REALLOC((void *)g, 800 * sizeof(sexpr));
     CHECK_OUT_OF_MEMORY(g);
+    AO_fetch_and_add1(&realloc_count);
     g[799] = ints(1,18);
     AO_fetch_and_add1(&collectable_count);
     h = (sexpr *)GC_MALLOC(1025 * sizeof(sexpr));
-    realloc_count++;
     h = (sexpr *)GC_REALLOC((void *)h, 2000 * sizeof(sexpr));
     CHECK_OUT_OF_MEMORY(h);
+    AO_fetch_and_add1(&realloc_count);
 # ifdef GC_GCJ_SUPPORT
     h[1999] = gcj_ints(1,200);
     for (i = 0; i < 51; ++i)
@@ -714,13 +714,13 @@ void *GC_CALLBACK reverse_test_inner(void *data)
     GC_FREE((void *)e);
 
     check_ints(b,1,50);
-    check_ints(a,1,49);
+    check_ints(a_get(),1,49);
     for (i = 0; i < 50; i++) {
         check_ints(b,1,50);
         b = reverse(reverse(b));
     }
     check_ints(b,1,50);
-    check_ints(a,1,49);
+    check_ints(a_get(),1,49);
     for (i = 0; i < 60; i++) {
 #       if (defined(GC_PTHREADS) || defined(GC_WIN32_THREADS)) \
            && (NTHREADS > 0)
@@ -729,18 +729,14 @@ void *GC_CALLBACK reverse_test_inner(void *data)
         /* This maintains the invariant that a always points to a list of */
         /* 49 integers. Thus this is thread safe without locks, */
         /* assuming atomic pointer assignments. */
-        a = reverse(reverse(a));
+        a_set(reverse(reverse(a_get())));
 #       if !defined(AT_END) && !defined(THREADS)
           /* This is not thread safe, since realloc explicitly deallocates */
-          if (i & 1) {
-            a = (sexpr)GC_REALLOC((void *)a, 500);
-          } else {
-            a = (sexpr)GC_REALLOC((void *)a, 8200);
-          }
-          realloc_count++;
+          a_set(GC_REALLOC(a_get(), (i & 1) != 0 ? 500 : 8200));
+          AO_fetch_and_add1(&realloc_count);
 #       endif
     }
-    check_ints(a,1,49);
+    check_ints(a_get(),1,49);
     check_ints(b,1,50);
 
     /* Restore c and d values. */
@@ -756,7 +752,7 @@ void *GC_CALLBACK reverse_test_inner(void *data)
 # endif
       check_ints(h[1999], 1,200);
 # ifndef THREADS
-    a = 0;
+    a_set(NULL);
 # endif
     *(sexpr volatile *)&b = 0;
     *(sexpr volatile *)&c = 0;
@@ -769,8 +765,6 @@ void reverse_test(void)
     (void)GC_do_blocking(reverse_test_inner, 0);
 }
 
-#undef a
-
 /*
  * The rest of this builds balanced binary trees, checks that they don't
  * disappear, and tests finalization.
@@ -783,7 +777,7 @@ typedef struct treenode {
 
 int finalizable_count = 0;
 int finalized_count = 0;
-volatile int dropped_something = 0;
+volatile AO_t dropped_something = 0;
 
 void GC_CALLBACK finalizer(void * obj, void * client_data)
 {
@@ -814,8 +808,6 @@ void GC_CALLBACK finalizer(void * obj, void * client_data)
 # endif
 }
 
-size_t counter = 0;
-
 # define MAX_FINALIZED ((NTHREADS+1)*4000)
 
 # if !defined(MACOS)
@@ -851,7 +843,7 @@ tn * mktree(int n)
     result -> level = n;
     result -> lchild = mktree(n-1);
     result -> rchild = mktree(n-1);
-    if (counter++ % 17 == 0 && n >= 2) {
+    if (AO_fetch_and_add1(&extra_count) % 17 == 0 && n >= 2) {
         tn * tmp;
 
         CHECK_OUT_OF_MEMORY(result->lchild);
@@ -860,7 +852,7 @@ tn * mktree(int n)
         result -> lchild -> rchild = result -> rchild -> lchild;
         result -> rchild -> lchild = tmp;
     }
-    if (counter++ % 119 == 0) {
+    if (AO_fetch_and_add1(&extra_count) % 119 == 0) {
 #       ifndef GC_NO_FINALIZATION
           int my_index;
           void *new_link;
@@ -977,13 +969,13 @@ void chktree(tn *t, int n)
         GC_printf("Lost a node at level %d - collector is broken\n", n);
         FAIL;
     }
-    if (counter++ % 373 == 0) {
-        (void) GC_MALLOC(counter%5001);
+    if (AO_fetch_and_add1(&extra_count) % 373 == 0) {
+        (void)GC_MALLOC((unsigned)AO_fetch_and_add1(&extra_count) % 5001);
         AO_fetch_and_add1(&collectable_count);
     }
     chktree(t -> lchild, n-1);
-    if (counter++ % 73 == 0) {
-        (void) GC_MALLOC(counter%373);
+    if (AO_fetch_and_add1(&extra_count) % 73 == 0) {
+        (void)GC_MALLOC((unsigned)AO_fetch_and_add1(&extra_count) % 373);
         AO_fetch_and_add1(&collectable_count);
     }
     chktree(t -> rchild, n-1);
@@ -1004,9 +996,9 @@ void * alloc8bytes(void)
 
     my_free_list_ptr = (void **)pthread_getspecific(fl_key);
     if (my_free_list_ptr == 0) {
-        uncollectable_count++;
         my_free_list_ptr = GC_NEW_UNCOLLECTABLE(void *);
         CHECK_OUT_OF_MEMORY(my_free_list_ptr);
+        AO_fetch_and_add1(&uncollectable_count);
         if (pthread_setspecific(fl_key, my_free_list_ptr) != 0) {
             GC_printf("pthread_setspecific failed\n");
             FAIL;
@@ -1050,11 +1042,11 @@ void alloc_small(int n)
 {
     int i;
 
     for (i = 0; i < n; i += 8) {
-        atomic_count++;
         if (alloc8bytes() == 0) {
             GC_printf("Out of memory\n");
             FAIL;
         }
+        AO_fetch_and_add1(&atomic_count);
     }
 }
@@ -1081,11 +1073,11 @@ void tree_test(void)
       alloc_small(5000000);
 #   endif
     chktree(root, TREE_HEIGHT);
-    if (finalized_count && ! dropped_something) {
+    if (finalized_count && !AO_load(&dropped_something)) {
        GC_printf("Premature finalization - collector is broken\n");
        FAIL;
     }
-    dropped_something = 1;
+    AO_store(&dropped_something, (AO_t)TRUE);
     GC_noop1((word)root);   /* Root needs to remain live until */
                             /* dropped_something is set. */
     root = mktree(TREE_HEIGHT);
@@ -1297,7 +1289,7 @@ void run_one_test(void)
                   (unsigned long)GC_size(GC_malloc(0)));
         FAIL;
       }
-      uncollectable_count++;
+      AO_fetch_and_add1(&uncollectable_count);
       if (GC_size(GC_malloc_uncollectable(0)) != MIN_WORDS * sizeof(GC_word)) {
         GC_printf("GC_malloc_uncollectable(0) failed\n");
         FAIL;
@@ -1407,10 +1399,10 @@ void run_one_test(void)
         AO_fetch_and_add1(&collectable_count);
         GC_FREE(GC_MALLOC(0));
         (void)GC_MALLOC_ATOMIC(0);
-        atomic_count++;
+        AO_fetch_and_add1(&atomic_count);
         GC_FREE(GC_MALLOC_ATOMIC(0));
         test_generic_malloc_or_special(GC_malloc_atomic(1));
-        atomic_count++;
+        AO_fetch_and_add1(&atomic_count);
       }
     }
 # ifdef GC_GCJ_SUPPORT
@@ -1626,10 +1618,10 @@ void check_heap_stats(void)
     GC_printf("Completed %u tests\n", n_tests);
     GC_printf("Allocated %d collectable objects\n", (int)collectable_count);
     GC_printf("Allocated %d uncollectable objects\n",
-              uncollectable_count);
-    GC_printf("Allocated %d atomic objects\n", atomic_count);
-    GC_printf("Allocated %d stubborn objects\n", stubborn_count);
-    GC_printf("Reallocated %d objects\n", realloc_count);
+              (int)uncollectable_count);
+    GC_printf("Allocated %d atomic objects\n", (int)atomic_count);
+    GC_printf("Allocated %d stubborn objects\n", (int)stubborn_count);
+    GC_printf("Reallocated %d objects\n", (int)realloc_count);
     GC_printf("Finalized %d/%d objects - ", finalized_count,
               finalizable_count);
 # ifndef GC_NO_FINALIZATION
-- 
2.40.0
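
Note for readers new to the libatomic_ops conventions relied on above:
AO_fetch_and_add1() returns the counter's previous value, which is why
the non-atomic single-threaded substitute is defined as a post-increment
((*(p))++) rather than a pre-increment. The sketch below is a minimal
standalone illustration of that interface contract; it is not part of
the patch, and the file name and the size_t stand-in for AO_t (which the
patch maps to GC_word) are assumptions made only to keep the example
self-contained.

/* ao_fallback_demo.c - standalone sketch (illustrative only).       */
/* Mirrors the fallbacks the patch adds to tests/test.c: when the    */
/* libatomic_ops primitives are absent (single-threaded build), map  */
/* them to plain memory operations with the same interface.          */
#include <stddef.h>
#include <stdio.h>

#ifndef AO_HAVE_fetch_and_add1
  typedef size_t AO_t; /* stand-in; the patch defines AO_t as GC_word */
# define AO_load(p) (*(p))
# define AO_store(p, v) (void)(*(p) = (v))
# define AO_fetch_and_add1(p) ((*(p))++) /* yields the old value */
#endif

static volatile AO_t collectable_count = 0;

int main(void)
{
    int i;

    for (i = 0; i < 3; ++i) {
        /* Post-increment semantics: the first call returns 0. */
        AO_t old = AO_fetch_and_add1(&collectable_count);

        printf("counter was %lu before increment\n", (unsigned long)old);
    }
    printf("final value: %lu\n", (unsigned long)AO_load(&collectable_count));
    AO_store(&collectable_count, 0); /* reset via the store primitive */
    return 0;
}

When the real libatomic_ops header is available, the #ifndef guards drop
out and the very same call sites compile against the genuinely atomic
primitives, which is how the patch silences the TSan reports without
forking the counter-update code.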