From 76b3d3a57583fdb19152f4d911b11725e31b4a06 Mon Sep 17 00:00:00 2001
From: hboehm
Date: Wed, 19 Dec 2007 01:51:39 +0000
Subject: [PATCH] 2007-12-18 Hans Boehm (really mainly Peter Wang)

	* include/gc_inline.h, include/gc_tiny_fl.h: cleanups to make usable in other
	contexts.

2007-12-18 Hans Boehm (really Radek Polak)

	* include/gc.h: Don't define GC_HAVE_BUILTIN_BACKTRACE for uclibc.

---
 ChangeLog            |  9 +++++++++
 include/gc.h         |  2 +-
 include/gc_inline.h  | 28 ++++++++++++++--------------
 include/gc_tiny_fl.h |  8 ++++----
 4 files changed, 28 insertions(+), 19 deletions(-)

diff --git a/ChangeLog b/ChangeLog
index fe6792ee..122ebbe5 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,12 @@
+2007-12-18 Hans Boehm (really mainly Peter Wang)
+
+	* include/gc_inline.h, include/gc_tiny_fl.h: cleanups to make usable in other
+	contexts.
+
+2007-12-18 Hans Boehm (really Radek Polak)
+
+	* include/gc.h: Don't define GC_HAVE_BUILTIN_BACKTRACE for uclibc.
+
 2007-12-18 Hans Boehm
 
 	* gc_cpp.cc: Don't include gc_cpp.h from local directory.
diff --git a/include/gc.h b/include/gc.h
index 0c6f5e49..a23cd6e1 100644
--- a/include/gc.h
+++ b/include/gc.h
@@ -462,7 +462,7 @@ GC_API void * GC_malloc_atomic_ignore_off_page(size_t lb);
 #if defined(__linux__) || defined(__GLIBC__)
 # include <features.h>
 # if (__GLIBC__ == 2 && __GLIBC_MINOR__ >= 1 || __GLIBC__ > 2) \
-     && !defined(__ia64__)
+     && !defined(__ia64__) && !defined(__UCLIBC__)
 #   ifndef GC_HAVE_BUILTIN_BACKTRACE
 #     define GC_HAVE_BUILTIN_BACKTRACE
 #   endif
diff --git a/include/gc_inline.h b/include/gc_inline.h
index da7e2e91..ffc5b3ea 100644
--- a/include/gc_inline.h
+++ b/include/gc_inline.h
@@ -34,9 +34,9 @@
 #endif /* __GNUC__ */
 
 /* The ultimately general inline allocation macro. Allocate an object */
-/* of size bytes, putting the resulting pointer in result. Tiny_fl is */
-/* a "tiny" free list array, which will be used first, if the size */
-/* is appropriate. If bytes is too large, we allocate with */
+/* of size granules, putting the resulting pointer in result. Tiny_fl */
+/* is a "tiny" free list array, which will be used first, if the size */
+/* is appropriate. If granules is too large, we allocate with */
 /* default_expr instead. If we need to refill the free list, we use */
 /* GC_generic_malloc_many with the indicated kind. */
 /* Tiny_fl should be an array of GC_TINY_FREELISTS void * pointers. */
@@ -47,7 +47,7 @@
 /* be initialized to (void *)0. */
 /* We rely on much of this hopefully getting optimized away in the */
 /* num_direct = 0 case. */
-/* Particularly if bytes is constant, this should generate a small */
+/* Particularly if granules is constant, this should generate a small */
 /* amount of code. */
 # define GC_FAST_MALLOC_GRANS(result,granules,tiny_fl,num_direct,\
                               kind,default_expr,init) \
@@ -64,17 +64,17 @@
           /* Entry contains counter or NULL */ \
           if ((GC_word)my_entry - 1 < num_direct) { \
               /* Small counter value, not NULL */ \
-              *my_fl = (ptr_t)my_entry + granules + 1; \
+              *my_fl = (char *)my_entry + granules + 1; \
               result = default_expr; \
               goto out; \
           } else { \
               /* Large counter or NULL */ \
               GC_generic_malloc_many(((granules) == 0? GC_GRANULE_BYTES : \
-                                      RAW_BYTES_FROM_INDEX(granules)), \
+                                      GC_RAW_BYTES_FROM_INDEX(granules)), \
                                      kind, my_fl); \
               my_entry = *my_fl; \
               if (my_entry == 0) { \
-                  result = GC_oom_fn(bytes); \
+                  result = GC_oom_fn(granules*GC_GRANULE_BYTES); \
                   goto out; \
               } \
           } \
@@ -84,7 +84,7 @@
       *my_fl = next; \
       init; \
       PREFETCH_FOR_WRITE(next); \
-      GC_ASSERT(GC_size(result) >= bytes + EXTRA_BYTES); \
+      GC_ASSERT(GC_size(result) >= granules*GC_GRANULE_BYTES); \
       GC_ASSERT((kind) == PTRFREE || ((GC_word *)result)[1] == 0); \
     out: ; \
   } \
@@ -102,17 +102,17 @@
 /* a global array. */
 # define GC_MALLOC_WORDS(result,n,tiny_fl) \
 { \
-    size_t grans = WORDS_TO_WHOLE_GRANULES(n); \
+    size_t grans = GC_WORDS_TO_WHOLE_GRANULES(n); \
     GC_FAST_MALLOC_GRANS(result, grans, tiny_fl, 0, \
-                         NORMAL, GC_malloc(grans*GRANULE_BYTES), \
+                         NORMAL, GC_malloc(grans*GC_GRANULE_BYTES), \
                          *(void **)result = 0); \
 }
 
 # define GC_MALLOC_ATOMIC_WORDS(result,n,tiny_fl) \
 { \
-    size_t grans = WORDS_TO_WHOLE_GRANULES(n); \
+    size_t grans = GC_WORDS_TO_WHOLE_GRANULES(n); \
     GC_FAST_MALLOC_GRANS(result, grans, tiny_fl, 0, \
-                         PTRFREE, GC_malloc_atomic(grans*GRANULE_BYTES), \
+                         PTRFREE, GC_malloc_atomic(grans*GC_GRANULE_BYTES), \
                          /* no initialization */); \
 }
 
@@ -120,9 +120,9 @@
 /* And once more for two word initialized objects: */
 # define GC_CONS(result, first, second, tiny_fl) \
 { \
-    size_t grans = WORDS_TO_WHOLE_GRANULES(2); \
+    size_t grans = GC_WORDS_TO_WHOLE_GRANULES(2); \
     GC_FAST_MALLOC_GRANS(result, grans, tiny_fl, 0, \
-                         NORMAL, GC_malloc(grans*GRANULE_BYTES), \
+                         NORMAL, GC_malloc(grans*GC_GRANULE_BYTES), \
                          *(void **)result = (void *)(first)); \
     ((void **)(result))[1] = (void *)(second); \
 }
diff --git a/include/gc_tiny_fl.h b/include/gc_tiny_fl.h
index 52b6864b..91b77fdf 100644
--- a/include/gc_tiny_fl.h
+++ b/include/gc_tiny_fl.h
@@ -27,7 +27,7 @@
  */
 
 /*
- * We always set GRANULE_BYTES to twice the length of a pointer.
+ * We always set GC_GRANULE_BYTES to twice the length of a pointer.
  * This means that all allocation requests are rounded up to the next
  * multiple of 16 on 64-bit architectures or 8 on 32-bit architectures.
  * This appears to be a reasonable compromise between fragmentation overhead
@@ -61,7 +61,7 @@
 #if GC_GRANULE_WORDS == 2
 # define GC_WORDS_TO_GRANULES(n) ((n)>>1)
 #else
-# define GC_WORDS_TO_GRANULES(n) ((n)*sizeof(void *)/GRANULE_BYTES)
+# define GC_WORDS_TO_GRANULES(n) ((n)*sizeof(void *)/GC_GRANULE_BYTES)
 #endif
 
 /* A "tiny" free list header contains TINY_FREELISTS pointers to */
@@ -76,7 +76,7 @@
 # endif
 #endif /* !GC_TINY_FREELISTS */
 
-/* The ith free list corresponds to size i*GRANULE_BYTES */
+/* The ith free list corresponds to size i*GC_GRANULE_BYTES */
 /* Internally to the collector, the index can be computed with */
 /* ROUNDED_UP_GRANULES. Externally, we don't know whether */
 /* DONT_ADD_BYTE_AT_END is set, but the client should know. */
@@ -84,6 +84,6 @@
 /* Convert a free list index to the actual size of objects */
 /* on that list, including extra space we added. Not an */
 /* inverse of the above. */
-#define RAW_BYTES_FROM_INDEX(i) ((i) * GC_GRANULE_BYTES)
+#define GC_RAW_BYTES_FROM_INDEX(i) ((i) * GC_GRANULE_BYTES)
 
 #endif /* GC_TINY_FL_H */
-- 
2.40.0
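
Note (not part of the patch): the gc_tiny_fl.h macros renamed above are plain size
arithmetic and pull in nothing else from the collector, which is what makes the
header usable "in other contexts". The sketch below is a hypothetical standalone
demo of that arithmetic; only GC_GRANULE_BYTES, GC_TINY_FREELISTS,
GC_WORDS_TO_GRANULES and GC_RAW_BYTES_FROM_INDEX come from the header, while the
include path and the sample values are assumptions.

/* Hypothetical demo, not part of the patch: exercises the size/index */
/* arithmetic exported by gc_tiny_fl.h after the GC_ prefixing.       */
#include <stdio.h>

#include "gc_tiny_fl.h"   /* assumed to be on the include path */

int main(void)
{
    unsigned words = 3;                              /* a 3-word request      */
    unsigned fl_index = GC_WORDS_TO_GRANULES(words); /* truncating conversion */

    printf("granule size:    %d bytes\n", (int)GC_GRANULE_BYTES);
    printf("tiny free lists: %d\n", (int)GC_TINY_FREELISTS);
    printf("%u words map to granule index %u\n", words, fl_index);
    printf("free list %u holds objects of %u raw bytes\n",
           fl_index, (unsigned)GC_RAW_BYTES_FROM_INDEX(fl_index));
    return 0;
}

Note that GC_WORDS_TO_GRANULES truncates; the allocation macros in gc_inline.h go
through GC_WORDS_TO_WHOLE_GRANULES instead, so a request that does not fill a whole
granule is rounded up to the next free-list index.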