From: Ivan Maidanski Date: Fri, 15 Jan 2016 09:48:06 +0000 (+0300) Subject: GC_malloc_[atomic_]uncollectable generalization X-Git-Tag: gc7_6_0~88 X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=283e7fded73a8428f94fa0e0baa24e5ed2a1f78b;p=gc GC_malloc_[atomic_]uncollectable generalization * include/gc_mark.h (GC_generic_malloc_uncollectable): New public function declaration. * malloc.c (GC_generic_malloc_uncollectable): New function (implementation mostly copied from GC_malloc_uncollectable). * malloc.c (GC_malloc_uncollectable, GC_malloc_atomic_uncollectable, calloc): Use GC_generic_malloc_uncollectable. * mallocx.c (GC_generic_or_special_malloc): Likewise. * mallocx.c (GC_malloc_atomic_uncollectable): Move to malloc.c. --- diff --git a/include/gc_mark.h b/include/gc_mark.h index d5dc42e9..330d71e8 100644 --- a/include/gc_mark.h +++ b/include/gc_mark.h @@ -204,6 +204,11 @@ GC_API GC_ATTR_MALLOC GC_ATTR_ALLOC_SIZE(1) void * GC_CALL /* first page of the resulting object */ /* are ignored. */ +/* Generalized version of GC_malloc_[atomic_]uncollectable. */ +GC_API GC_ATTR_MALLOC GC_ATTR_ALLOC_SIZE(1) void * GC_CALL + GC_generic_malloc_uncollectable( + size_t /* lb */, int /* knd */); + /* Same as above but primary for allocating an object of the same kind */ /* as an existing one (kind obtained by GC_get_kind_and_size). */ /* Not suitable for GCJ and typed-malloc kinds. */ diff --git a/malloc.c b/malloc.c index 50bce625..72a98ec2 100644 --- a/malloc.c +++ b/malloc.c @@ -293,8 +293,8 @@ GC_API GC_ATTR_MALLOC void * GC_CALL GC_generic_malloc(size_t lb, int k) } } -/* Allocate lb bytes of pointerful, traced, but not collectible data. */ -GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc_uncollectable(size_t lb) +GC_API GC_ATTR_MALLOC void * GC_CALL GC_generic_malloc_uncollectable( + size_t lb, int k) { void *op; size_t lg; @@ -307,9 +307,9 @@ GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc_uncollectable(size_t lb) /* collected anyway.
*/ lg = GC_size_map[lb]; LOCK(); - op = GC_freelists[UNCOLLECTABLE][lg]; - if (EXPECT(op != 0, TRUE)) { - GC_freelists[UNCOLLECTABLE][lg] = obj_link(op); + op = GC_freelists[k][lg]; + if (EXPECT(op != NULL, TRUE)) { + GC_freelists[k][lg] = obj_link(op); obj_link(op) = 0; GC_bytes_allocd += GRANULES_TO_BYTES(lg); /* Mark bit was already set on free list. It will be */ @@ -319,15 +319,16 @@ GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc_uncollectable(size_t lb) UNLOCK(); } else { UNLOCK(); - op = GC_generic_malloc(lb, UNCOLLECTABLE); + op = GC_generic_malloc(lb, k); /* For small objects, the free lists are completely marked. */ } GC_ASSERT(0 == op || GC_is_marked(op)); } else { hdr * hhdr; - op = GC_generic_malloc(lb, UNCOLLECTABLE); - if (0 == op) return(0); + op = GC_generic_malloc(lb, k); + if (NULL == op) + return NULL; GC_ASSERT(((word)op & (HBLKSIZE - 1)) == 0); /* large block */ hhdr = HDR(op); @@ -347,6 +348,23 @@ GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc_uncollectable(size_t lb) return op; } +/* Allocate lb bytes of pointerful, traced, but not collectible data. */ +GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc_uncollectable(size_t lb) +{ + return GC_generic_malloc_uncollectable(lb, UNCOLLECTABLE); +} + +#ifdef GC_ATOMIC_UNCOLLECTABLE + /* Allocate lb bytes of pointer-free, untraced, uncollectible data */ + /* This is normally roughly equivalent to the system malloc. */ + /* But it may be useful if malloc is redefined.
*/ + GC_API GC_ATTR_MALLOC void * GC_CALL + GC_malloc_atomic_uncollectable(size_t lb) + { + return GC_generic_malloc_uncollectable(lb, AUNCOLLECTABLE); + } +#endif /* GC_ATOMIC_UNCOLLECTABLE */ + #if defined(REDIRECT_MALLOC) && !defined(REDIRECT_MALLOC_IN_HEADER) # ifndef MSWINCE @@ -428,7 +446,7 @@ GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc_uncollectable(size_t lb) && (word)caller < (word)GC_libpthread_end) || ((word)caller >= (word)GC_libld_start && (word)caller < (word)GC_libld_end)) - return GC_malloc_uncollectable(n*lb); + return GC_generic_malloc_uncollectable(n * lb, UNCOLLECTABLE); /* The two ranges are actually usually adjacent, so there may */ /* be a way to speed this up. */ } diff --git a/mallocx.c b/mallocx.c index a8827b9e..f84b0b60 100644 --- a/mallocx.c +++ b/mallocx.c @@ -69,11 +69,10 @@ GC_API GC_ATTR_MALLOC void * GC_CALL GC_generic_or_special_malloc(size_t lb, case NORMAL: return GC_malloc(lb); case UNCOLLECTABLE: - return GC_malloc_uncollectable(lb); # ifdef GC_ATOMIC_UNCOLLECTABLE case AUNCOLLECTABLE: - return GC_malloc_atomic_uncollectable(lb); # endif + return GC_generic_malloc_uncollectable(lb, knd); default: return GC_generic_malloc(lb, knd); } @@ -515,59 +514,6 @@ GC_API int GC_CALL GC_posix_memalign(void **memptr, size_t align, size_t lb) return 0; } -#ifdef GC_ATOMIC_UNCOLLECTABLE - /* Allocate lb bytes of pointer-free, untraced, uncollectible data */ - /* This is normally roughly equivalent to the system malloc. */ - /* But it may be useful if malloc is redefined. */ - GC_API GC_ATTR_MALLOC void * GC_CALL - GC_malloc_atomic_uncollectable(size_t lb) - { - void *op; - size_t lg; - DCL_LOCK_STATE; - - if (SMALL_OBJ(lb)) { - GC_DBG_COLLECT_AT_MALLOC(lb); - if (EXTRA_BYTES != 0 && lb != 0) lb--; - /* We don't need the extra byte, since this won't be */ - /* collected anyway.
*/ - lg = GC_size_map[lb]; - LOCK(); - op = GC_freelists[AUNCOLLECTABLE][lg]; - if (EXPECT(op != 0, TRUE)) { - GC_freelists[AUNCOLLECTABLE][lg] = obj_link(op); - obj_link(op) = 0; - GC_bytes_allocd += GRANULES_TO_BYTES(lg); - /* Mark bit was already set while object was on free list. */ - GC_non_gc_bytes += GRANULES_TO_BYTES(lg); - UNLOCK(); - } else { - UNLOCK(); - op = (ptr_t)GC_generic_malloc(lb, AUNCOLLECTABLE); - } - GC_ASSERT(0 == op || GC_is_marked(op)); - return((void *) op); - } else { - hdr * hhdr; - - op = (ptr_t)GC_generic_malloc(lb, AUNCOLLECTABLE); - if (0 == op) return(0); - - GC_ASSERT(((word)op & (HBLKSIZE - 1)) == 0); - hhdr = HDR(op); - - LOCK(); - set_mark_bit_from_hdr(hhdr, 0); /* Only object. */ -# ifndef THREADS - GC_ASSERT(hhdr -> hb_n_marks == 0); -# endif - hhdr -> hb_n_marks = 1; - UNLOCK(); - return((void *) op); - } - } -#endif /* GC_ATOMIC_UNCOLLECTABLE */ - /* provide a version of strdup() that uses the collector to allocate the copy of the string */ GC_API GC_ATTR_MALLOC char * GC_CALL GC_strdup(const char *s)