* an object of (small) size lb as follows:
*
* lg = GC_size_map[lb];
- * op = GC_objfreelist[lg];
+ * op = GC_freelists[NORMAL][lg];
* if (NULL == op) {
* op = GENERAL_MALLOC(lb, NORMAL);
* } else {
- * GC_objfreelist[lg] = obj_link(op);
+ * GC_freelists[NORMAL][lg] = obj_link(op);
* }
*
* Note that this is very fast if the free list is non-empty; it should
ptr_t p;
if (sz > MAXOBJWORDS) return(FALSE);
- for (p = GC_sobjfreelist[sz]; p != 0; p = obj_link(p)) {
+ for (p = GC_freelists[STUBBORN][sz]; p != 0; p = obj_link(p)) {
if (HBLKPTR(p) == h) return(TRUE);
}
return(FALSE);
/* as described in gc_mark.h. */
} mse;
+/* Predefined kinds: */
+#define PTRFREE 0
+#define NORMAL 1
+#define UNCOLLECTABLE 2
+#ifdef GC_ATOMIC_UNCOLLECTABLE
+# define AUNCOLLECTABLE 3
+# define STUBBORN 4
+# define IS_UNCOLLECTABLE(k) (((k) & ~1) == UNCOLLECTABLE)
+#else
+# define STUBBORN 3
+# define IS_UNCOLLECTABLE(k) ((k) == UNCOLLECTABLE)
+#endif
+
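The "& ~1" form of IS_UNCOLLECTABLE works because UNCOLLECTABLE and
AUNCOLLECTABLE are deliberately adjacent even/odd values. A minimal
self-check sketch, not part of the patch, assuming it is compiled where
the macros above are visible (e.g. after including gc_priv.h);
check_kind_macros is a hypothetical name:

    #include <assert.h>

    /* Clearing the low bit maps UNCOLLECTABLE (2) and AUNCOLLECTABLE (3) */
    /* onto the same value, so IS_UNCOLLECTABLE() holds for exactly those */
    /* two kinds when GC_ATOMIC_UNCOLLECTABLE is defined.                 */
    static void check_kind_macros(void)
    {
        assert(!IS_UNCOLLECTABLE(PTRFREE));
        assert(!IS_UNCOLLECTABLE(NORMAL));
        assert(IS_UNCOLLECTABLE(UNCOLLECTABLE));
    #   ifdef GC_ATOMIC_UNCOLLECTABLE
            assert(IS_UNCOLLECTABLE(AUNCOLLECTABLE));
            assert(!IS_UNCOLLECTABLE(STUBBORN));
    #   endif
    }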
/* Lists of all heap blocks and free lists */
/* as well as other random data structures */
/* that should not be scanned by the */
/* multi-threaded case, we currently only save the */
/* calling stack. */
# endif
+# ifndef PREDEFINED_KINDS
+# ifdef STUBBORN_ALLOC
+# define PREDEFINED_KINDS (STUBBORN+1)
+# else
+# define PREDEFINED_KINDS STUBBORN
+# endif
+# endif
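Spelled out, the first dimension of the free-list table under each
configuration (derived from the macros above):

    /* GC_ATOMIC_UNCOLLECTABLE + STUBBORN_ALLOC : PREDEFINED_KINDS == 5 */
    /* GC_ATOMIC_UNCOLLECTABLE only             : PREDEFINED_KINDS == 4 */
    /* STUBBORN_ALLOC only                      : PREDEFINED_KINDS == 4 */
    /* neither                                  : PREDEFINED_KINDS == 3 */

In the two cases without STUBBORN_ALLOC, the value of STUBBORN is one past
the last kind in use, so using it directly as the row count is correct and
the STUBBORN row is simply never allocated.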
# ifndef SEPARATE_GLOBALS
-# define GC_objfreelist GC_arrays._objfreelist
- void *_objfreelist[MAXOBJGRANULES+1];
- /* free list for objects */
-# define GC_aobjfreelist GC_arrays._aobjfreelist
- void *_aobjfreelist[MAXOBJGRANULES+1];
- /* free list for atomic objs */
-# endif
- void *_uobjfreelist[MAXOBJGRANULES+1];
- /* Uncollectible but traced objs */
- /* objects on this and auobjfreelist */
- /* are always marked, except during */
- /* garbage collections. */
-# ifdef GC_ATOMIC_UNCOLLECTABLE
-# define GC_auobjfreelist GC_arrays._auobjfreelist
- void *_auobjfreelist[MAXOBJGRANULES+1];
- /* Atomic uncollectible but traced objs */
+# define GC_freelists GC_arrays._freelists
+ void *_freelists[PREDEFINED_KINDS][MAXOBJGRANULES + 1];
+                          /* Array of free lists for objects of the */
+                          /* predefined kinds, indexed by kind: */
+                          /* ptr-free (atomic), normal, uncollectible, */
+                          /* atomic uncollectible (if enabled) and */
+                          /* immutable/stubborn (if enabled). */
# endif
size_t _size_map[MAXOBJBYTES+1];
/* Number of granules to allocate when asked for a certain */
/* number of bytes. */
-# ifdef STUBBORN_ALLOC
-# define GC_sobjfreelist GC_arrays._sobjfreelist
- ptr_t _sobjfreelist[MAXOBJGRANULES+1];
- /* Free list for immutable objects. */
-# endif
# ifdef MARK_BIT_PER_GRANULE
# define GC_obj_map GC_arrays._obj_map
short * _obj_map[MAXOBJGRANULES+1];
#define GC_size_map GC_arrays._size_map
#define GC_static_roots GC_arrays._static_roots
#define GC_top_index GC_arrays._top_index
-#define GC_uobjfreelist GC_arrays._uobjfreelist
#define GC_valid_offsets GC_arrays._valid_offsets
#define beginGC_arrays ((ptr_t)(&GC_arrays))
#ifdef SEPARATE_GLOBALS
extern word GC_bytes_allocd;
/* Number of bytes allocated during this collection cycle. */
- extern ptr_t GC_objfreelist[MAXOBJGRANULES+1];
- /* free list for NORMAL objects */
-# define beginGC_objfreelist ((ptr_t)(&GC_objfreelist))
-# define endGC_objfreelist (beginGC_objfreelist + sizeof(GC_objfreelist))
-
- extern ptr_t GC_aobjfreelist[MAXOBJGRANULES+1];
- /* free list for atomic (PTRFREE) objs */
-# define beginGC_aobjfreelist ((ptr_t)(&GC_aobjfreelist))
-# define endGC_aobjfreelist (beginGC_aobjfreelist + sizeof(GC_aobjfreelist))
+ extern void *GC_freelists[PREDEFINED_KINDS][MAXOBJGRANULES + 1];
+ /* Array of free lists for objects of predefined kinds. */
#endif /* SEPARATE_GLOBALS */
-/* Predefined kinds: */
-#define PTRFREE 0
-#define NORMAL 1
-#define UNCOLLECTABLE 2
-#ifdef GC_ATOMIC_UNCOLLECTABLE
-# define AUNCOLLECTABLE 3
-# define STUBBORN 4
-# define IS_UNCOLLECTABLE(k) (((k) & ~1) == UNCOLLECTABLE)
-#else
-# define STUBBORN 3
-# define IS_UNCOLLECTABLE(k) ((k) == UNCOLLECTABLE)
-#endif
-
GC_EXTERN unsigned GC_n_kinds;
GC_EXTERN word GC_n_heap_sects; /* Number of separately added heap */
GC_DBG_COLLECT_AT_MALLOC(lb);
lg = GC_size_map[lb];
LOCK();
- op = GC_aobjfreelist[lg];
+ op = GC_freelists[PTRFREE][lg];
if (EXPECT(0 == op, FALSE)) {
UNLOCK();
return(GENERAL_MALLOC((word)lb, PTRFREE));
}
- GC_aobjfreelist[lg] = obj_link(op);
+ GC_freelists[PTRFREE][lg] = obj_link(op);
GC_bytes_allocd += GRANULES_TO_BYTES(lg);
UNLOCK();
return((void *) op);
GC_DBG_COLLECT_AT_MALLOC(lb);
lg = GC_size_map[lb];
LOCK();
- op = GC_objfreelist[lg];
+ op = GC_freelists[NORMAL][lg];
if (EXPECT(0 == op, FALSE)) {
UNLOCK();
return (GENERAL_MALLOC((word)lb, NORMAL));
<= (word)GC_greatest_plausible_heap_addr
&& (word)obj_link(op)
>= (word)GC_least_plausible_heap_addr));
- GC_objfreelist[lg] = obj_link(op);
+ GC_freelists[NORMAL][lg] = obj_link(op);
obj_link(op) = 0;
GC_bytes_allocd += GRANULES_TO_BYTES(lg);
UNLOCK();
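With the unified table, the PTRFREE and NORMAL fast paths above differ
only in the kind index. A minimal sketch of a kind-parameterized helper
they could share; small_malloc_kind is a hypothetical name, and it
assumes lb is already known to be small and that the caller handles the
debug hooks (GC_DBG_COLLECT_AT_MALLOC etc.):

    /* Hypothetical helper, not part of the patch: one fast path over   */
    /* the unified free-list table for any predefined kind k.  Note the */
    /* real PTRFREE path skips clearing the link word, since ptr-free   */
    /* objects are never scanned.                                       */
    static void *small_malloc_kind(size_t lb, int k)
    {
        void *op;
        size_t lg = GC_size_map[lb];

        LOCK();
        op = GC_freelists[k][lg];
        if (EXPECT(0 == op, FALSE)) {
            UNLOCK();
            return GENERAL_MALLOC((word)lb, k);
        }
        GC_freelists[k][lg] = obj_link(op);
        obj_link(op) = 0;
        GC_bytes_allocd += GRANULES_TO_BYTES(lg);
        UNLOCK();
        return op;
    }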
/* collected anyway. */
lg = GC_size_map[lb];
LOCK();
- op = GC_uobjfreelist[lg];
+ op = GC_freelists[UNCOLLECTABLE][lg];
if (EXPECT(op != 0, TRUE)) {
- GC_uobjfreelist[lg] = obj_link(op);
+ GC_freelists[UNCOLLECTABLE][lg] = obj_link(op);
obj_link(op) = 0;
GC_bytes_allocd += GRANULES_TO_BYTES(lg);
            /* Mark bit was already set on free list. It will be */
/* Some externally visible but unadvertised variables to allow access to */
/* free lists from inlined allocators without including gc_priv.h */
/* or introducing dependencies on internal data structure layouts. */
-void ** const GC_objfreelist_ptr = GC_objfreelist;
-void ** const GC_aobjfreelist_ptr = GC_aobjfreelist;
-void ** const GC_uobjfreelist_ptr = GC_uobjfreelist;
+void ** const GC_objfreelist_ptr = GC_freelists[NORMAL];
+void ** const GC_aobjfreelist_ptr = GC_freelists[PTRFREE];
+void ** const GC_uobjfreelist_ptr = GC_freelists[UNCOLLECTABLE];
# ifdef GC_ATOMIC_UNCOLLECTABLE
- void ** const GC_auobjfreelist_ptr = GC_auobjfreelist;
+ void ** const GC_auobjfreelist_ptr = GC_freelists[AUNCOLLECTABLE];
# endif
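After the change these aliases simply point at rows of GC_freelists, so
existing clients keep working unmodified. An illustrative sketch of how
an external inlined allocator might read through one of them; it assumes
single-threaded use (or that the caller otherwise serializes with the
collector) and that lg is a valid granule count:

    #include <stddef.h>

    /* Head of the NORMAL free list for objects of lg granules, reached */
    /* without including gc_priv.h; inside the collector this is the    */
    /* same word as GC_freelists[NORMAL][lg].                           */
    extern void ** const GC_objfreelist_ptr;

    static void *peek_normal_freelist(size_t lg)
    {
        return GC_objfreelist_ptr[lg];  /* NULL if the list is empty */
    }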
GC_API int GC_CALL GC_get_kind_and_size(const void * p, size_t * psize)
/* collected anyway. */
lg = GC_size_map[lb];
LOCK();
- op = GC_auobjfreelist[lg];
+ op = GC_freelists[AUNCOLLECTABLE][lg];
if (EXPECT(op != 0, TRUE)) {
- GC_auobjfreelist[lg] = obj_link(op);
+ GC_freelists[AUNCOLLECTABLE][lg] = obj_link(op);
obj_link(op) = 0;
GC_bytes_allocd += GRANULES_TO_BYTES(lg);
/* Mark bit was already set while object was on free list. */
/* GC_init is called. */
/* It's done here, since we need to deal with mark descriptors. */
GC_INNER struct obj_kind GC_obj_kinds[MAXOBJKINDS] = {
-/* PTRFREE */ { &GC_aobjfreelist[0], 0 /* filled in dynamically */,
+/* PTRFREE */ { &GC_freelists[PTRFREE][0], 0 /* filled in dynamically */,
0 | GC_DS_LENGTH, FALSE, FALSE
/*, */ OK_DISCLAIM_INITZ },
-/* NORMAL */ { &GC_objfreelist[0], 0,
+/* NORMAL */ { &GC_freelists[NORMAL][0], 0,
0 | GC_DS_LENGTH, /* Adjusted in GC_init for EXTRA_BYTES */
TRUE /* add length to descr */, TRUE
/*, */ OK_DISCLAIM_INITZ },
/* UNCOLLECTABLE */
- { &GC_uobjfreelist[0], 0,
+ { &GC_freelists[UNCOLLECTABLE][0], 0,
0 | GC_DS_LENGTH, TRUE /* add length to descr */, TRUE
/*, */ OK_DISCLAIM_INITZ },
# ifdef GC_ATOMIC_UNCOLLECTABLE
/* AUNCOLLECTABLE */
- { &GC_auobjfreelist[0], 0,
+ { &GC_freelists[AUNCOLLECTABLE][0], 0,
0 | GC_DS_LENGTH, FALSE /* add length to descr */, FALSE
/*, */ OK_DISCLAIM_INITZ },
# endif
# ifdef STUBBORN_ALLOC
-/*STUBBORN*/ { (void **)&GC_sobjfreelist[0], 0,
+/*STUBBORN*/ { &GC_freelists[STUBBORN][0], 0,
0 | GC_DS_LENGTH, TRUE /* add length to descr */, TRUE
/*, */ OK_DISCLAIM_INITZ },
# endif
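Only the predefined kinds live in GC_freelists; kinds registered at run
time keep supplying their own free-list array through the public API. A
sketch under that assumption (GC_new_free_list, GC_new_kind and
GC_DS_LENGTH are the existing public interfaces; register_my_kind is a
hypothetical name):

    #include "gc.h"
    #include "gc_mark.h"

    /* Register an additional object kind.  Its free list is the        */
    /* separately allocated array returned by GC_new_free_list(), not a */
    /* row of the internal GC_freelists table.                          */
    static unsigned my_kind;

    static void register_my_kind(void)
    {
        void **fl = GC_new_free_list();
        my_kind = GC_new_kind(fl, 0 | GC_DS_LENGTH,
                              1 /* add length to descriptor */,
                              1 /* clear new objects */);
    }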
GC_exclude_static_roots_inner(beginGC_arrays, endGC_arrays);
GC_exclude_static_roots_inner(beginGC_obj_kinds, endGC_obj_kinds);
# ifdef SEPARATE_GLOBALS
- GC_exclude_static_roots_inner(beginGC_objfreelist, endGC_objfreelist);
- GC_exclude_static_roots_inner(beginGC_aobjfreelist, endGC_aobjfreelist);
+ GC_exclude_static_roots_inner((ptr_t)GC_freelists,
+ (ptr_t)GC_freelists + sizeof(GC_freelists));
# endif
# if defined(USE_PROC_FOR_LIBRARIES) && defined(GC_LINUX_THREADS)
WARN("USE_PROC_FOR_LIBRARIES + GC_LINUX_THREADS performs poorly.\n", 0);
{
/* We currently only do this from the thread itself or from */
/* the fork handler for a child process. */
- return_freelists(p -> ptrfree_freelists, GC_aobjfreelist);
- return_freelists(p -> normal_freelists, GC_objfreelist);
+ return_freelists(p -> ptrfree_freelists, GC_freelists[PTRFREE]);
+ return_freelists(p -> normal_freelists, GC_freelists[NORMAL]);
# ifdef GC_GCJ_SUPPORT
return_freelists(p -> gcj_freelists, (void **)GC_gcjobjfreelist);
# endif