CFLAGS_EXTRA=
CFLAGS= -O -I$(srcdir)/include -I$(AO_SRC_DIR)/src \
- -DATOMIC_UNCOLLECTABLE -DNO_EXECUTE_PERMISSION -DALL_INTERIOR_POINTERS \
+ -DGC_ATOMIC_UNCOLLECTABLE -DNO_EXECUTE_PERMISSION -DALL_INTERIOR_POINTERS \
$(CFLAGS_EXTRA)
# To build the parallel collector on Linux, add to the above:
SOPT= $(OPT) $(IGNORE) \
DEFINE AMIGA_SKIP_SEG \
-DEFINE ATOMIC_UNCOLLECTABLE \
+DEFINE GC_ATOMIC_UNCOLLECTABLE \
DEFINE GC_AMIGA_FASTALLOC \
DEFINE GC_AMIGA_RETRY \
DEFINE GC_AMIGA_PRINTSTATS \
[AC_HELP_STRING([--disable-atomic-uncollectible],
[Disable support for atomic uncollectible allocation.])])
if test x"$enable_atomic_uncollectible" != x"no"; then
- AC_DEFINE(ATOMIC_UNCOLLECTABLE, 1,
+ AC_DEFINE([GC_ATOMIC_UNCOLLECTABLE], 1,
[Define to enable atomic uncollectible allocation.])
fi
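For context, a minimal hedged sketch of client code that depends on this option: it assumes the collector library was configured without --disable-atomic-uncollectible, so GC_ATOMIC_UNCOLLECTABLE was defined at library build time and GC_malloc_atomic_uncollectable is actually compiled in.

/* Hypothetical sketch: needs a libgc built with GC_ATOMIC_UNCOLLECTABLE     */
/* (the default unless configured with --disable-atomic-uncollectible).      */
#include <stdio.h>
#include "gc.h"

int main(void)
{
    unsigned char *raw;

    GC_INIT();
    /* Pointer-free, uncollectible: neither scanned nor reclaimed by the GC. */
    raw = (unsigned char *)GC_malloc_atomic_uncollectable(256);
    if (raw == NULL) return 1;
    raw[0] = 0xff;
    printf("allocated %d untraced bytes at %p\n", 256, (void *)raw);
    GC_free(raw);   /* must be freed explicitly, like malloc'ed memory */
    return 0;
}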
case UNCOLLECTABLE:
kind_str = "UNCOLLECTABLE";
break;
-# ifdef ATOMIC_UNCOLLECTABLE
+# ifdef GC_ATOMIC_UNCOLLECTABLE
case AUNCOLLECTABLE:
kind_str = "ATOMIC_UNCOLLECTABLE";
break;
return (GC_store_debug_info(result, (word)lb, s, i));
}
-#ifdef ATOMIC_UNCOLLECTABLE
+#ifdef GC_ATOMIC_UNCOLLECTABLE
GC_API GC_ATTR_MALLOC void * GC_CALL
GC_debug_malloc_atomic_uncollectable(size_t lb, GC_EXTRA_PARAMS)
{
ADD_CALL_CHAIN(result, ra);
return (GC_store_debug_info(result, (word)lb, s, i));
}
-#endif /* ATOMIC_UNCOLLECTABLE */
+#endif /* GC_ATOMIC_UNCOLLECTABLE */
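The debug variant above is normally reached through the GC_DEBUG-aware allocation macros in gc.h rather than called directly. A hedged sketch, assuming the GC_MALLOC_ATOMIC_UNCOLLECTABLE convenience macro from gc.h and a library built with GC_ATOMIC_UNCOLLECTABLE:

/* Sketch: with GC_DEBUG defined before including gc.h, the                  */
/* GC_MALLOC_ATOMIC_UNCOLLECTABLE(sz) macro is expected to route to          */
/* GC_debug_malloc_atomic_uncollectable(sz, GC_EXTRAS), recording the        */
/* allocating file/line for the collector's debugging reports.               */
#define GC_DEBUG
#include "gc.h"

int main(void)
{
    char *p;

    GC_INIT();
    p = (char *)GC_MALLOC_ATOMIC_UNCOLLECTABLE(64); /* untraced, not collected */
    if (p != NULL) {
        p[0] = 'x';
        GC_FREE(p);     /* debug-aware free */
    }
    return 0;
}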
#ifndef GC_FREED_MEM_MARKER
# if CPP_WORDSZ == 32
} else {
hdr * hhdr = HDR(p);
if (hhdr -> hb_obj_kind == UNCOLLECTABLE
-# ifdef ATOMIC_UNCOLLECTABLE
+# ifdef GC_ATOMIC_UNCOLLECTABLE
|| hhdr -> hb_obj_kind == AUNCOLLECTABLE
# endif
) {
case UNCOLLECTABLE:
result = GC_debug_malloc_uncollectable(lb, OPT_RA s, i);
break;
-# ifdef ATOMIC_UNCOLLECTABLE
+# ifdef GC_ATOMIC_UNCOLLECTABLE
case AUNCOLLECTABLE:
result = GC_debug_malloc_atomic_uncollectable(lb, OPT_RA s, i);
break;
return GC_debug_malloc(lb, OPT_RA s, i);
case UNCOLLECTABLE:
return GC_debug_malloc_uncollectable(lb, OPT_RA s, i);
-# ifdef ATOMIC_UNCOLLECTABLE
+# ifdef GC_ATOMIC_UNCOLLECTABLE
case AUNCOLLECTABLE:
return GC_debug_malloc_atomic_uncollectable(lb, OPT_RA s, i);
# endif
#define ALL_INTERIOR_POINTERS // follows interior pointers.
//#define DONT_ADD_BYTE_AT_END // disables the padding if defined.
//#define SMALL_CONFIG // tune for small heaps (smaller code size).
- #define ATOMIC_UNCOLLECTABLE // GC_malloc_atomic_uncollectable()
+ #define GC_ATOMIC_UNCOLLECTABLE // GC_malloc_atomic_uncollectable()
// define either or none as per personal preference
// used in malloc.c
GC_NO_FINALIZATION Exclude finalization support (for smaller code size)
-ATOMIC_UNCOLLECTABLE Includes code for GC_malloc_atomic_uncollectable.
+GC_ATOMIC_UNCOLLECTABLE Includes code for GC_malloc_atomic_uncollectable.
This is useful if either the vendor malloc implementation is poor,
or if REDIRECT_MALLOC is used.
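To make the distinction concrete, here is a small illustrative sketch contrasting the four basic object flavors; only the last call depends on the library having been built with GC_ATOMIC_UNCOLLECTABLE.

/* Illustration only: which allocation call fits which object property.      */
#include "gc.h"

struct node { struct node *next; int value; };

int main(void)
{
    GC_INIT();
    {
        /* Collectible and traced: may hold pointers, reclaimed automatically. */
        struct node *n = (struct node *)GC_malloc(sizeof(struct node));
        /* Collectible and pointer-free: contents are never scanned.           */
        char *text = (char *)GC_malloc_atomic(128);
        /* Uncollectible but traced: never reclaimed; pointers stored here     */
        /* keep collectible objects alive (a reasonable home for roots).       */
        struct node **root =
            (struct node **)GC_malloc_uncollectable(sizeof(*root));
        /* Uncollectible and pointer-free: roughly a malloc() substitute;      */
        /* needs a library built with GC_ATOMIC_UNCOLLECTABLE.                 */
        unsigned char *bytes =
            (unsigned char *)GC_malloc_atomic_uncollectable(4096);

        if (n != NULL && root != NULL) *root = n;
        if (text != NULL) text[0] = '\0';
        if (bytes != NULL) bytes[0] = 0;
    }
    return 0;
}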
// printf("trying to force... %d bytes... ",size);
if(
AllocFunction!=GC_malloc_uncollectable
-#ifdef ATOMIC_UNCOLLECTABLE
+#ifdef GC_ATOMIC_UNCOLLECTABLE
&& AllocFunction!=GC_malloc_atomic_uncollectable
#endif
){
// It also doesn't yet understand the new header file names or
// namespaces.
//
-// This assumes the collector has been compiled with -DATOMIC_UNCOLLECTABLE.
+// This assumes the collector has been compiled with -DGC_ATOMIC_UNCOLLECTABLE.
// The user should also consider -DREDIRECT_MALLOC=GC_malloc_uncollectable,
// to ensure that objects allocated through malloc are traced.
//
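A hedged illustration of why the comment recommends the REDIRECT_MALLOC setting: these are build-time options for the collector library itself, not something this client code can set, but if libgc was compiled that way then memory obtained through plain malloc is allocated as uncollectible-but-traced, so pointers stored in it keep collectible objects alive.

/* Sketch only.  Assumes libgc itself was compiled with                      */
/*   -DGC_ATOMIC_UNCOLLECTABLE -DREDIRECT_MALLOC=GC_malloc_uncollectable     */
/* so that the malloc() call below is serviced by the collector and the      */
/* block's contents are scanned for pointers.                                */
#include <stdlib.h>
#include "gc.h"

struct holder { void *payload; };

int main(void)
{
    struct holder *h;

    GC_INIT();
    h = (struct holder *)malloc(sizeof(*h)); /* redirected: traced, not collected */
    if (h == NULL) return 1;
    h->payload = GC_malloc(1024); /* stays reachable through *h across collections */
    GC_gcollect();
    /* h is uncollectible; it remains allocated until freed explicitly. */
    return 0;
}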
# include "gcconfig.h"
#endif
+#if !defined(GC_ATOMIC_UNCOLLECTABLE) && defined(ATOMIC_UNCOLLECTABLE)
+ /* For compatibility with old-style naming. */
+# define GC_ATOMIC_UNCOLLECTABLE
+#endif
+
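The shim above keeps old build scripts working: defining the historical ATOMIC_UNCOLLECTABLE name still turns on the new macro. A tiny stand-alone demonstration of the same preprocessor pattern (not the GC's own header):

#include <stdio.h>

/* Imagine the build system passed -DATOMIC_UNCOLLECTABLE (old-style name). */
#define ATOMIC_UNCOLLECTABLE 1

#if !defined(GC_ATOMIC_UNCOLLECTABLE) && defined(ATOMIC_UNCOLLECTABLE)
  /* For compatibility with old-style naming. */
# define GC_ATOMIC_UNCOLLECTABLE
#endif

int main(void)
{
#ifdef GC_ATOMIC_UNCOLLECTABLE
    puts("atomic-uncollectible support enabled (via old-style define)");
#else
    puts("atomic-uncollectible support disabled");
#endif
    return 0;
}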
#ifndef GC_INNER
/* This tagging macro must be used at the start of every variable */
/* definition which is declared with GC_EXTERN. Should be also used */
/* objects on this and auobjfreelist */
/* are always marked, except during */
/* garbage collections. */
-# ifdef ATOMIC_UNCOLLECTABLE
+# ifdef GC_ATOMIC_UNCOLLECTABLE
# define GC_auobjfreelist GC_arrays._auobjfreelist
void *_auobjfreelist[MAXOBJGRANULES+1];
/* Atomic uncollectible but traced objs */
#define PTRFREE 0
#define NORMAL 1
#define UNCOLLECTABLE 2
-#ifdef ATOMIC_UNCOLLECTABLE
+#ifdef GC_ATOMIC_UNCOLLECTABLE
# define AUNCOLLECTABLE 3
# define STUBBORN 4
# define IS_UNCOLLECTABLE(k) (((k) & ~1) == UNCOLLECTABLE)
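The IS_UNCOLLECTABLE test relies on AUNCOLLECTABLE being exactly UNCOLLECTABLE | 1, so clearing the low bit maps both kinds onto UNCOLLECTABLE. A stand-alone sketch of the bit trick, mirroring the constants above (they are private to the collector):

/* Mirrors the private kind numbers for illustration only. */
#include <stdio.h>

#define PTRFREE        0
#define NORMAL         1
#define UNCOLLECTABLE  2
#define AUNCOLLECTABLE 3   /* == (UNCOLLECTABLE | 1) */

#define IS_UNCOLLECTABLE(k) (((k) & ~1) == UNCOLLECTABLE)

int main(void)
{
    int k;
    for (k = PTRFREE; k <= AUNCOLLECTABLE; ++k)
        printf("kind %d: IS_UNCOLLECTABLE = %d\n", k, IS_UNCOLLECTABLE(k));
    /* Prints 0, 0, 1, 1: both uncollectible kinds, and only those, match. */
    return 0;
}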
void ** const GC_objfreelist_ptr = GC_objfreelist;
void ** const GC_aobjfreelist_ptr = GC_aobjfreelist;
void ** const GC_uobjfreelist_ptr = GC_uobjfreelist;
-# ifdef ATOMIC_UNCOLLECTABLE
+# ifdef GC_ATOMIC_UNCOLLECTABLE
void ** const GC_auobjfreelist_ptr = GC_auobjfreelist;
# endif
return GC_malloc(lb);
case UNCOLLECTABLE:
return GC_malloc_uncollectable(lb);
-# ifdef ATOMIC_UNCOLLECTABLE
+# ifdef GC_ATOMIC_UNCOLLECTABLE
case AUNCOLLECTABLE:
return GC_malloc_atomic_uncollectable(lb);
-# endif /* ATOMIC_UNCOLLECTABLE */
+# endif /* GC_ATOMIC_UNCOLLECTABLE */
default:
return GC_generic_malloc(lb, knd);
}
return 0;
}
-#ifdef ATOMIC_UNCOLLECTABLE
+#ifdef GC_ATOMIC_UNCOLLECTABLE
/* Allocate lb bytes of pointer-free, untraced, uncollectible data */
/* This is normally roughly equivalent to the system malloc. */
/* But it may be useful if malloc is redefined. */
return((void *) op);
}
}
-#endif /* ATOMIC_UNCOLLECTABLE */
+#endif /* GC_ATOMIC_UNCOLLECTABLE */
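One consequence of "pointer-free, untraced" worth spelling out: pointers stored in such a block do not keep their targets alive, so it should hold only non-pointer data. A hedged sketch of the pitfall (assumes a library built with GC_ATOMIC_UNCOLLECTABLE):

/* Pitfall sketch: atomic-uncollectible memory is never scanned, so a       */
/* pointer hidden in it does NOT protect the object it points to.           */
#include "gc.h"

int main(void)
{
    void **slot;

    GC_INIT();
    slot = (void **)GC_malloc_atomic_uncollectable(sizeof(void *));
    if (slot == NULL) return 1;
    *slot = GC_malloc(64);  /* wrong home for a pointer: the slot is not     */
                            /* traced, so the 64-byte object may be collected */
    /* Use GC_malloc_uncollectable() instead when the block holds pointers.  */
    GC_free(slot);
    return 0;
}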
/* provide a version of strdup() that uses the collector to allocate the
copy of the string */
{ &GC_uobjfreelist[0], 0,
0 | GC_DS_LENGTH, TRUE /* add length to descr */, TRUE
/*, */ OK_DISCLAIM_INITZ },
-# ifdef ATOMIC_UNCOLLECTABLE
+# ifdef GC_ATOMIC_UNCOLLECTABLE
/* AUNCOLLECTABLE */
{ &GC_auobjfreelist[0], 0,
0 | GC_DS_LENGTH, FALSE /* add length to descr */, FALSE
# endif
};
-# ifdef ATOMIC_UNCOLLECTABLE
+# ifdef GC_ATOMIC_UNCOLLECTABLE
# ifdef STUBBORN_ALLOC
# define GC_N_KINDS_INITIAL_VALUE 5
# else
# else
# define GC_N_KINDS_INITIAL_VALUE 3
# endif
-# endif
+# endif /* !GC_ATOMIC_UNCOLLECTABLE */
GC_INNER unsigned GC_n_kinds = GC_N_KINDS_INITIAL_VALUE;
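The initial kind count is simple arithmetic over the two optional kinds: 3 predefined kinds (PTRFREE, NORMAL, UNCOLLECTABLE), plus one for AUNCOLLECTABLE under GC_ATOMIC_UNCOLLECTABLE, plus one for STUBBORN under STUBBORN_ALLOC. A stand-alone mirror of that selection (the intermediate values of 4 are reconstructed for illustration):

#include <stdio.h>

/* Toggle these to see the four possible configurations. */
#define GC_ATOMIC_UNCOLLECTABLE
/* #define STUBBORN_ALLOC */

#ifdef GC_ATOMIC_UNCOLLECTABLE
# ifdef STUBBORN_ALLOC
#   define GC_N_KINDS_INITIAL_VALUE 5  /* + AUNCOLLECTABLE and STUBBORN */
# else
#   define GC_N_KINDS_INITIAL_VALUE 4  /* + AUNCOLLECTABLE only */
# endif
#else
# ifdef STUBBORN_ALLOC
#   define GC_N_KINDS_INITIAL_VALUE 4  /* + STUBBORN only */
# else
#   define GC_N_KINDS_INITIAL_VALUE 3  /* just PTRFREE, NORMAL, UNCOLLECTABLE */
# endif
#endif

int main(void)
{
    printf("initial number of object kinds: %d\n", GC_N_KINDS_INITIAL_VALUE);
    return 0;
}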