while (*flh == 0) {
ENTER_GC();
- /* Do our share of marking work */
- if(TRUE_INCREMENTAL) GC_collect_a_little_inner(1);
+# ifndef GC_DISABLE_INCREMENTAL
+ if (GC_incremental && GC_time_limit != GC_TIME_UNLIMITED) {
+ /* True incremental mode, not just generational. */
+ /* Do our share of marking work. */
+ GC_collect_a_little_inner(1);
+ }
+# endif
/* Sweep blocks for objects of this size */
GC_ASSERT(!GC_is_full_gc
|| NULL == GC_obj_kinds[kind].ok_reclaim_list
}
# ifdef MPROTECT_VDB
- if(GC_incremental) {
+ if (GC_auto_incremental) {
GC_mprotect_stop();
}
# endif
GC_log_printf("World starting\n");
# endif
# ifdef MPROTECT_VDB
- if(GC_incremental) {
+ if (GC_auto_incremental) {
GC_mprotect_resume();
}
# endif
performance may actually be better with `mprotect` and signals.)
* (`PCR_VDB`) By relying on an external dirty bit implementation, in this
case the one in Xerox PCR.
- * (`MANUAL_VDB`) Through explicit mutator cooperation. This requires the
- client code to call `GC_end_stubborn_change` (followed by a number of
- `GC_reachable_here` calls), and is rarely used.
+ * Through explicit mutator cooperation. This is enabled by a
+ `GC_set_manual_vdb_allowed(1)` call, and requires the client code to call
+ `GC_ptr_store_and_dirty` or `GC_end_stubborn_change` (followed by a number
+ of `GC_reachable_here` calls), and is rarely used.
* (`DEFAULT_VDB`) By treating all pages as dirty. This is the default
if none of the other techniques is known to be usable. (Practical only for
testing.)
/* Inform the collector that the object has been changed. */
/* Only non-NULL pointer stores into the object are considered to be */
-/* changes. Matters only if the library has been compiled with */
-/* MANUAL_VDB defined (otherwise the function does nothing). */
+/* changes. Matters only if the incremental collection is enabled in */
+/* the manual VDB mode (otherwise the function does nothing). */
/* Should be followed typically by GC_reachable_here called for each */
/* of the stored pointers. */
GC_API void GC_CALL GC_end_stubborn_change(const void *) GC_ATTR_NONNULL(1);
/* both functions is equal. */
GC_API void GC_CALL GC_enable(void);
+/* Select whether to use the manual VDB mode for the incremental */
+/* collection. Has no effect if called after enabling the incremental */
+/* collection. The default value is off unless the collector is */
+/* compiled with MANUAL_VDB defined. The manual VDB mode should be */
+/* used only if the client has the appropriate GC_END_STUBBORN_CHANGE */
+/* and GC_reachable_here (or, alternatively, GC_PTR_STORE_AND_DIRTY) */
+/* calls (to ensure proper write barriers). Both the setter and getter */
+/* are not synchronized, and are defined only if the library has been */
+/* compiled without SMALL_CONFIG. */
+GC_API void GC_CALL GC_set_manual_vdb_allowed(int);
+GC_API int GC_CALL GC_get_manual_vdb_allowed(void);
+
/* Enable incremental/generational collection. Not advisable unless */
/* dirty bits are available or most heap objects are pointer-free */
/* (atomic) or immutable. Don't use in leak finding mode. Ignored if */
char _valid_offsets[VALID_OFFSET_SZ];
/* GC_valid_offsets[i] == TRUE ==> i */
/* is registered as a displacement. */
-# if defined(PROC_VDB) || defined(MPROTECT_VDB) \
- || defined(GWW_VDB) || defined(MANUAL_VDB)
+# ifndef GC_DISABLE_INCREMENTAL
# define GC_grungy_pages GC_arrays._grungy_pages
page_hash_table _grungy_pages; /* Pages that were dirty at last */
/* GC_read_dirty. */
-# endif
-# if defined(MPROTECT_VDB) || defined(MANUAL_VDB)
# define GC_dirty_pages GC_arrays._dirty_pages
volatile page_hash_table _dirty_pages;
/* Pages dirtied since last GC_read_dirty. */
extern word GC_free_bytes[]; /* Both remain visible to GNU GCJ. */
#endif
-#ifdef GC_DISABLE_INCREMENTAL
-# define GC_incremental FALSE
- /* Hopefully allow optimizer to remove some code. */
-# define TRUE_INCREMENTAL FALSE
-#else
- GC_EXTERN GC_bool GC_incremental;
- /* Using incremental/generational collection. */
- /* Assumes dirty bits are being maintained. */
-# define TRUE_INCREMENTAL \
- (GC_incremental && GC_time_limit != GC_TIME_UNLIMITED)
- /* True incremental, not just generational, mode */
-#endif /* !GC_DISABLE_INCREMENTAL */
-
GC_EXTERN word GC_root_size; /* Total size of registered root sections. */
GC_EXTERN GC_bool GC_debugging_started;
/* accompanying routines are no-op in such a case. */
#endif
-#ifndef GC_DISABLE_INCREMENTAL
+#ifdef GC_DISABLE_INCREMENTAL
+# define GC_incremental FALSE
+# define GC_auto_incremental FALSE
+# define GC_manual_vdb FALSE
+# define GC_dirty(p) (void)(p)
+# define REACHABLE_AFTER_DIRTY(p) (void)(p)
+
+#else /* !GC_DISABLE_INCREMENTAL */
+ GC_EXTERN GC_bool GC_incremental;
+ /* Using incremental/generational collection. */
+ /* Assumes dirty bits are being maintained. */
+
/* Virtual dirty bit implementation: */
/* Each implementation exports the following: */
GC_INNER void GC_read_dirty(GC_bool output_unneeded);
/* Returns true if dirty bits are maintained (otherwise */
/* it is OK to be called again if the client invokes */
/* GC_enable_incremental once more). */
-#endif /* !GC_DISABLE_INCREMENTAL */
-#ifdef MANUAL_VDB
+ GC_EXTERN GC_bool GC_manual_vdb;
+ /* The incremental collection is in the manual VDB */
+ /* mode. Assumes GC_incremental is true. Should not */
+ /* be modified once GC_incremental is set to true. */
+
+# define GC_auto_incremental (GC_incremental && !GC_manual_vdb)
+
GC_INNER void GC_dirty_inner(const void *p); /* does not require locking */
-# define GC_dirty(p) (GC_incremental ? GC_dirty_inner(p) : (void)0)
+# define GC_dirty(p) (GC_manual_vdb ? GC_dirty_inner(p) : (void)0)
# define REACHABLE_AFTER_DIRTY(p) GC_reachable_here(p)
-#else
-# define GC_dirty(p) (void)(p)
-# define REACHABLE_AFTER_DIRTY(p) (void)(p)
-#endif
+#endif /* !GC_DISABLE_INCREMENTAL */
/* Same as GC_base but excepts and returns a pointer to const object. */
#define GC_base_C(p) ((const void *)GC_base((/* no const */ void *)(p)))
# define MUNMAP_THRESHOLD 2
#endif
-#if defined(GC_DISABLE_INCREMENTAL) || defined(DEFAULT_VDB) \
- || defined(MANUAL_VDB)
+#if defined(GC_DISABLE_INCREMENTAL) || defined(DEFAULT_VDB)
# undef GWW_VDB
# undef MPROTECT_VDB
# undef PCR_VDB
#endif
#if !defined(PCR_VDB) && !defined(PROC_VDB) && !defined(MPROTECT_VDB) \
- && !defined(GWW_VDB) && !defined(DEFAULT_VDB) && !defined(MANUAL_VDB) \
+ && !defined(GWW_VDB) && !defined(DEFAULT_VDB) \
&& !defined(GC_DISABLE_INCREMENTAL)
# define DEFAULT_VDB
#endif
DCL_LOCK_STATE;
GC_ASSERT(lb != 0 && (lb & (GRANULE_BYTES-1)) == 0);
- if (!SMALL_OBJ(lb)
-# ifdef MANUAL_VDB
- /* Currently a single object is allocated. */
- /* TODO: GC_dirty should be called for each linked object (but */
- /* the last one) to support multiple objects allocation. */
- || GC_incremental
-# endif
- ) {
+ /* Currently, a single object is always allocated in manual VDB mode. */
+ /* TODO: GC_dirty should be called for each linked object (but */
+ /* the last one) to support multiple objects allocation. */
+ if (!SMALL_OBJ(lb) || GC_manual_vdb) {
op = GC_generic_malloc(lb, k);
if (EXPECT(0 != op, TRUE))
obj_link(op) = 0;
*result = op;
-# ifdef MANUAL_VDB
- if (GC_is_heap_ptr(result)) {
- GC_dirty(result);
+# ifndef GC_DISABLE_INCREMENTAL
+ if (GC_manual_vdb && GC_is_heap_ptr(result)) {
+ GC_dirty_inner(result);
REACHABLE_AFTER_DIRTY(op);
}
# endif
/* Don't recycle a stack segment obtained with the wrong flags. */
/* Win32 GetWriteWatch requires the right kind of memory. */
static GC_bool GC_incremental_at_stack_alloc = FALSE;
- GC_bool recycle_old = (!GC_incremental || GC_incremental_at_stack_alloc);
+ GC_bool recycle_old = !GC_auto_incremental
+ || GC_incremental_at_stack_alloc;
- GC_incremental_at_stack_alloc = GC_incremental;
+ GC_incremental_at_stack_alloc = GC_auto_incremental;
# else
# define recycle_old TRUE
# endif
GC_push_selected((ptr_t)bottom, (ptr_t)top, GC_page_was_dirty);
} else {
# ifdef PROC_VDB
- if (GC_incremental) {
+ if (GC_auto_incremental) {
/* Pages that were never dirtied cannot contain pointers. */
GC_push_selected((ptr_t)bottom, (ptr_t)top, GC_page_was_ever_dirty);
} else
GC_INNER void GC_push_all_stack(ptr_t bottom, ptr_t top)
{
-# if defined(THREADS) && defined(MPROTECT_VDB)
- GC_push_all_eager(bottom, top);
-# else
# ifndef NEED_FIXUP_POINTER
- if (GC_all_interior_pointers) {
+ if (GC_all_interior_pointers
+# if defined(THREADS) && defined(MPROTECT_VDB)
+ && !GC_auto_incremental
+# endif
+ ) {
GC_push_all(bottom, top);
} else
# endif
/* else */ {
GC_push_all_eager(bottom, top);
}
-# endif
}
#if defined(WRAP_MARK_SOME) && defined(PARALLEL_MARK)
* register values are not lost.
* Cold_gc_frame delimits the stack section that must be scanned
* eagerly. A zero value indicates that no eager scanning is needed.
- * We don't need to worry about the MANUAL_VDB case here, since this
+ * We don't need to worry about the manual VDB case here, since this
* is only called in the single-threaded case. We assume that we
* cannot collect between an assignment and the corresponding
* GC_dirty() call.
# elif defined(THREADS) || (defined(DARWIN) && defined(MPROTECT_VDB))
if (!GC_is_initialized && value) {
# ifndef SMALL_CONFIG
- GC_init(); /* just to initialize GC_stderr */
+ GC_init(); /* to initialize GC_manual_vdb and GC_stderr */
+# ifndef THREADS
+ if (GC_manual_vdb)
+ return;
+# endif
# endif
ABORT("fork() handling unsupported");
}
void * context GC_ATTR_UNUSED) {}
#endif
+#ifndef SMALL_CONFIG
+# ifdef MANUAL_VDB
+ static GC_bool manual_vdb_allowed = TRUE;
+# else
+ static GC_bool manual_vdb_allowed = FALSE;
+# endif
+
+ GC_API void GC_CALL GC_set_manual_vdb_allowed(int value)
+ {
+ manual_vdb_allowed = (GC_bool)value;
+ }
+
+ GC_API int GC_CALL GC_get_manual_vdb_allowed(void)
+ {
+ return (int)manual_vdb_allowed;
+ }
+#endif /* !SMALL_CONFIG */
+
STATIC word GC_parse_mem_size_arg(const char *str)
{
word result = 0; /* bad value */
# endif
# ifndef GC_DISABLE_INCREMENTAL
if (GC_incremental || 0 != GETENV("GC_ENABLE_INCREMENTAL")) {
- /* For GWW_VDB on Win32, this needs to happen before any */
- /* heap memory is allocated. */
- GC_incremental = GC_dirty_init();
- GC_ASSERT(GC_bytes_allocd == 0);
+# if defined(CHECKSUMS) || defined(SMALL_CONFIG)
+ /* TODO: Implement CHECKSUMS for manual VDB. */
+# else
+ if (manual_vdb_allowed) {
+ GC_manual_vdb = TRUE;
+ GC_incremental = TRUE;
+ } else
+# endif
+ /* else */ {
+ /* For GWW_VDB on Win32, this needs to happen before any */
+ /* heap memory is allocated. */
+ GC_incremental = GC_dirty_init();
+ GC_ASSERT(GC_bytes_allocd == 0);
+ }
}
# endif
GC_init();
LOCK();
} else {
- GC_incremental = GC_dirty_init();
+# if !defined(CHECKSUMS) && !defined(SMALL_CONFIG)
+ if (manual_vdb_allowed) {
+ GC_manual_vdb = TRUE;
+ GC_incremental = TRUE;
+ } else
+# endif
+ /* else */ {
+ GC_incremental = GC_dirty_init();
+ }
}
if (GC_incremental && !GC_dont_gc) {
/* Can't easily do it if GC_dont_gc. */
* DEFAULT_VDB: A simple dummy implementation that treats every page
* as possibly dirty. This makes incremental collection
* useless, but the implementation is still correct.
- * MANUAL_VDB: Stacks and static data are always considered dirty.
+ * Manual VDB: Stacks and static data are always considered dirty.
* Heap pages are considered dirty if GC_dirty(p) has been
* called on some pointer p pointing to somewhere inside
* an object on that page. A GC_dirty() call on a large
- * object directly dirties only a single page, but for
- * MANUAL_VDB we are careful to treat an object with a dirty
+ * object directly dirties only a single page, but for the
+ * manual VDB we are careful to treat an object with a dirty
* page as completely dirty.
* In order to avoid races, an object must be marked dirty
* after it is written, and a reference to the object
GC_INNER GC_bool GC_dirty_init(void)
{
GC_VERBOSE_LOG_PRINTF("Initializing DEFAULT_VDB...\n");
- return TRUE;
- }
-#endif /* DEFAULT_VDB */
-
-#ifdef MANUAL_VDB
- /* Initialize virtual dirty bit implementation. */
- GC_INNER GC_bool GC_dirty_init(void)
- {
- GC_VERBOSE_LOG_PRINTF("Initializing MANUAL_VDB...\n");
/* GC_dirty_pages and GC_grungy_pages are already cleared. */
return TRUE;
}
-
-# define async_set_pht_entry_from_index(db, index) \
- set_pht_entry_from_index(db, index) /* for now */
-
- /* Mark the page containing p as dirty. Logically, this dirties the */
- /* entire object. */
- GC_INNER void GC_dirty_inner(const void *p)
- {
- word index = PHT_HASH(p);
- async_set_pht_entry_from_index(GC_dirty_pages, index);
- }
-#endif /* MANUAL_VDB */
+#endif /* DEFAULT_VDB */
#ifdef MPROTECT_VDB
/*
#endif /* PCR_VDB */
#ifndef GC_DISABLE_INCREMENTAL
+ GC_INNER GC_bool GC_manual_vdb = FALSE;
+
+ /* Manually mark the page containing p as dirty. Logically, this */
+ /* dirties the entire object. */
+ GC_INNER void GC_dirty_inner(const void *p)
+ {
+ word index = PHT_HASH(p);
+
+# if defined(MPROTECT_VDB)
+ /* Do not update GC_dirty_pages if it should be followed by the */
+ /* page unprotection. */
+ GC_ASSERT(GC_manual_vdb);
+# endif
+ set_pht_entry_from_index(GC_dirty_pages, index); /* FIXME: concurrent */
+ }
+
/* Retrieve system dirty bits for the heap to a local buffer (unless */
/* output_unneeded). Restore the systems notion of which pages are */
/* dirty. We assume that either the world is stopped or it is OK to */
/* lose dirty bits while it's happening (as in GC_enable_incremental).*/
GC_INNER void GC_read_dirty(GC_bool output_unneeded)
{
-# if defined(MANUAL_VDB) || defined(MPROTECT_VDB)
- if (!GC_GWW_AVAILABLE())
- {
- if (!output_unneeded)
- BCOPY((/* no volatile */ void *)GC_dirty_pages, GC_grungy_pages,
- sizeof(GC_dirty_pages));
- BZERO((/* no volatile */ void *)GC_dirty_pages,
+ if (GC_manual_vdb
+# if defined(MPROTECT_VDB)
+ || !GC_GWW_AVAILABLE()
+# endif
+ ) {
+ if (!output_unneeded)
+ BCOPY((/* no volatile */ void *)GC_dirty_pages, GC_grungy_pages,
sizeof(GC_dirty_pages));
-# ifdef MPROTECT_VDB
+ BZERO((/* no volatile */ void *)GC_dirty_pages,
+ sizeof(GC_dirty_pages));
+# ifdef MPROTECT_VDB
+ if (!GC_manual_vdb)
GC_protect_heap();
-# endif
- return;
- }
-# endif
+# endif
+ return;
+ }
# ifdef GWW_VDB
GC_gww_read_dirty(output_unneeded);
# elif defined(PROC_VDB)
GC_proc_read_dirty(output_unneeded);
# elif defined(PCR_VDB)
- (void)output_unneeded;
/* lazily enable dirty bits on newly added heap sects */
{
static int onhs = 0;
!= PCR_ERes_okay) {
ABORT("Dirty bit read failed");
}
-# elif !defined(MPROTECT_VDB)
- (void)output_unneeded;
# endif
}
GC_INNER GC_bool GC_page_was_dirty(struct hblk *h)
{
# ifdef PCR_VDB
- if ((word)h < (word)GC_vd_base
- || (word)h >= (word)(GC_vd_base + NPAGES*HBLKSIZE)) {
- return TRUE;
+ if (!GC_manual_vdb) {
+ if ((word)h < (word)GC_vd_base
+ || (word)h >= (word)(GC_vd_base + NPAGES * HBLKSIZE)) {
+ return TRUE;
+ }
+ return GC_grungy_bits[h-(struct hblk*)GC_vd_base] & PCR_VD_DB_dirtyBit;
}
- return GC_grungy_bits[h - (struct hblk*)GC_vd_base] & PCR_VD_DB_dirtyBit;
# elif defined(DEFAULT_VDB)
- (void)h;
- return TRUE;
-# else
- return NULL == HDR(h)
- || get_pht_entry_from_index(GC_grungy_pages, PHT_HASH(h));
+ if (!GC_manual_vdb)
+ return TRUE;
# endif
+ return NULL == HDR(h)
+ || get_pht_entry_from_index(GC_grungy_pages, PHT_HASH(h));
}
# if defined(CHECKSUMS) || defined(PROC_VDB)
{
# ifdef PCR_VDB
(void)is_ptrfree;
+ if (!GC_auto_incremental)
+ return;
PCR_VD_WriteProtectDisable(h, nblocks*HBLKSIZE);
PCR_VD_WriteProtectEnable(h, nblocks*HBLKSIZE);
# elif defined(MPROTECT_VDB)
struct hblk * h_end; /* Page boundary following block end */
struct hblk * current;
- if (!GC_incremental || GC_GWW_AVAILABLE())
+ if (!GC_auto_incremental || GC_GWW_AVAILABLE())
return;
h_trunc = (struct hblk *)((word)h & ~(GC_page_size-1));
h_end = (struct hblk *)(((word)(h + nblocks) + GC_page_size - 1)
{
if (!EXPECT(GC_is_initialized, TRUE)) GC_init();
# if defined(GC_DARWIN_THREADS) && defined(MPROTECT_VDB)
- if (GC_incremental) {
+ if (GC_auto_incremental) {
GC_ASSERT(0 == GC_handle_fork);
ABORT("Unable to fork while mprotect_thread is running");
}
# endif
GC_set_all_interior_pointers(0); /* for a stricter test */
+# ifdef TEST_MANUAL_VDB
+ GC_set_manual_vdb_allowed(1);
+# endif
GC_INIT();
GC_init_finalized_malloc();
# ifndef NO_INCREMENTAL
GC_TEST_EXPORT_API void * libsrl_init(void)
{
+# ifdef TEST_MANUAL_VDB
+ GC_set_manual_vdb_allowed(1);
+# endif
# ifndef STATICROOTSLIB_INIT_IN_MAIN
GC_INIT();
# endif
#include <stdarg.h>
+#ifdef TEST_MANUAL_VDB
+# define INIT_MANUAL_VDB_ALLOWED GC_set_manual_vdb_allowed(1)
+#elif !defined(SMALL_CONFIG)
+# define INIT_MANUAL_VDB_ALLOWED GC_set_manual_vdb_allowed(0)
+#else
+# define INIT_MANUAL_VDB_ALLOWED /* empty */
+#endif
+
#define CHECK_GCLIB_VERSION \
if (GC_get_version() != ((GC_VERSION_MAJOR<<16) \
| (GC_VERSION_MINOR<<8) \
#endif
#define GC_COND_INIT() \
- INIT_FORK_SUPPORT; GC_OPT_INIT; CHECK_GCLIB_VERSION; \
+ INIT_FORK_SUPPORT; INIT_MANUAL_VDB_ALLOWED; \
+ GC_OPT_INIT; CHECK_GCLIB_VERSION; \
INIT_PRINT_STATS; INIT_FIND_LEAK; INIT_PERF_MEASUREMENT
#define CHECK_OUT_OF_MEMORY(p) \
GC_enable_incremental();
# endif
if (GC_is_incremental_mode()) {
- GC_printf("Switched to incremental mode\n");
+# ifndef SMALL_CONFIG
+ if (GC_get_manual_vdb_allowed()) {
+ GC_printf("Switched to incremental mode (manual VDB)\n");
+ } else
+# endif
+ /* else */ {
+ GC_printf("Switched to incremental mode\n");
# ifdef PROC_VDB
GC_printf("Reading dirty bits from /proc\n");
# elif defined(GWW_VDB)
# elif defined(MPROTECT_VDB)
GC_printf("Emulating dirty bits with mprotect/signals\n");
# endif /* MPROTECT_VDB && !GWW_VDB */
+ }
}
# endif
set_print_procs();
GC_enable_incremental();
# endif
if (GC_is_incremental_mode()) {
- GC_printf("Switched to incremental mode\n");
-# ifdef MPROTECT_VDB
- GC_printf("Emulating dirty bits with mprotect/signals\n");
+# ifndef SMALL_CONFIG
+ if (GC_get_manual_vdb_allowed()) {
+ GC_printf("Switched to incremental mode (manual VDB)\n");
+ } else
# endif
+ /* else */ {
+ GC_printf("Switched to incremental mode\n");
+# ifdef MPROTECT_VDB
+ GC_printf("Emulating dirty bits with mprotect/signals\n");
+# endif
+ }
}
# endif
GC_set_min_bytes_allocd(1);
GC_set_all_interior_pointers(1);
/* needed due to C++ multiple inheritance used */
+# ifdef TEST_MANUAL_VDB
+ GC_set_manual_vdb_allowed(1);
+# endif
GC_INIT();
# ifndef NO_INCREMENTAL
GC_enable_incremental();
/* documentation. There is empirical evidence that it */
/* isn't. - HB */
# if defined(MPROTECT_VDB) && !defined(CYGWIN32)
- if (GC_incremental
+ if (GC_auto_incremental
# ifdef GWW_VDB
&& !GC_gww_dirty_init()
# endif
/* lock may be required for fault handling. */
#if defined(MPROTECT_VDB)
# define UNPROTECT_THREAD(t) \
- if (!GC_win32_dll_threads && GC_incremental && t != &first_thread) { \
+ if (!GC_win32_dll_threads && GC_auto_incremental \
+ && t != &first_thread) { \
GC_ASSERT(SMALL_OBJ(GC_size(t))); \
GC_remove_protection(HBLKPTR(t), 1, FALSE); \
} else (void)0