+2009-06-08 Hans Boehm <Hans.Boehm@hp.com> (Really Ivan Maidanski)
+ (diff91_cvs: resembling diff3, diff27, diff33, diff45, diff47, diff49,
+ diff60, diff67, diff68 partly)
+ * alloc.c (GC_FULL_FREQ, GC_DONT_EXPAND, GC_FREE_SPACE_DIVISOR,
+ GC_TIME_LIMIT): New macros (used to control the default initial
+ values of the GC_full_freq, GC_dont_expand, GC_free_space_divisor
+ and GC_time_limit variables, respectively; a build-time example is
+ given at the end of this entry).
+ * include/private/gc_priv.h (TIME_LIMIT): Remove macro (replaced
+ with GC_TIME_LIMIT in alloc.c).
+ * alloc.c (GC_need_full_gc, GC_stopped_mark, GC_finish_collection):
+ Define as STATIC.
+ * mark_rts.c (GC_push_current_stack, GC_push_gc_structures): Ditto.
+ * include/private/gc_priv.h (GC_stopped_mark, GC_finish_collection):
+ Move the prototypes to alloc.c, make STATIC.
+ * include/private/gc_priv.h (GC_push_current_stack,
+ GC_push_gc_structures, GC_push_regs_and_stack): Remove prototypes
+ (move the comments to the places where these functions are defined).
+ * mach_dep.c (GC_push_regs_and_stack): Move to mark_rts.c and define
+ as STATIC.
+ * alloc.c (GC_timeout_stop_func, GC_stopped_mark,
+ GC_print_heap_sects): Convert a group of printf() calls into
+ a single one (for output atomicity).
+ * mark_rts.c (GC_print_static_roots): Ditto.
+ * alloc.c (GC_stopped_mark): Output a blank line (when logging) to
+ conveniently delimit collections.
+ * alloc.c (GC_clear_a_few_frames): Rename NWORDS to CLEAR_NWORDS;
+ make the "frames" local variable volatile (to prevent optimization).
+ * alloc.c (GC_try_to_collect_inner, GC_stopped_mark,
+ GC_finish_collection, GC_allocobj): Remove outdated comments about
+ disabling signals.
+ * include/private/gc_priv.h (GC_register_displacement_inner,
+ GC_gcollect_inner): Ditto.
+ * alloc.c (GC_try_to_collect_inner, GC_stopped_mark,
+ GC_finish_collection): Initialize "start_time" local variable (to 0)
+ to suppress compiler warning.
+ * mark_rts.c (GC_add_roots_inner): Ditto (for "old" variable).
+ * alloc.c (GC_RATE, MAX_PRIOR_ATTEMPTS): Guard with "ifndef".
+ * include/private/gc_priv.h (clock, GC_stop_world, GC_start_world,
+ GC_acquire_mark_lock, GC_release_mark_lock, GC_notify_all_builder,
+ GC_wait_for_reclaim, GC_notify_all_marker, GC_wait_marker): Replace
+ K&R-style function prototypes with ANSI C ones.
+ * include/private/gc_priv.h (ABORT): Define as DebugBreak() for
+ Win32/WinCE if SMALL_CONFIG (the same as in GC_abort()).
+ * include/private/gc_priv.h (ROUNDED_UP_WORDS, abs): Remove unused
+ macros.
+ * include/private/gc_priv.h (GC_noop): Declare for Borland C the
+ same as for Watcom.
+ * mark_rts.c (GC_push_conditional_with_exclusions): Add ARGSUSED tag.
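+
+ Since each of the new macros is guarded (with "ifndef" or "ifdef"), the
+ defaults can be overridden when the collector is built. A hypothetical
+ build line (the macro names are the ones added above; the values and
+ the compiler invocation are only illustrative):
+
+   cc -DGC_FULL_FREQ=9 -DGC_TIME_LIMIT=30 -DGC_FREE_SPACE_DIVISOR=4 \
+      -DGC_DONT_EXPAND -c alloc.c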
+
2009-06-04 Hans Boehm <Hans.Boehm@hp.com> (Really Ivan Maidanski)
(diff89_cvs, resembling diff3, diff27, diff34, diff38, diff47, diff49,
diff59, diff60, diff66, diff67, diff68, diff69a, diff70, diff81
int GC_parallel = FALSE; /* By default, parallel GC is off. */
-int GC_full_freq = 19; /* Every 20th collection is a full */
+#ifndef GC_FULL_FREQ
+# define GC_FULL_FREQ 19 /* Every 20th collection is a full */
/* collection, whether we need it */
/* or not. */
+#endif
+
+int GC_full_freq = GC_FULL_FREQ;
-GC_bool GC_need_full_gc = FALSE;
+STATIC GC_bool GC_need_full_gc = FALSE;
                                /* Need full GC due to heap growth. */
#ifdef THREADS
{"Copyright 1988,1989 Hans-J. Boehm and Alan J. Demers ",
"Copyright (c) 1991-1995 by Xerox Corporation. All rights reserved. ",
"Copyright (c) 1996-1998 by Silicon Graphics. All rights reserved. ",
-"Copyright (c) 1999-2001 by Hewlett-Packard Company. All rights reserved. ",
+"Copyright (c) 1999-2009 by Hewlett-Packard Company. All rights reserved. ",
"THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY",
" EXPRESSED OR IMPLIED. ANY USE IS AT YOUR OWN RISK.",
"See source code for details." };
extern signed_word GC_bytes_found; /* Number of reclaimed bytes */
/* after garbage collection */
-GC_bool GC_dont_expand = 0;
+#ifdef GC_DONT_EXPAND
+ GC_bool GC_dont_expand = TRUE;
+#else
+ GC_bool GC_dont_expand = FALSE;
+#endif
+
+#ifndef GC_FREE_SPACE_DIVISOR
+# define GC_FREE_SPACE_DIVISOR 3 /* must be > 0 */
+#endif
-word GC_free_space_divisor = 3;
+word GC_free_space_divisor = GC_FREE_SPACE_DIVISOR;
extern GC_bool GC_collection_in_progress(void);
/* Collection is in progress, or was abandoned. */
int GC_CALLBACK GC_never_stop_func (void) { return(0); }
-unsigned long GC_time_limit = TIME_LIMIT;
+#ifndef GC_TIME_LIMIT
+# define GC_TIME_LIMIT 50 /* We try to keep pause times from exceeding */
+ /* this by much. In milliseconds. */
+#endif
+
+unsigned long GC_time_limit = GC_TIME_LIMIT;
#ifndef NO_CLOCK
STATIC CLOCK_TYPE GC_start_time;/* Time at which we stopped world. */
time_diff = MS_TIME_DIFF(current_time,GC_start_time);
if (time_diff >= GC_time_limit) {
if (GC_print_stats) {
- GC_log_printf("Abandoning stopped marking after ");
- GC_log_printf("%lu msecs", time_diff);
- GC_log_printf("(attempt %d)\n", GC_n_attempts);
+ GC_log_printf(
+ "Abandoning stopped marking after %lu msecs (attempt %d)\n",
+ time_diff, GC_n_attempts);
}
return(1);
}
/* stack clear of long-lived, client-generated garbage. */
STATIC void GC_clear_a_few_frames(void)
{
-# define NWORDS 64
- word frames[NWORDS];
+# ifndef CLEAR_NWORDS
+# define CLEAR_NWORDS 64
+# endif
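+  /* The array is volatile so that the compiler cannot optimize away  */
+  /* the clearing loop below.                                          */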
+ volatile word frames[CLEAR_NWORDS];
int i;
- for (i = 0; i < NWORDS; i++) frames[i] = 0;
+ for (i = 0; i < CLEAR_NWORDS; i++) frames[i] = 0;
}
/* Heap size at which we need a collection to avoid expanding past */
STATIC GC_bool GC_is_full_gc = FALSE;
+STATIC GC_bool GC_stopped_mark(GC_stop_func stop_func);
+STATIC void GC_finish_collection(void);
+
/*
* Initiate a garbage collection if appropriate.
* Choose judiciously
/*
- * Stop the world garbage collection. Assumes lock held, signals disabled.
- * If stop_func is not GC_never_stop_func, then abort if stop_func returns TRUE.
+ * Stop the world garbage collection. Assumes lock held. If stop_func is
+ * not GC_never_stop_func then abort if stop_func returns TRUE.
* Return TRUE if we successfully completed the collection.
*/
GC_bool GC_try_to_collect_inner(GC_stop_func stop_func)
{
# ifndef SMALL_CONFIG
- CLOCK_TYPE start_time, current_time;
+ CLOCK_TYPE start_time = 0; /* initialized to prevent warning. */
+ CLOCK_TYPE current_time;
# endif
if (GC_dont_gc) return FALSE;
if (GC_incremental && GC_collection_in_progress()) {
* GC strategy, since otherwise we allocate too much during GC, and the
* cleanup gets expensive.
*/
+#ifndef GC_RATE
# define GC_RATE 10
+#endif
+#ifndef MAX_PRIOR_ATTEMPTS
# define MAX_PRIOR_ATTEMPTS 1
+#endif
/* Maximum number of prior attempts at world stop marking */
/* A value of 1 means that we finish the second time, no matter */
/* how long it takes. Doesn't count the initial root scan */
#endif
/*
- * Assumes lock is held, signals are disabled.
- * We stop the world.
+ * Assumes lock is held. We stop the world and mark from all roots.
* If stop_func() ever returns TRUE, we may fail and return FALSE.
* Increment GC_gc_no if we succeed.
*/
-GC_bool GC_stopped_mark(GC_stop_func stop_func)
+STATIC GC_bool GC_stopped_mark(GC_stop_func stop_func)
{
unsigned i;
int dummy;
# ifndef SMALL_CONFIG
- CLOCK_TYPE start_time, current_time;
+ CLOCK_TYPE start_time = 0; /* initialized to prevent warning. */
+ CLOCK_TYPE current_time;
# endif
# if !defined(REDIRECT_MALLOC) && (defined(MSWIN32) || defined(MSWINCE))
STOP_WORLD();
IF_THREADS(GC_world_stopped = TRUE);
if (GC_print_stats) {
- GC_log_printf("--> Marking for collection %lu ",
- (unsigned long)GC_gc_no + 1);
- GC_log_printf("after %lu allocd bytes\n",
- (unsigned long) GC_bytes_allocd);
+ /* Output blank line for convenience here */
+ GC_log_printf(
+ "\n--> Marking for collection %lu after %lu allocated bytes\n",
+ (unsigned long)GC_gc_no + 1, (unsigned long) GC_bytes_allocd);
}
# ifdef MAKE_BACK_GRAPH
if (GC_print_back_height) {
for(i = 0;;i++) {
if ((*stop_func)()) {
if (GC_print_stats) {
- GC_log_printf("Abandoned stopped marking after ");
- GC_log_printf("%u iterations\n", i);
+ GC_log_printf("Abandoned stopped marking after "
+ "%u iterations\n", i);
}
GC_deficit = i; /* Give the mutator a chance. */
IF_THREADS(GC_world_stopped = FALSE);
GC_gc_no++;
if (GC_print_stats) {
- GC_log_printf("Collection %lu reclaimed %ld bytes",
- (unsigned long)GC_gc_no - 1,
- (long)GC_bytes_found);
- GC_log_printf(" ---> heapsize = %lu bytes\n",
- (unsigned long) GC_heapsize);
+ GC_log_printf(
+ "Collection %lu reclaimed %ld bytes ---> heapsize = %lu bytes\n",
+ (unsigned long)(GC_gc_no - 1), (long)GC_bytes_found,
+ (unsigned long)GC_heapsize);
/* Printf arguments may be pushed in funny places. Clear the */
/* space. */
GC_log_printf("");
void GC_traverse_back_graph(void);
#endif
-/* Finish up a collection. Assumes lock is held, signals are disabled, */
-/* but the world is otherwise running. */
-void GC_finish_collection(void)
+/* Finish up a collection. Assumes mark bits are consistent, lock is */
+/* held, but the world is otherwise running. */
+STATIC void GC_finish_collection(void)
{
# ifndef SMALL_CONFIG
- CLOCK_TYPE start_time;
- CLOCK_TYPE finalize_time;
+ CLOCK_TYPE start_time = 0; /* initialized to prevent warning. */
+ CLOCK_TYPE finalize_time = 0;
CLOCK_TYPE done_time;
# endif
}
if (GC_print_stats == VERBOSE) {
- GC_log_printf(
- "Immediately reclaimed %ld bytes in heap of size %lu bytes",
- (long)GC_bytes_found,
- (unsigned long)GC_heapsize);
# ifdef USE_MUNMAP
- GC_log_printf("(%lu unmapped)", (unsigned long)GC_unmapped_bytes);
+ GC_log_printf("Immediately reclaimed %ld bytes in heap"
+ " of size %lu bytes (%lu unmapped)\n",
+ (long)GC_bytes_found, (unsigned long)GC_heapsize,
+ (unsigned long)GC_unmapped_bytes);
+# else
+ GC_log_printf("Immediately reclaimed %ld bytes in heap"
+ " of size %lu bytes\n",
+ (long)GC_bytes_found, (unsigned long)GC_heapsize);
# endif
- GC_log_printf("\n");
}
/* Reset or increment counters for next cycle */
DCL_LOCK_STATE;
if (!GC_is_initialized) GC_init();
+ GC_ASSERT(stop_func != 0);
if (GC_debugging_started) GC_print_all_smashed();
GC_INVOKE_FINALIZERS();
LOCK();
struct hblk *h;
unsigned nbl = 0;
- GC_printf("Section %d from %p to %p ", i,
- start, start + len);
for (h = (struct hblk *)start; h < (struct hblk *)(start + len); h++) {
if (GC_is_black_listed(h, HBLKSIZE)) nbl++;
}
- GC_printf("%lu/%lu blacklisted\n", (unsigned long)nbl,
- (unsigned long)(len/HBLKSIZE));
+ GC_printf("Section %d from %p to %p %lu/%lu blacklisted\n",
+ i, start, start + len,
+ (unsigned long)nbl, (unsigned long)(len/HBLKSIZE));
}
}
# endif
* Make sure the object free list for size gran (in granules) is not empty.
* Return a pointer to the first object on the free list.
* The object MUST BE REMOVED FROM THE FREE LIST BY THE CALLER.
- * Assumes we hold the allocator lock and signals are disabled.
- *
+ * Assumes we hold the allocator lock.
*/
ptr_t GC_allocobj(size_t gran, int kind)
{
# define MAXHINCR 4096
# endif
-# define TIME_LIMIT 50 /* We try to keep pause times from exceeding */
- /* this by much. In milliseconds. */
-
# define BL_LIMIT GC_black_list_spacing
/* If we need a block of N bytes, and we have */
/* a block of N + BL_LIMIT bytes available, */
# else /* !MSWIN32, !MSWINCE, !BSD_TIME */
# include <time.h>
# if !defined(__STDC__) && defined(SPARC) && defined(SUNOS4)
- clock_t clock(); /* Not in time.h, where it belongs */
+ clock_t clock(void); /* Not in time.h, where it belongs */
# endif
# if defined(FREEBSD) && !defined(CLOCKS_PER_SEC)
# include <machine/limits.h>
PCR_waitForever);
# else
# if defined(GC_WIN32_THREADS) || defined(GC_PTHREADS)
- void GC_stop_world();
- void GC_start_world();
+ void GC_stop_world(void);
+ void GC_start_world(void);
# define STOP_WORLD() GC_stop_world()
# define START_WORLD() GC_start_world()
# else
# define ABORT(s) PCR_Base_Panic(s)
# else
# ifdef SMALL_CONFIG
-# define ABORT(msg) abort()
+# if defined(MSWIN32) || defined(MSWINCE)
+# define ABORT(msg) DebugBreak()
+# else
+# define ABORT(msg) abort()
+# endif
# else
GC_API void GC_abort(const char * msg);
# define ABORT(msg) GC_abort(msg)
# define HBLKDISPL(objptr) (((size_t) (objptr)) & (HBLKSIZE-1))
/* Round up byte allocation requests to integral number of words, etc. */
-# define ROUNDED_UP_WORDS(n) \
- BYTES_TO_WORDS((n) + (WORDS_TO_BYTES(1) - 1 + EXTRA_BYTES))
# define ROUNDED_UP_GRANULES(n) \
BYTES_TO_GRANULES((n) + (GRANULE_BYTES - 1 + EXTRA_BYTES))
# if MAX_EXTRA_BYTES == 0
/* even though they are not useful to the client. */
word _bytes_finalized;
/* Approximate number of bytes in objects (and headers) */
- /* That became ready for finalization in the last */
+ /* that became ready for finalization in the last */
/* collection. */
word _non_gc_bytes_at_gc;
/* Number of explicitly managed bytes of storage */
extern GC_bool GC_world_stopped;
#endif
-/* Operations */
-# ifndef abs
-# define abs(x) ((x) < 0? (-(x)) : (x))
-# endif
-
-
/* Marks are in a reserved area in */
/* each heap block. Each word has one mark bit associated */
/* with it. Only those corresponding to the beginning of an */
/* stacks are scheduled for scanning in *GC_push_other_roots, which */
/* is thread-package-specific. */
#endif
-void GC_push_current_stack(ptr_t cold_gc_frame, void *context);
- /* Push enough of the current stack eagerly to */
- /* ensure that callee-save registers saved in */
- /* GC frames are scanned. */
- /* In the non-threads case, schedule entire */
- /* stack for scanning. */
- /* The second argument is a pointer to the */
- /* (possibly null) thread context, for */
- /* (currently hypothetical) more precise */
- /* stack scanning. */
void GC_push_roots(GC_bool all, ptr_t cold_gc_frame);
/* Push all or dirty roots. */
extern void (*GC_push_other_roots)(void);
/* predefined to be non-zero. A client */
/* supplied replacement should also call the */
/* original function. */
-extern void GC_push_gc_structures(void);
- /* Push GC internal roots. These are normally */
- /* included in the static data segment, and */
- /* Thus implicitly pushed. But we must do this */
- /* explicitly if normal root processing is */
- /* disabled. Calls the following: */
+
extern void GC_push_finalizer_structures(void);
extern void GC_push_stubborn_structures (void);
# ifdef THREADS
/* Not called if 0. Called with allocation */
/* lock held. */
/* 0 by default. */
-void GC_push_regs_and_stack(ptr_t cold_gc_frame);
void GC_push_regs(void);
/* Ditto, but also mark from clean pages. */
struct hblk * GC_push_next_marked_uncollectable(struct hblk * h);
/* Ditto, but mark only from uncollectable pages. */
-GC_bool GC_stopped_mark(GC_stop_func stop_func);
- /* Stop world and mark from all roots */
- /* and rescuers. */
void GC_clear_hdr_marks(hdr * hhdr);
/* Clear the mark bits in a header */
void GC_set_hdr_marks(hdr * hhdr);
/* Return FALSE on failure. */
void GC_register_displacement_inner(size_t offset);
/* Version of GC_register_displacement */
- /* that assumes lock is already held */
- /* and signals are already disabled. */
+ /* that assumes lock is already held. */
void GC_initialize_offsets(void);
/* Initialize GC_valid_offsets, */
GC_bool GC_try_to_collect_inner(GC_stop_func f);
/* Collect; caller must have acquired */
- /* lock and disabled signals. */
- /* Collection is aborted if f returns */
- /* TRUE. Returns TRUE if it completes */
- /* successfully. */
+ /* lock. Collection is aborted if f */
+ /* returns TRUE. Returns TRUE if it */
+ /* completes successfully. */
# define GC_gcollect_inner() \
(void) GC_try_to_collect_inner(GC_never_stop_func)
-void GC_finish_collection(void);
- /* Finish collection. Mark bits are */
- /* consistent and lock is still held. */
GC_bool GC_collect_or_expand(word needed_blocks, GC_bool ignore_off_page);
/* Collect or expand heap in an attempt */
/* make the indicated number of free */
#endif
/* Make arguments appear live to compiler */
-# ifdef __WATCOMC__
+# if defined(__BORLANDC__) || defined(__WATCOMC__)
void GC_noop(void*, ...);
# else
# ifdef __DMC__
/* GC_notify_all_builder() is called when GC_fl_builder_count */
/* reaches 0. */
- extern void GC_acquire_mark_lock();
- extern void GC_release_mark_lock();
- extern void GC_notify_all_builder();
- extern void GC_wait_for_reclaim();
+ void GC_acquire_mark_lock(void);
+ void GC_release_mark_lock(void);
+ void GC_notify_all_builder(void);
+ void GC_wait_for_reclaim(void);
extern word GC_fl_builder_count; /* Protected by mark lock. */
- extern void GC_notify_all_marker();
- extern void GC_wait_marker();
+ void GC_notify_all_marker(void);
+ void GC_wait_marker(void);
extern word GC_mark_no; /* Protected by mark lock. */
extern void GC_help_marker(word my_mark_no);
GC_noop1((word)(&dummy));
}
-void GC_push_regs_and_stack(ptr_t cold_gc_frame)
-{
- GC_with_callee_saves_pushed(GC_push_current_stack, cold_gc_frame);
-}
-
#if defined(ASM_CLEAR_CODE)
# ifdef LINT
/*ARGSUSED*/
size_t total = 0;
for (i = 0; i < n_root_sets; i++) {
- GC_printf("From %p to %p ",
+ GC_printf("From %p to %p%s\n",
GC_static_roots[i].r_start,
- GC_static_roots[i].r_end);
- if (GC_static_roots[i].r_tmp) {
- GC_printf(" (temporary)\n");
- } else {
- GC_printf("\n");
- }
+ GC_static_roots[i].r_end,
+ GC_static_roots[i].r_tmp ? " (temporary)" : "");
total += GC_static_roots[i].r_end - GC_static_roots[i].r_start;
}
GC_printf("Total size: %ld\n", (unsigned long) total);
/* takes to scan the roots. */
{
register int i;
-
+ old = 0; /* initialized to prevent warning. */
for (i = 0; i < n_root_sets; i++) {
old = GC_static_roots + i;
if (b <= old -> r_end && e >= old -> r_start) {
}
/* Invoke push_conditional on ranges that are not excluded. */
+/*ARGSUSED*/
STATIC void GC_push_conditional_with_exclusions(ptr_t bottom, ptr_t top,
GC_bool all)
{
}
}
+ /* Push enough of the current stack eagerly to */
+ /* ensure that callee-save registers saved in */
+ /* GC frames are scanned. */
+ /* In the non-threads case, schedule entire */
+ /* stack for scanning. */
+ /* The second argument is a pointer to the */
+ /* (possibly null) thread context, for */
+ /* (currently hypothetical) more precise */
+ /* stack scanning. */
/*
* In the absence of threads, push the stack contents.
* In the presence of threads, push enough of the current stack
* FIXME: Merge with per-thread stuff.
*/
/*ARGSUSED*/
-void GC_push_current_stack(ptr_t cold_gc_frame, void * context)
+STATIC void GC_push_current_stack(ptr_t cold_gc_frame, void * context)
{
# if defined(THREADS)
if (0 == cold_gc_frame) return;
void (*GC_push_typed_structures) (void) = NULL;
+ /* Push GC internal roots. These are normally */
+ /* included in the static data segment, and */
+ /* thus implicitly pushed. But we must do this */
+ /* explicitly if normal root processing is */
+ /* disabled. */
/*
* Push GC internal roots. Only called if there is some reason to believe
* these would not otherwise get registered.
*/
-void GC_push_gc_structures(void)
+STATIC void GC_push_gc_structures(void)
{
GC_push_finalizer_structures();
# if defined(THREADS)
# endif
}
+STATIC void GC_push_regs_and_stack(ptr_t cold_gc_frame)
+{
+ GC_with_callee_saves_pushed(GC_push_current_stack, cold_gc_frame);
+}
+
/*
* Call the mark routines (GC_tl_push for a single pointer, GC_push_conditional
* on groups of pointers) on every top level accessible pointer.