Issue #179 (bdwgc).
Now, if there is a chance of unmapping of root segments, those
segments are scanned immediately by GC_push_roots, thus preventing
memory protection faults in GC_mark_local.
* include/private/gc_priv.h (GC_PUSH_CONDITIONAL): Move definition to
mark_rts.c.
* include/private/gcconfig.h [WRAP_MARK_SOME && PARALLEL_MARK]
(PARALLEL_MARK): Do not undefine; remove TODO item.
* mark.c [WRAP_MARK_SOME && PARALLEL_MARK] (GC_push_conditional_eager):
New internal function; add TODO item.
* mark_rts.c [WRAP_MARK_SOME && PARALLEL_MARK]
(GC_push_conditional_eager): Declare function.
* mark_rts.c [WRAP_MARK_SOME && PARALLEL_MARK] (GC_PUSH_CONDITIONAL):
Define to GC_push_conditional_eager if GC_parallel.
GC_INNER GC_bool GC_collection_in_progress(void);
/* Collection is in progress, or was abandoned. */
-#ifndef GC_DISABLE_INCREMENTAL
-# define GC_PUSH_CONDITIONAL(b, t, all) \
- GC_push_conditional((ptr_t)(b), (ptr_t)(t), all)
- /* Do either of GC_push_all or GC_push_selected */
- /* depending on the third arg. */
-#else
-# define GC_PUSH_CONDITIONAL(b, t, all) GC_push_all((ptr_t)(b), (ptr_t)(t))
-#endif
-
#define GC_PUSH_ALL_SYM(sym) \
GC_push_all((ptr_t)&(sym), (ptr_t)&(sym) + sizeof(sym))
# define WRAP_MARK_SOME
#endif
-#if defined(WRAP_MARK_SOME) && defined(PARALLEL_MARK)
- /* TODO: GC_mark_local does not handle memory protection faults yet. */
-# undef PARALLEL_MARK
-#endif
-
#if defined(PARALLEL_MARK) && !defined(DEFAULT_STACK_MAYBE_SMALL) \
&& (defined(HPUX) || defined(GC_DGUX386_THREADS) \
|| defined(NO_GETCONTEXT) /* e.g. musl */)
# endif
}
+#if defined(WRAP_MARK_SOME) && defined(PARALLEL_MARK)
+ /* Similar to GC_push_conditional but scans the whole region immediately. */
+ /* Marks objects eagerly (bypassing deferred/local marking) so that no   */
+ /* protection fault can be taken later while holding the mark lock.      */
+ GC_INNER void GC_push_conditional_eager(ptr_t bottom, ptr_t top,
+ GC_bool all)
+ GC_ATTR_NO_SANITIZE_ADDR GC_ATTR_NO_SANITIZE_MEMORY
+ {
+ /* Round bottom up and top down to the pointer alignment boundary. */
+ word * b = (word *)(((word) bottom + ALIGNMENT-1) & ~(ALIGNMENT-1));
+ word * t = (word *)(((word) top) & ~(ALIGNMENT-1));
+ register word *p;
+ register word *lim;
+ register ptr_t greatest_ha = GC_greatest_plausible_heap_addr;
+ register ptr_t least_ha = GC_least_plausible_heap_addr;
+ /* Shadow the global heap-bound names with the cached locals so that */
+ /* GC_PUSH_ONE_HEAP (which refers to the globals by name) reads the  */
+ /* register copies instead of reloading the globals on each access.  */
+# define GC_greatest_plausible_heap_addr greatest_ha
+# define GC_least_plausible_heap_addr least_ha
+
+ /* Nothing to scan. */
+ if (top == NULL)
+ return;
+ (void)all; /* TODO: If !all then scan only dirty pages. */
+
+ /* lim is the last aligned word location below top. */
+ lim = t - 1;
+ for (p = b; (word)p <= (word)lim; p = (word *)((ptr_t)p + ALIGNMENT)) {
+ register word q = *p;
+ GC_PUSH_ONE_HEAP(q, p, GC_mark_stack_top);
+ }
+# undef GC_greatest_plausible_heap_addr
+# undef GC_least_plausible_heap_addr
+ }
+#endif /* WRAP_MARK_SOME && PARALLEL_MARK */
+
#if !defined(SMALL_CONFIG) && !defined(USE_MARK_BYTES) && \
defined(MARK_BIT_PER_GRANULE)
# if GC_GRANULE_WORDS == 1
UNLOCK();
}
+#if defined(WRAP_MARK_SOME) && defined(PARALLEL_MARK)
+ /* GC_mark_local does not handle memory protection faults yet. So, */
+ /* the static data regions are scanned immediately by GC_push_roots. */
+ GC_INNER void GC_push_conditional_eager(ptr_t bottom, ptr_t top,
+ GC_bool all);
+ /* Eager scanning is needed only when actually marking in parallel */
+ /* (GC_parallel); otherwise fall back to the regular deferred push. */
+# define GC_PUSH_CONDITIONAL(b, t, all) \
+ (GC_parallel \
+ ? GC_push_conditional_eager(b, t, all) \
+ : GC_push_conditional((ptr_t)(b), (ptr_t)(t), all))
+#elif defined(GC_DISABLE_INCREMENTAL)
+ /* No incremental collection: there are no dirty bits to consult, */
+ /* so unconditionally push the whole region.                      */
+# define GC_PUSH_CONDITIONAL(b, t, all) GC_push_all((ptr_t)(b), (ptr_t)(t))
+#else
+# define GC_PUSH_CONDITIONAL(b, t, all) \
+ GC_push_conditional((ptr_t)(b), (ptr_t)(t), all)
+ /* Do either of GC_push_all or GC_push_selected */
+ /* depending on the third arg. */
+#endif
+
/* Invoke push_conditional on ranges that are not excluded. */
STATIC void GC_push_conditional_with_exclusions(ptr_t bottom, ptr_t top,
GC_bool all GC_ATTR_UNUSED)