    int result;
    DCL_LOCK_STATE;
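
    /* Apparently GC_expand_hp: the GC_init() call is moved out of the */
    /* locked region so that initialization runs before the allocation */
    /* lock is taken. */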
-   LOCK();
    if (!EXPECT(GC_is_initialized, TRUE)) GC_init();
+   LOCK();
    result = (int)GC_expand_hp_inner(divHBLKSZ((word)bytes));
    if (result) GC_requested_heapsize += bytes;
    UNLOCK();
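
    /* Likely a large-object allocation path (GC_alloc_large or a similar */
    /* routine), entered with the allocation lock already held: the lock */
    /* is dropped around GC_init() "just to unset GC_lock_holder" and */
    /* re-acquired afterwards. */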
    GC_ASSERT(I_HOLD_LOCK());
    lb = ROUNDUP_GRANULE_SIZE(lb);
    n_blocks = OBJ_SZ_TO_BLOCKS(lb);
-   if (!EXPECT(GC_is_initialized, TRUE)) GC_init();
+   if (!EXPECT(GC_is_initialized, TRUE)) {
+     DCL_LOCK_STATE;
+     UNLOCK(); /* just to unset GC_lock_holder */
+     GC_init();
+     LOCK();
+   }
    /* Do our share of marking work */
    if (GC_incremental && !GC_dont_gc)
      GC_collect_a_little_inner((int)n_blocks);
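
    /* Likely a small-object slow path (GC_generic_malloc_inner or a */
    /* similar routine): the lock is released around GC_init(), and */
    /* GC_size_map[lb] is re-read afterwards in case initialization */
    /* has set up the size map. */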
    if (EXPECT(0 == op, FALSE)) {
      if (lg == 0) {
        if (!EXPECT(GC_is_initialized, TRUE)) {
+         DCL_LOCK_STATE;
+         UNLOCK(); /* just to unset GC_lock_holder */
          GC_init();
+         LOCK();
          lg = GC_size_map[lb];
        }
        if (0 == lg) {
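
    /* Possibly GC_generic_malloc_many or a similar entry point: */
    /* GC_init() is again called before LOCK() rather than after */
    /* the lock is taken. */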
    GC_print_all_errors();
    GC_INVOKE_FINALIZERS();
    GC_DBG_COLLECT_AT_MALLOC(lb);
-   LOCK();
    if (!EXPECT(GC_is_initialized, TRUE)) GC_init();
+   LOCK();
    /* Do our share of marking work */
    if (GC_incremental && !GC_dont_gc) {
      ENTER_GC();
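
    /* Evidently GC_init() itself (GC_is_initialized is set here): in the */
    /* GC_ASSERTIONS plus GC_ALWAYS_MULTITHREADED configuration the lock */
    /* is taken "just to set GC_lock_holder" around GC_thr_init() and */
    /* around the initial GC_gcollect_inner() call. */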
    GC_is_initialized = TRUE;
# if defined(GC_PTHREADS) || defined(GC_WIN32_THREADS)
#   if defined(GC_ASSERTIONS) && defined(GC_ALWAYS_MULTITHREADED)
+     DCL_LOCK_STATE;
      LOCK(); /* just to set GC_lock_holder */
      GC_thr_init();
      UNLOCK();
#   endif
    COND_DUMP;
    /* Get black list set up and/or incremental GC started */
-   if (!GC_dont_precollect || GC_incremental) GC_gcollect_inner();
+   if (!GC_dont_precollect || GC_incremental) {
+#    if defined(GC_ASSERTIONS) && defined(GC_ALWAYS_MULTITHREADED)
+       LOCK();
+       GC_gcollect_inner();
+       UNLOCK();
+#    else
+       GC_gcollect_inner();
+#    endif
+   }
#   ifdef STUBBORN_ALLOC
      GC_stubborn_init();
#   endif
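
    /* Likely GC_enable_incremental: the allocation lock is held at this */
    /* point, so it is released around the GC_init() call and re-acquired */
    /* afterwards. */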
    maybe_install_looping_handler(); /* Before write fault handler! */
    GC_incremental = TRUE;
    if (!GC_is_initialized) {
+     UNLOCK();
      GC_init();
+     LOCK();
    } else {
      GC_dirty_init();
    }
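
    /* Common thread through the hunks above, as suggested by their */
    /* in-line comments: GC_init() is expected to run without */
    /* GC_lock_holder set, so callers either invoke it before LOCK() */
    /* or temporarily UNLOCK()/LOCK() around it when the allocation */
    /* lock is already held. */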