int id;
int active;
+
+ /* NULL-terminated array of spans that led to this span. Used only for
+ * checking for circular references (cycles) with id=0 spans.
+ */
+ yasm_span **backtrace;
};
typedef struct optimize_data {
/*@reldef@*/ STAILQ_HEAD(, yasm_span) QA, QB;
/*@only@*/ IntervalTree *itree;
long len_diff; /* used only for optimize_term_expand */
+ yasm_span *span; /* used only for check_cycle */
} optimize_data;
static yasm_span *
span->pos_thres = pos_thres;
span->id = id;
span->active = 1;
+ span->backtrace = NULL;
return span;
}
span->items[i].data.intn = yasm_intnum_create_int(0);
/* Check for circular references */
- if ((span->bc->bc_index >= span->terms[i].precbc->bc_index &&
+ if ((span->bc->bc_index > span->terms[i].precbc->bc_index &&
span->bc->bc_index <= span->terms[i].precbc2->bc_index) ||
- (span->bc->bc_index >= span->terms[i].precbc2->bc_index &&
+ (span->bc->bc_index > span->terms[i].precbc2->bc_index &&
span->bc->bc_index <= span->terms[i].precbc->bc_index))
yasm_error_set(YASM_ERROR_VALUE,
N_("circular reference detected"));
yasm_intnum_destroy(span->items[i].data.intn);
yasm_xfree(span->items);
}
+ if (span->backtrace)
+ yasm_xfree(span->backtrace);
yasm_xfree(span);
}
}
}
+/* IntervalTree enumeration callback used when scanning id=0 (TIMES
+ * expansion) spans for circular references.  optd->span is the span
+ * currently being walked; node carries a span term whose owning span
+ * (depspan) depends on it.  If depspan already appears in optd->span's
+ * backtrace we have a cycle; otherwise optd->span's full backtrace plus
+ * optd->span itself is appended to depspan's NULL-terminated backtrace.
+ */
+static void
+check_cycle(IntervalTreeNode *node, void *d)
+{
+    optimize_data *optd = d;
+    yasm_span_term *term = node->data;
+    yasm_span *depspan = term->span;
+    int bt_size = 0, dep_bt_size = 0;
+
+    /* Only check for cycles in id=0 spans */
+    if (depspan->id != 0)
+        return;
+
+    /* Check for a circular reference by looking to see if this dependent
+     * span is in our backtrace.  This also counts our backtrace length
+     * into bt_size for the copies below.
+     */
+    if (optd->span->backtrace) {
+        yasm_span *s;
+        while ((s = optd->span->backtrace[bt_size])) {
+            bt_size++;
+            if (s == depspan)
+                yasm_error_set(YASM_ERROR_VALUE,
+                               N_("circular reference detected"));
+        }
+    }
+
+    /* Add our complete backtrace and ourselves to backtrace of dependent
+     * span.
+     */
+    if (!depspan->backtrace) {
+        depspan->backtrace = yasm_xmalloc((bt_size+2)*sizeof(yasm_span *));
+        /* Guard: when bt_size is 0 our backtrace pointer is NULL, and
+         * memcpy from NULL is undefined even with a zero size.
+         */
+        if (bt_size > 0)
+            memcpy(depspan->backtrace, optd->span->backtrace,
+                   bt_size*sizeof(yasm_span *));
+        depspan->backtrace[bt_size] = optd->span;
+        depspan->backtrace[bt_size+1] = NULL;
+        return;
+    }
+
+    /* Find current length of dependent span's backtrace. */
+    while (depspan->backtrace[dep_bt_size])
+        dep_bt_size++;
+    depspan->backtrace =
+        yasm_xrealloc(depspan->backtrace,
+                      (dep_bt_size+bt_size+2)*sizeof(yasm_span *));
+    /* Copy all bt_size entries.  Copying bt_size-1 left the slot before
+     * optd->span uninitialized, and with bt_size==0 the int expression
+     * (bt_size-1) underflowed to SIZE_MAX when converted for memcpy.
+     */
+    if (bt_size > 0)
+        memcpy(&depspan->backtrace[dep_bt_size], optd->span->backtrace,
+               bt_size*sizeof(yasm_span *));
+    depspan->backtrace[dep_bt_size+bt_size] = optd->span;
+    depspan->backtrace[dep_bt_size+bt_size+1] = NULL;
+}
+
static void
optimize_term_expand(IntervalTreeNode *node, void *d)
{
}
}
+ /* Look for cycles in times expansion (span.id==0) */
+ TAILQ_FOREACH(span, &optd.spans, link) {
+ if (span->id != 0)
+ continue;
+ optd.span = span;
+ IT_enumerate(optd.itree, (long)span->bc->bc_index,
+ (long)span->bc->bc_index, &optd, check_cycle);
+ if (yasm_error_occurred()) {
+ yasm_errwarn_propagate(errwarns, span->bc->line);
+ saw_error = 1;
+ }
+ }
+
+ if (saw_error) {
+ optimize_cleanup(&optd);
+ return;
+ }
+
/* Step 2 */
STAILQ_INIT(&optd.QA);
while (!STAILQ_EMPTY(&optd.QA) || !(STAILQ_EMPTY(&optd.QB))) {