From: Peter Johnson
Date: Thu, 6 Aug 2009 07:13:11 +0000 (-0000)
Subject: Fix #186: Avoid memory runaway in optimizer TIMES circular reference checking.
X-Git-Tag: v1.0.0~82
X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=7a0082c94d011a5154200b8fe7a204eef2765a07;p=yasm

Fix #186: Avoid memory runaway in optimizer TIMES circular reference checking.

Check for duplicates in the backtrace used when checking for circular
references. This uses a simple O(N^2) algorithm (the number of items in the
backtrace is usually small), but it avoids memory runaway caused by storing
duplicate items.

svn path=/trunk/yasm/; revision=2229
---

diff --git a/libyasm/section.c b/libyasm/section.c
index 1f73929b..2c6c8de6 100644
--- a/libyasm/section.c
+++ b/libyasm/section.c
@@ -864,6 +864,7 @@ struct yasm_span {
      * checking for circular references (cycles) with id=0 spans.
      */
     yasm_span **backtrace;
+    int backtrace_size;
 
     /* First offset setter following this span's bytecode */
     yasm_offset_setter *os;
@@ -901,6 +902,7 @@ create_span(yasm_bytecode *bc, int id, /*@null@*/ const yasm_value *value,
     span->id = id;
     span->active = 1;
     span->backtrace = NULL;
+    span->backtrace_size = 0;
     span->os = os;
 
     return span;
@@ -1160,7 +1162,8 @@ check_cycle(IntervalTreeNode *node, void *d)
     optimize_data *optd = d;
     yasm_span_term *term = node->data;
     yasm_span *depspan = term->span;
-    int bt_size = 0, dep_bt_size = 0;
+    int i;
+    int depspan_bt_alloc;
 
     /* Only check for cycles in id=0 spans */
     if (depspan->id > 0)
@@ -1170,10 +1173,8 @@ check_cycle(IntervalTreeNode *node, void *d)
      * span is in our backtrace.
      */
     if (optd->span->backtrace) {
-        yasm_span *s;
-        while ((s = optd->span->backtrace[bt_size])) {
-            bt_size++;
-            if (s == depspan)
+        for (i=0; i<optd->span->backtrace_size; i++) {
+            if (optd->span->backtrace[i] == depspan)
                 yasm_error_set(YASM_ERROR_VALUE,
                                N_("circular reference detected"));
         }
@@ -1183,25 +1184,51 @@ check_cycle(IntervalTreeNode *node, void *d)
      * span.
      */
     if (!depspan->backtrace) {
-        depspan->backtrace = yasm_xmalloc((bt_size+2)*sizeof(yasm_span *));
-        if (bt_size > 0)
+        depspan->backtrace = yasm_xmalloc((optd->span->backtrace_size+1)*
+                                          sizeof(yasm_span *));
+        if (optd->span->backtrace_size > 0)
             memcpy(depspan->backtrace, optd->span->backtrace,
-                   bt_size*sizeof(yasm_span *));
-        depspan->backtrace[bt_size] = optd->span;
-        depspan->backtrace[bt_size+1] = NULL;
+                   optd->span->backtrace_size*sizeof(yasm_span *));
+        depspan->backtrace[optd->span->backtrace_size] = optd->span;
+        depspan->backtrace_size = optd->span->backtrace_size+1;
         return;
     }
 
-    while (depspan->backtrace[dep_bt_size])
-        dep_bt_size++;
-    depspan->backtrace =
-        yasm_xrealloc(depspan->backtrace,
-                      (dep_bt_size+bt_size+2)*sizeof(yasm_span *));
-    if (bt_size > 0)
-        memcpy(&depspan->backtrace[dep_bt_size], optd->span->backtrace,
-               (bt_size-1)*sizeof(yasm_span *));
-    depspan->backtrace[dep_bt_size+bt_size] = optd->span;
-    depspan->backtrace[dep_bt_size+bt_size+1] = NULL;
+    /* Add our complete backtrace, checking for duplicates */
+    depspan_bt_alloc = depspan->backtrace_size;
+    for (i=0; i<optd->span->backtrace_size; i++) {
+        int present = 0;
+        int j;
+        for (j=0; j<depspan->backtrace_size; j++) {
+            if (optd->span->backtrace[i] == depspan->backtrace[j]) {
+                present = 1;
+                break;
+            }
+        }
+        if (present)
+            continue;
+        /* Not already in array; add it. */
+        if (depspan->backtrace_size >= depspan_bt_alloc)
+        {
+            depspan_bt_alloc *= 2;
+            depspan->backtrace =
+                yasm_xrealloc(depspan->backtrace,
+                              depspan_bt_alloc*sizeof(yasm_span *));
+        }
+        depspan->backtrace[depspan->backtrace_size] = optd->span->backtrace[i];
+        depspan->backtrace_size++;
+    }
+
+    /* Add ourselves. */
+    if (depspan->backtrace_size >= depspan_bt_alloc)
+    {
+        depspan_bt_alloc++;
+        depspan->backtrace =
+            yasm_xrealloc(depspan->backtrace,
+                          depspan_bt_alloc*sizeof(yasm_span *));
+    }
+    depspan->backtrace[depspan->backtrace_size] = optd->span;
+    depspan->backtrace_size++;
 }
 
 static void
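
The heart of the patch is a duplicate-checking array merge with geometric
reallocation. As a reading aid, here is a minimal standalone sketch of the
same idea. The struct bt type and the bt_push()/bt_merge() helpers are
hypothetical names invented for this sketch, not yasm APIs; the real code
works directly on yasm_span pointers inside check_cycle() as shown above.

/* backtrace_merge.c -- standalone sketch, not part of yasm.
 * Illustrates the O(N^2) duplicate-avoiding merge the patch uses.
 * Build: cc -o backtrace_merge backtrace_merge.c
 */
#include <stdio.h>
#include <stdlib.h>

typedef struct span { int id; } span;   /* stand-in for yasm_span */

struct bt {
    span **items;   /* span pointers; compared by identity only */
    int size;       /* number of valid entries */
    int alloc;      /* allocated capacity */
};

/* Append p, doubling capacity when full.  (yasm's yasm_xrealloc aborts
 * on allocation failure; we do the equivalent by hand here.) */
static void bt_push(struct bt *bt, span *p)
{
    if (bt->size >= bt->alloc) {
        bt->alloc = bt->alloc ? bt->alloc*2 : 4;
        bt->items = realloc(bt->items, bt->alloc*sizeof(span *));
        if (!bt->items)
            abort();
    }
    bt->items[bt->size++] = p;
}

/* Merge src into dst, skipping entries dst already holds.  The linear
 * membership scan makes this O(dst->size * src->size), cheap for the
 * small backtraces seen in practice, and it bounds dst->size by the
 * number of distinct spans instead of letting duplicates accumulate
 * across repeated merges. */
static void bt_merge(struct bt *dst, const struct bt *src)
{
    int i, j;
    for (i = 0; i < src->size; i++) {
        int present = 0;
        for (j = 0; j < dst->size; j++) {
            if (src->items[i] == dst->items[j]) {
                present = 1;
                break;
            }
        }
        if (!present)
            bt_push(dst, src->items[i]);
    }
}

int main(void)
{
    span a = {1}, b = {2}, c = {3};
    struct bt dst = {0}, src = {0};

    bt_push(&dst, &a); bt_push(&dst, &b);
    bt_push(&src, &b); bt_push(&src, &c); bt_push(&src, &a);

    bt_merge(&dst, &src);               /* only &c is appended */
    printf("dst size = %d (expect 3)\n", dst.size);

    free(dst.items);
    free(src.items);
    return 0;
}

Before this change, each merge copied the source backtrace wholesale, so a
span reachable through many paths was stored once per path and the arrays
could grow multiplicatively across merges; with the duplicate check, each
backtrace is bounded by the number of distinct spans, which is what stops
the memory runaway reported in #186.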