From: boyu Date: Tue, 7 Jul 2015 16:16:29 +0000 (+0800) Subject: Fix bug and memory leaks in 2.0 X-Git-Tag: v201~2 X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=b007df4a6f1dd014f011721146db38e3891c6e98;p=liblinear Fix bug and memory leaks in 2.0 - For SVR, model_->w was not initialized. Now it is initialized in train(). - For parameter search, subprob of CV and its x, y are not freed at the end of find_parameter_C(). - There were problems in checking prev_w for stopping condition. It wrongly allocated memory for num_unchange_w < 0. Flow control is modified. - Reposition w_new in tron to reduce peak memory usage. - Now print nr_fold when parameter search starts. --- diff --git a/linear.cpp b/linear.cpp index 7ad136f..deb0169 100644 --- a/linear.cpp +++ b/linear.cpp @@ -2343,6 +2343,8 @@ model* train(const problem *prob, const parameter *param) if(check_regression_model(model_)) { model_->w = Malloc(double, w_size); + for(i=0; i<w_size; i++) + model_->w[i] = 0; model_->nr_class = 2; model_->label = NULL; train_one(prob, param, model_->w, 0, 0); @@ -2616,7 +2618,13 @@ void find_parameter_C(const problem *prob, const parameter *param, int nr_fold, else total_w_size = subprob[i].n * submodel->nr_class; - if(prev_w[i] != NULL && num_unchanged_w >= 0) + if(prev_w[i] == NULL) + { + prev_w[i] = Malloc(double, total_w_size); + for(j=0; j<total_w_size; j++) + prev_w[i][j] = submodel->w[j]; + } + else if(num_unchanged_w >= 0) { double norm_w_diff = 0; for(j=0; j<total_w_size; j++) { norm_w_diff += (submodel->w[j] - prev_w[i][j])*(submodel->w[j] - prev_w[i][j]); prev_w[i][j] = submodel->w[j]; } @@ -2667,8 +2674,13 @@ void find_parameter_C(const problem *prob, const parameter *param, int nr_fold, free(perm); free(target); for(i=0; i