granicus.if.org Git - liblinear/commitdiff
Fix bugs and memory leaks in 2.0
author    boyu <r02222047@ntu.edu.tw>
          Tue, 7 Jul 2015 16:16:29 +0000 (00:16 +0800)
committer boyu <r02222047@ntu.edu.tw>
          Wed, 8 Jul 2015 08:50:14 +0000 (16:50 +0800)
- For SVR, model_->w was not initialized. It is now zero-initialized
  in train() (see the sketch after this list).
- For parameter search, the CV subproblems (subprob) and their x and y
  arrays were not freed at the end of find_parameter_C(). Now they are.
- There were problems in checking prev_w for the stopping condition:
  memory was wrongly re-allocated whenever num_unchanged_w < 0, leaking
  the previous buffer. The flow control has been restructured (see the
  sketch after the linear.cpp diff).
- The allocation of w_new in tron() is repositioned to reduce peak
  memory usage (see the note after the tron.cpp diff).
- nr_fold is now printed when the parameter search starts.
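
For context, a minimal standalone sketch of the first fix. liblinear's
Malloc macro is a thin wrapper over malloc() and returns uninitialized
memory; the helper below (alloc_regression_w is an illustrative name,
not liblinear API) mirrors what the train() hunk does:

    #include <stdlib.h>

    /* Same idiom as liblinear's Malloc macro: plain malloc(), so the
     * returned memory is uninitialized. */
    #define Malloc(type, n) (type *)malloc((size_t)(n) * sizeof(type))

    /* The regression solver reads w as its starting point, so w must
     * be zeroed right after allocation or training starts from
     * whatever was left on the heap. */
    static double *alloc_regression_w(int w_size)
    {
            int i;
            double *w = Malloc(double, w_size);
            for (i = 0; i < w_size; i++)
                    w[i] = 0;   /* the fix: explicit zero-initialization */
            return w;
    }

calloc() would be an equivalent alternative; the diff instead keeps the
existing Malloc idiom and zeroes explicitly.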

linear.cpp
train.c
tron.cpp

diff --git a/linear.cpp b/linear.cpp
index 7ad136ff5b92cecb2b81530b0a23eb00809c110d..deb01690b80697de0508adc1c9c378f76986d649 100644
--- a/linear.cpp
+++ b/linear.cpp
@@ -2343,6 +2343,8 @@ model* train(const problem *prob, const parameter *param)
        if(check_regression_model(model_))
        {
                model_->w = Malloc(double, w_size);
+               for(i=0; i<w_size; i++)
+                       model_->w[i] = 0;
                model_->nr_class = 2;
                model_->label = NULL;
                train_one(prob, param, model_->w, 0, 0);
@@ -2616,7 +2618,13 @@ void find_parameter_C(const problem *prob, const parameter *param, int nr_fold,
                        else
                                total_w_size = subprob[i].n * submodel->nr_class;
 
-                       if(prev_w[i] != NULL && num_unchanged_w >= 0)
+                       if(prev_w[i] == NULL)
+                       {
+                               prev_w[i] = Malloc(double, total_w_size);
+                               for(j=0; j<total_w_size; j++)
+                                       prev_w[i][j] = submodel->w[j];
+                       }
+                       else if(num_unchanged_w >= 0)
                        {
                                double norm_w_diff = 0;
                                for(j=0; j<total_w_size; j++)
@@ -2631,7 +2639,6 @@ void find_parameter_C(const problem *prob, const parameter *param, int nr_fold,
                        }
                        else
                        {
-                               prev_w[i] = Malloc(double, total_w_size);
                                for(j=0; j<total_w_size; j++)
                                        prev_w[i][j] = submodel->w[j];
                        }
@@ -2667,8 +2674,13 @@ void find_parameter_C(const problem *prob, const parameter *param, int nr_fold,
        free(perm);
        free(target);
        for(i=0; i<nr_fold; i++)
+       {
+               free(subprob[i].x);
+               free(subprob[i].y);
                free(prev_w[i]);
+       }
        free(prev_w);
+       free(subprob);
 }
 
 double predict_values(const struct model *model_, const struct feature_node *x, double *dec_values)
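
Taken together, the two find_parameter_C() hunks above make prev_w[i]
be allocated exactly once per fold, on first use; later iterations over
C either accumulate the change in w (while tracking is on) or merely
refresh the stored copy. Below is a simplified standalone sketch of the
corrected flow; update_prev_w and its parameters are illustrative names
using plain arrays instead of liblinear types, and the caller's
tolerance update of num_unchanged_w is elided:

    #include <stdlib.h>

    static void update_prev_w(double **prev_w_i, const double *w,
                              int total_w_size, int num_unchanged_w,
                              double *norm_w_diff)
    {
            int j;
            if (*prev_w_i == NULL)
            {
                    /* first C value for this fold: allocate once, copy w */
                    *prev_w_i = (double *)malloc(total_w_size * sizeof(double));
                    for (j = 0; j < total_w_size; j++)
                            (*prev_w_i)[j] = w[j];
            }
            else if (num_unchanged_w >= 0)
            {
                    /* still tracking convergence: accumulate ||w - prev_w||^2
                     * while refreshing the stored copy */
                    *norm_w_diff = 0;
                    for (j = 0; j < total_w_size; j++)
                    {
                            double d = w[j] - (*prev_w_i)[j];
                            *norm_w_diff += d * d;
                            (*prev_w_i)[j] = w[j];
                    }
            }
            else
            {
                    /* tracking off: refresh the copy without reallocating
                     * (the pre-fix code re-allocated here, leaking the
                     * old buffer) */
                    for (j = 0; j < total_w_size; j++)
                            (*prev_w_i)[j] = w[j];
            }
    }

The second hunk then completes the cleanup: each fold's subprob[i].x,
subprob[i].y, and prev_w[i] are freed before the prev_w and subprob
arrays themselves, closing the leaks named in the commit message.
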
diff --git a/train.c b/train.c
index 4df8594e5db618d63d2a777d439299e6d4a8d244..53f6dbb19fbba62c1ae49e58c74ff4c161c2b203 100644
--- a/train.c
+++ b/train.c
@@ -149,6 +149,7 @@ void do_find_parameter_C()
                start_C = param.C;
        else
                start_C = -1.0;
+       printf("Doing parameter search with %d-fold cross validation.\n", nr_fold);
        find_parameter_C(&prob, &param, nr_fold, start_C, max_C, &best_C, &best_rate);
        printf("Best C = %lf  CV accuracy = %g%%\n", best_C, 100.0*best_rate);
 }
diff --git a/tron.cpp b/tron.cpp
index 2cd283405da8229ca70617cddeb140fd38fd24ff..3ea60f6dfa45be60f159a9ba3126d21a6d57071f 100644
--- a/tron.cpp
+++ b/tron.cpp
@@ -69,7 +69,6 @@ void TRON::tron(double *w)
        int search = 1, iter = 1, inc = 1;
        double *s = new double[n];
        double *r = new double[n];
-       double *w_new = new double[n];
        double *g = new double[n];
 
        // calculate gradient norm at w=0 for stopping condition.
@@ -91,6 +90,7 @@ void TRON::tron(double *w)
 
        iter = 1;
 
+       double *w_new = new double[n];
        while (iter <= max_iter && search)
        {
                cg_iter = trcg(delta, g, s, r);
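
The tron.cpp change is purely about peak allocation. The gradient norm
at w=0 (used in the stopping condition, per the comment above) is
computed before the main loop and, in the 2.0 sources, uses a temporary
n-vector that is freed immediately afterwards. Deferring the w_new
allocation until after that point means w_new never coexists with the
temporary. A minimal sketch of the pattern, with the function-object
calls stubbed out and tron_sketch as an illustrative name (buffers
allocated inside fun/grad are ignored here):

    #include <cstring>

    void tron_sketch(int n)
    {
            double *s = new double[n];
            double *r = new double[n];
            double *g = new double[n];

            // gradient norm at w=0 needs its own temporary zero vector
            double *w0 = new double[n];
            std::memset(w0, 0, n * sizeof(double));
            // ... fun_obj->fun(w0); fun_obj->grad(w0, g); gnorm0 = ||g|| ...
            delete[] w0;                    // temporary released here

            // moved allocation: w_new never overlaps w0, so the peak is
            // four n-vectors instead of five
            double *w_new = new double[n];
            // ... trust-region iterations using s, r, g, w_new ...

            delete[] w_new;
            delete[] g;
            delete[] r;
            delete[] s;
    }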