if(check_regression_model(model_))
{
model_->w = Malloc(double, w_size);
+ for(i=0; i<w_size; i++)
+ model_->w[i] = 0;
model_->nr_class = 2;
model_->label = NULL;
train_one(prob, param, model_->w, 0, 0);
else
total_w_size = subprob[i].n * submodel->nr_class;
- if(prev_w[i] != NULL && num_unchanged_w >= 0)
+ if(prev_w[i] == NULL)
+ {
+ prev_w[i] = Malloc(double, total_w_size);
+ for(j=0; j<total_w_size; j++)
+ prev_w[i][j] = submodel->w[j];
+ }
+ else if(num_unchanged_w >= 0)
{
double norm_w_diff = 0;
for(j=0; j<total_w_size; j++)
}
else
{
- prev_w[i] = Malloc(double, total_w_size);
for(j=0; j<total_w_size; j++)
prev_w[i][j] = submodel->w[j];
}
free(perm);
free(target);
for(i=0; i<nr_fold; i++)
+ {
+ free(subprob[i].x);
+ free(subprob[i].y);
free(prev_w[i]);
+ }
free(prev_w);
+ free(subprob);
}
double predict_values(const struct model *model_, const struct feature_node *x, double *dec_values)
start_C = param.C;
else
start_C = -1.0;
+ printf("Doing parameter search with %d-fold cross validation.\n", nr_fold);
find_parameter_C(&prob, &param, nr_fold, start_C, max_C, &best_C, &best_rate);
printf("Best C = %lf CV accuracy = %g%%\n", best_C, 100.0*best_rate);
}
int search = 1, iter = 1, inc = 1;
double *s = new double[n];
double *r = new double[n];
- double *w_new = new double[n];
double *g = new double[n];
// calculate gradient norm at w=0 for stopping condition.
iter = 1;
+ double *w_new = new double[n];
while (iter <= max_iter && search)
{
cg_iter = trcg(delta, g, s, r);