granicus.if.org Git - liblinear/commitdiff
Fix some minor indentation and space issues
author Chih-Jen Lin <cjlin@csie.ntu.edu.tw>
Mon, 18 Mar 2019 18:32:15 +0000 (02:32 +0800)
committer Chih-Jen Lin <cjlin@csie.ntu.edu.tw>
Mon, 18 Mar 2019 18:32:15 +0000 (02:32 +0800)
linear.cpp
python/liblinearutil.py

linear.cpp
index 5dfab75d8cda69e394bd8b4911087b5a55eb07df..063e01c028285c01699e298b87031308e7cf2840 100644
@@ -2713,12 +2713,14 @@ void find_parameters(const problem *prob, const parameter *param, int nr_fold, d
        *best_p = -1;
        if(param->solver_type == L2R_LR || param->solver_type == L2R_L2LOSS_SVC)
        {
-               if( start_C <= 0)
+               if(start_C <= 0)
                        start_C = calc_start_C(prob, &param_tmp);
                double max_C = 1024;
+               start_C = min(start_C, max_C);          
                double best_C_tmp, best_score_tmp;
-        start_C = min(start_C, max_C);
+               
                find_parameter_C(prob, &param_tmp, start_C, max_C, &best_C_tmp, &best_score_tmp, fold_start, perm, subprob, nr_fold);
+               
                *best_C = best_C_tmp;
                *best_score = best_score_tmp;
        }
@@ -2735,14 +2737,16 @@ void find_parameters(const problem *prob, const parameter *param, int nr_fold, d
                for(; i >= 0; i--)
                {
                        param_tmp.p = i*max_p/num_p_steps;
-      double start_C_tmp;
-                       if( start_C <= 0)
+                       double start_C_tmp;
+                       if(start_C <= 0)
                                start_C_tmp = calc_start_C(prob, &param_tmp);
                        else
                                start_C_tmp = start_C;
-                       double best_C_tmp, best_score_tmp;
                        start_C_tmp = min(start_C_tmp, max_C);
+                       double best_C_tmp, best_score_tmp;
+                       
                        find_parameter_C(prob, &param_tmp, start_C_tmp, max_C, &best_C_tmp, &best_score_tmp, fold_start, perm, subprob, nr_fold);
+                       
                        if(best_score_tmp < *best_score)
                        {
                                *best_p = param_tmp.p;
python/liblinearutil.py
index 0bc0ba8099bdafea8391404aeab48d623547b696..19190192e078ffb6360354fd5512b624c1b5ec81 100644
@@ -94,7 +94,7 @@ def train(arg1, arg2=None, arg3=None):
                -B bias : if bias >= 0, instance x becomes [x; bias]; if < 0, no bias term added (default -1)
                -wi weight: weights adjust the parameter C of different classes (see README for details)
                -v n: n-fold cross validation mode
-               "-C : find parameters (C for -s 0, 2 and C, p for -s 11)\n"
+               -C : find parameters (C for -s 0, 2 and C, p for -s 11)\n
                -q : quiet mode (no outputs)
        """
        prob, param = None, None
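
For context, the docstring above documents the -C option of train() in the python interface, which drives the find_parameters() routine touched in linear.cpp above. The sketch below shows how that option is typically invoked; it assumes the python API of roughly this release (liblinear 2.30), in which svm_read_problem() loads LIBSVM-format data and train() with '-C' returns the best values found by the cross-validation search. The data file name and the exact return tuple are assumptions, not taken from this diff.

# Minimal usage sketch (assumed python interface around this release);
# 'heart_scale' is only an example data file in LIBSVM format.
from liblinearutil import svm_read_problem, train

y, x = svm_read_problem('heart_scale')

# -s 0 (L2-regularized logistic regression): search over C only.
# Return shape assumed: (best_C, best_p, best_score); best_p is unused here.
best_C, best_p, best_score = train(y, x, '-C -s 0')

# -s 11 (L2-regularized L2-loss SVR): search over C and p jointly,
# the case handled by the second linear.cpp hunk above.
best_C, best_p, best_score = train(y, x, '-C -s 11')

# Retrain on the full data with the selected parameters.
m = train(y, x, '-s 11 -c %g -p %g' % (best_C, best_p))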