svmtrain.c
Go to the documentation of this file.
00001 #include <stdio.h>
00002 #include <stdlib.h>
00003 #include <string.h>
00004 #include <ctype.h>
00005 #include "../svm.h"
00006 
00007 #include "mex.h"
00008 #include "svm_model_matlab.h"
00009 
00010 #ifdef MX_API_VER
00011 #if MX_API_VER < 0x07030000
00012 typedef int mwIndex;
00013 #endif
00014 #endif
00015 
00016 #define CMD_LEN 2048
00017 #define Malloc(type,n) (type *)malloc((n)*sizeof(type))
00018 
// No-op print function installed for -q (quiet mode) so that libsvm's
// training output is discarded.  The cast silences -Wunused-parameter.
void print_null(const char *s) {(void)s;}
// Default print function: routes libsvm's status messages to the MATLAB
// command window (plain printf would not appear there).
// NOTE(review): s is passed as the format string; this is safe only while
// libsvm's messages contain no '%' — confirm before reusing elsewhere.
void print_string_matlab(const char *s) {mexPrintf(s);}
00021 
// Print the usage summary (all supported libsvm options) to the MATLAB
// console.  Called whenever the input arguments cannot be parsed.
void exit_with_help()
{
        mexPrintf(
        "Usage: model = svmtrain(training_label_vector, training_instance_matrix, 'libsvm_options');\n"
        "libsvm_options:\n"
        "-s svm_type : set type of SVM (default 0)\n"
        "       0 -- C-SVC              (multi-class classification)\n"
        "       1 -- nu-SVC             (multi-class classification)\n"
        "       2 -- one-class SVM\n"
        "       3 -- epsilon-SVR        (regression)\n"
        "       4 -- nu-SVR             (regression)\n"
        "-t kernel_type : set type of kernel function (default 2)\n"
        "       0 -- linear: u'*v\n"
        "       1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
        "       2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
        "       3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
        "       4 -- precomputed kernel (kernel values in training_instance_matrix)\n"
        "-d degree : set degree in kernel function (default 3)\n"
        "-g gamma : set gamma in kernel function (default 1/num_features)\n"
        "-r coef0 : set coef0 in kernel function (default 0)\n"
        "-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
        "-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
        "-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
        "-m cachesize : set cache memory size in MB (default 100)\n"
        "-e epsilon : set tolerance of termination criterion (default 0.001)\n"
        "-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
        "-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
        "-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
        "-v n : n-fold cross validation mode\n"
        "-q : quiet mode (no outputs)\n"
        );
}
00054 
// svm arguments — module-level state shared by the helpers below.
// Ownership: prob.y, prob.x and x_space are malloc'd by the read_problem_*
// functions and freed by mexFunction; param's weight arrays are freed via
// svm_destroy_param.
struct svm_parameter param;             // set by parse_command_line
struct svm_problem prob;                // set by read_problem_dense / read_problem_sparse
struct svm_model *model;                // result of svm_train (non-CV path)
struct svm_node *x_space;               // backing storage for all prob.x rows
int cross_validation;                   // nonzero when -v was given
int nr_fold;                            // fold count for -v
00062 
00063 
00064 double do_cross_validation()
00065 {
00066         int i;
00067         int total_correct = 0;
00068         double total_error = 0;
00069         double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
00070         double *target = Malloc(double,prob.l);
00071         double retval = 0.0;
00072 
00073         svm_cross_validation(&prob,&param,nr_fold,target);
00074         if(param.svm_type == EPSILON_SVR ||
00075            param.svm_type == NU_SVR)
00076         {
00077                 for(i=0;i<prob.l;i++)
00078                 {
00079                         double y = prob.y[i];
00080                         double v = target[i];
00081                         total_error += (v-y)*(v-y);
00082                         sumv += v;
00083                         sumy += y;
00084                         sumvv += v*v;
00085                         sumyy += y*y;
00086                         sumvy += v*y;
00087                 }
00088                 mexPrintf("Cross Validation Mean squared error = %g\n",total_error/prob.l);
00089                 mexPrintf("Cross Validation Squared correlation coefficient = %g\n",
00090                         ((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/
00091                         ((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy))
00092                         );
00093                 retval = total_error/prob.l;
00094         }
00095         else
00096         {
00097                 for(i=0;i<prob.l;i++)
00098                         if(target[i] == prob.y[i])
00099                                 ++total_correct;
00100                 mexPrintf("Cross Validation Accuracy = %g%%\n",100.0*total_correct/prob.l);
00101                 retval = 100.0*total_correct/prob.l;
00102         }
00103         free(target);
00104         return retval;
00105 }
00106 
00107 // nrhs should be 3
00108 int parse_command_line(int nrhs, const mxArray *prhs[], char *model_file_name)
00109 {
00110         int i, argc = 1;
00111         char cmd[CMD_LEN];
00112         char *argv[CMD_LEN/2];
00113         void (*print_func)(const char *) = print_string_matlab; // default printing to matlab display
00114 
00115         // default values
00116         param.svm_type = C_SVC;
00117         param.kernel_type = RBF;
00118         param.degree = 3;
00119         param.gamma = 0;        // 1/num_features
00120         param.coef0 = 0;
00121         param.nu = 0.5;
00122         param.cache_size = 100;
00123         param.C = 1;
00124         param.eps = 1e-3;
00125         param.p = 0.1;
00126         param.shrinking = 1;
00127         param.probability = 0;
00128         param.nr_weight = 0;
00129         param.weight_label = NULL;
00130         param.weight = NULL;
00131         cross_validation = 0;
00132 
00133         if(nrhs <= 1)
00134                 return 1;
00135 
00136         if(nrhs > 2)
00137         {
00138                 // put options in argv[]
00139                 mxGetString(prhs[2], cmd, mxGetN(prhs[2]) + 1);
00140                 if((argv[argc] = strtok(cmd, " ")) != NULL)
00141                         while((argv[++argc] = strtok(NULL, " ")) != NULL)
00142                                 ;
00143         }
00144 
00145         // parse options
00146         for(i=1;i<argc;i++)
00147         {
00148                 if(argv[i][0] != '-') break;
00149                 ++i;
00150                 if(i>=argc && argv[i-1][1] != 'q')      // since option -q has no parameter
00151                         return 1;
00152                 switch(argv[i-1][1])
00153                 {
00154                         case 's':
00155                                 param.svm_type = atoi(argv[i]);
00156                                 break;
00157                         case 't':
00158                                 param.kernel_type = atoi(argv[i]);
00159                                 break;
00160                         case 'd':
00161                                 param.degree = atoi(argv[i]);
00162                                 break;
00163                         case 'g':
00164                                 param.gamma = atof(argv[i]);
00165                                 break;
00166                         case 'r':
00167                                 param.coef0 = atof(argv[i]);
00168                                 break;
00169                         case 'n':
00170                                 param.nu = atof(argv[i]);
00171                                 break;
00172                         case 'm':
00173                                 param.cache_size = atof(argv[i]);
00174                                 break;
00175                         case 'c':
00176                                 param.C = atof(argv[i]);
00177                                 break;
00178                         case 'e':
00179                                 param.eps = atof(argv[i]);
00180                                 break;
00181                         case 'p':
00182                                 param.p = atof(argv[i]);
00183                                 break;
00184                         case 'h':
00185                                 param.shrinking = atoi(argv[i]);
00186                                 break;
00187                         case 'b':
00188                                 param.probability = atoi(argv[i]);
00189                                 break;
00190                         case 'q':
00191                                 print_func = &print_null;
00192                                 i--;
00193                                 break;
00194                         case 'v':
00195                                 cross_validation = 1;
00196                                 nr_fold = atoi(argv[i]);
00197                                 if(nr_fold < 2)
00198                                 {
00199                                         mexPrintf("n-fold cross validation: n must >= 2\n");
00200                                         return 1;
00201                                 }
00202                                 break;
00203                         case 'w':
00204                                 ++param.nr_weight;
00205                                 param.weight_label = (int *)realloc(param.weight_label,sizeof(int)*param.nr_weight);
00206                                 param.weight = (double *)realloc(param.weight,sizeof(double)*param.nr_weight);
00207                                 param.weight_label[param.nr_weight-1] = atoi(&argv[i-1][2]);
00208                                 param.weight[param.nr_weight-1] = atof(argv[i]);
00209                                 break;
00210                         default:
00211                                 mexPrintf("Unknown option -%c\n", argv[i-1][1]);
00212                                 return 1;
00213                 }
00214         }
00215 
00216         svm_set_print_string_function(print_func);
00217 
00218         return 0;
00219 }
00220 
00221 // read in a problem (in svmlight format)
00222 int read_problem_dense(const mxArray *label_vec, const mxArray *instance_mat)
00223 {
00224         int i, j, k;
00225         int elements, max_index, sc, label_vector_row_num;
00226         double *samples, *labels;
00227 
00228         prob.x = NULL;
00229         prob.y = NULL;
00230         x_space = NULL;
00231 
00232         labels = mxGetPr(label_vec);
00233         samples = mxGetPr(instance_mat);
00234         sc = (int)mxGetN(instance_mat);
00235 
00236         elements = 0;
00237         // the number of instance
00238         prob.l = (int)mxGetM(instance_mat);
00239         label_vector_row_num = (int)mxGetM(label_vec);
00240 
00241         if(label_vector_row_num!=prob.l)
00242         {
00243                 mexPrintf("Length of label vector does not match # of instances.\n");
00244                 return -1;
00245         }
00246 
00247         if(param.kernel_type == PRECOMPUTED)
00248                 elements = prob.l * (sc + 1);
00249         else
00250         {
00251                 for(i = 0; i < prob.l; i++)
00252                 {
00253                         for(k = 0; k < sc; k++)
00254                                 if(samples[k * prob.l + i] != 0)
00255                                         elements++;
00256                         // count the '-1' element
00257                         elements++;
00258                 }
00259         }
00260 
00261         prob.y = Malloc(double,prob.l);
00262         prob.x = Malloc(struct svm_node *,prob.l);
00263         x_space = Malloc(struct svm_node, elements);
00264 
00265         max_index = sc;
00266         j = 0;
00267         for(i = 0; i < prob.l; i++)
00268         {
00269                 prob.x[i] = &x_space[j];
00270                 prob.y[i] = labels[i];
00271 
00272                 for(k = 0; k < sc; k++)
00273                 {
00274                         if(param.kernel_type == PRECOMPUTED || samples[k * prob.l + i] != 0)
00275                         {
00276                                 x_space[j].index = k + 1;
00277                                 x_space[j].value = samples[k * prob.l + i];
00278                                 j++;
00279                         }
00280                 }
00281                 x_space[j++].index = -1;
00282         }
00283 
00284         if(param.gamma == 0 && max_index > 0)
00285                 param.gamma = 1.0/max_index;
00286 
00287         if(param.kernel_type == PRECOMPUTED)
00288                 for(i=0;i<prob.l;i++)
00289                 {
00290                         if((int)prob.x[i][0].value <= 0 || (int)prob.x[i][0].value > max_index)
00291                         {
00292                                 mexPrintf("Wrong input format: sample_serial_number out of range\n");
00293                                 return -1;
00294                         }
00295                 }
00296 
00297         return 0;
00298 }
00299 
00300 int read_problem_sparse(const mxArray *label_vec, const mxArray *instance_mat)
00301 {
00302         int i, j, k, low, high;
00303         mwIndex *ir, *jc;
00304         int elements, max_index, num_samples, label_vector_row_num;
00305         double *samples, *labels;
00306         mxArray *instance_mat_col; // transposed instance sparse matrix
00307 
00308         prob.x = NULL;
00309         prob.y = NULL;
00310         x_space = NULL;
00311 
00312         // transpose instance matrix
00313         {
00314                 mxArray *prhs[1], *plhs[1];
00315                 prhs[0] = mxDuplicateArray(instance_mat);
00316                 if(mexCallMATLAB(1, plhs, 1, prhs, "transpose"))
00317                 {
00318                         mexPrintf("Error: cannot transpose training instance matrix\n");
00319                         return -1;
00320                 }
00321                 instance_mat_col = plhs[0];
00322                 mxDestroyArray(prhs[0]);
00323         }
00324 
00325         // each column is one instance
00326         labels = mxGetPr(label_vec);
00327         samples = mxGetPr(instance_mat_col);
00328         ir = mxGetIr(instance_mat_col);
00329         jc = mxGetJc(instance_mat_col);
00330 
00331         num_samples = (int)mxGetNzmax(instance_mat_col);
00332 
00333         // the number of instance
00334         prob.l = (int)mxGetN(instance_mat_col);
00335         label_vector_row_num = (int)mxGetM(label_vec);
00336 
00337         if(label_vector_row_num!=prob.l)
00338         {
00339                 mexPrintf("Length of label vector does not match # of instances.\n");
00340                 return -1;
00341         }
00342 
00343         elements = num_samples + prob.l;
00344         max_index = (int)mxGetM(instance_mat_col);
00345 
00346         prob.y = Malloc(double,prob.l);
00347         prob.x = Malloc(struct svm_node *,prob.l);
00348         x_space = Malloc(struct svm_node, elements);
00349 
00350         j = 0;
00351         for(i=0;i<prob.l;i++)
00352         {
00353                 prob.x[i] = &x_space[j];
00354                 prob.y[i] = labels[i];
00355                 low = (int)jc[i], high = (int)jc[i+1];
00356                 for(k=low;k<high;k++)
00357                 {
00358                         x_space[j].index = (int)ir[k] + 1;
00359                         x_space[j].value = samples[k];
00360                         j++;
00361                 }
00362                 x_space[j++].index = -1;
00363         }
00364 
00365         if(param.gamma == 0 && max_index > 0)
00366                 param.gamma = 1.0/max_index;
00367 
00368         return 0;
00369 }
00370 
// Assign an empty 0x0 matrix to plhs[0] so the caller's output argument is
// always defined, even when training fails.
static void fake_answer(mxArray *plhs[])
{
        plhs[0] = mxCreateDoubleMatrix(0, 0, mxREAL);
}
00375 
00376 // Interface function of matlab
00377 // now assume prhs[0]: label prhs[1]: features
00378 void mexFunction( int nlhs, mxArray *plhs[],
00379                 int nrhs, const mxArray *prhs[] )
00380 {
00381         const char *error_msg;
00382 
00383         // fix random seed to have same results for each run
00384         // (for cross validation and probability estimation)
00385         srand(1);
00386 
00387         // Transform the input Matrix to libsvm format
00388         if(nrhs > 1 && nrhs < 4)
00389         {
00390                 int err;
00391 
00392                 if(!mxIsDouble(prhs[0]) || !mxIsDouble(prhs[1])) {
00393                         mexPrintf("Error: label vector and instance matrix must be double\n");
00394                         fake_answer(plhs);
00395                         return;
00396                 }
00397 
00398                 if(parse_command_line(nrhs, prhs, NULL))
00399                 {
00400                         exit_with_help();
00401                         svm_destroy_param(&param);
00402                         fake_answer(plhs);
00403                         return;
00404                 }
00405 
00406                 if(mxIsSparse(prhs[1]))
00407                 {
00408                         if(param.kernel_type == PRECOMPUTED)
00409                         {
00410                                 // precomputed kernel requires dense matrix, so we make one
00411                                 mxArray *rhs[1], *lhs[1];
00412 
00413                                 rhs[0] = mxDuplicateArray(prhs[1]);
00414                                 if(mexCallMATLAB(1, lhs, 1, rhs, "full"))
00415                                 {
00416                                         mexPrintf("Error: cannot generate a full training instance matrix\n");
00417                                         svm_destroy_param(&param);
00418                                         fake_answer(plhs);
00419                                         return;
00420                                 }
00421                                 err = read_problem_dense(prhs[0], lhs[0]);
00422                                 mxDestroyArray(lhs[0]);
00423                                 mxDestroyArray(rhs[0]);
00424                         }
00425                         else
00426                                 err = read_problem_sparse(prhs[0], prhs[1]);
00427                 }
00428                 else
00429                         err = read_problem_dense(prhs[0], prhs[1]);
00430 
00431                 // svmtrain's original code
00432                 error_msg = svm_check_parameter(&prob, &param);
00433 
00434                 if(err || error_msg)
00435                 {
00436                         if (error_msg != NULL)
00437                                 mexPrintf("Error: %s\n", error_msg);
00438                         svm_destroy_param(&param);
00439                         free(prob.y);
00440                         free(prob.x);
00441                         free(x_space);
00442                         fake_answer(plhs);
00443                         return;
00444                 }
00445 
00446                 if(cross_validation)
00447                 {
00448                         double *ptr;
00449                         plhs[0] = mxCreateDoubleMatrix(1, 1, mxREAL);
00450                         ptr = mxGetPr(plhs[0]);
00451                         ptr[0] = do_cross_validation();
00452                 }
00453                 else
00454                 {
00455                         int nr_feat = (int)mxGetN(prhs[1]);
00456                         const char *error_msg;
00457                         model = svm_train(&prob, &param);
00458                         error_msg = model_to_matlab_structure(plhs, nr_feat, model);
00459                         if(error_msg)
00460                                 mexPrintf("Error: can't convert libsvm model to matrix structure: %s\n", error_msg);
00461                         svm_free_and_destroy_model(&model);
00462                 }
00463                 svm_destroy_param(&param);
00464                 free(prob.y);
00465                 free(prob.x);
00466                 free(x_space);
00467         }
00468         else
00469         {
00470                 exit_with_help();
00471                 fake_answer(plhs);
00472                 return;
00473         }
00474 }


ml_classifiers
Author(s): Scott Niekum
autogenerated on Fri Jan 3 2014 11:30:23