00001 #include <stdio.h>
00002 #include <stdlib.h>
00003 #include <string.h>
00004 #include <ctype.h>
00005 #include "svm.h"
00006
00007 #include "mex.h"
00008 #include "svm_model_matlab.h"
00009
00010 #ifdef MX_API_VER
00011 #if MX_API_VER < 0x07030000
00012 typedef int mwIndex;
00013 #endif
00014 #endif
00015
00016 #define CMD_LEN 2048
00017 #define Malloc(type,n) (type *)malloc((n)*sizeof(type))
00018
/* No-op print sink: installed via svm_set_print_string_function for '-q' (quiet mode). */
void print_null(const char *s) {}
/* Default print sink: forwards libsvm's console output to the MATLAB command window. */
void print_string_matlab(const char *s) {mexPrintf(s);}
00021
/* Print the svmtrain usage message (option list mirrors libsvm's svm-train).
 * Despite the name, this does not exit -- callers return after invoking it. */
void exit_with_help()
{
	mexPrintf(
	"Usage: model = svmtrain(training_label_vector, training_instance_matrix, 'libsvm_options');\n"
	"libsvm_options:\n"
	"-s svm_type : set type of SVM (default 0)\n"
	"	0 -- C-SVC		(multi-class classification)\n"
	"	1 -- nu-SVC		(multi-class classification)\n"
	"	2 -- one-class SVM\n"
	"	3 -- epsilon-SVR	(regression)\n"
	"	4 -- nu-SVR		(regression)\n"
	"-t kernel_type : set type of kernel function (default 2)\n"
	"	0 -- linear: u'*v\n"
	"	1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
	"	2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
	"	3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
	"	4 -- precomputed kernel (kernel values in training_instance_matrix)\n"
	"-d degree : set degree in kernel function (default 3)\n"
	"-g gamma : set gamma in kernel function (default 1/num_features)\n"
	"-r coef0 : set coef0 in kernel function (default 0)\n"
	"-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
	"-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
	"-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
	"-m cachesize : set cache memory size in MB (default 100)\n"
	"-e epsilon : set tolerance of termination criterion (default 0.001)\n"
	"-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
	"-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
	"-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
	"-v n : n-fold cross validation mode\n"
	"-q : quiet mode (no outputs)\n"
	);
}
00054
00055
/* File-scope state shared between mexFunction and its helpers
 * (a MEX call is single-threaded, so plain globals are used). */
struct svm_parameter param;	/* filled by parse_command_line */
struct svm_problem prob;	/* filled by read_problem_dense/read_problem_sparse */
struct svm_model *model;	/* result of svm_train, freed after conversion */
struct svm_node *x_space;	/* backing storage for all prob.x node arrays */
int cross_validation;		/* nonzero when '-v' was given */
int nr_fold;			/* fold count for cross validation */
00062
00063
00064 double do_cross_validation()
00065 {
00066 int i;
00067 int total_correct = 0;
00068 double total_error = 0;
00069 double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
00070 double *target = Malloc(double,prob.l);
00071 double retval = 0.0;
00072
00073 svm_cross_validation(&prob,¶m,nr_fold,target);
00074 if(param.svm_type == EPSILON_SVR ||
00075 param.svm_type == NU_SVR)
00076 {
00077 for(i=0;i<prob.l;i++)
00078 {
00079 double y = prob.y[i];
00080 double v = target[i];
00081 total_error += (v-y)*(v-y);
00082 sumv += v;
00083 sumy += y;
00084 sumvv += v*v;
00085 sumyy += y*y;
00086 sumvy += v*y;
00087 }
00088 mexPrintf("Cross Validation Mean squared error = %g\n",total_error/prob.l);
00089 mexPrintf("Cross Validation Squared correlation coefficient = %g\n",
00090 ((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/
00091 ((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy))
00092 );
00093 retval = total_error/prob.l;
00094 }
00095 else
00096 {
00097 for(i=0;i<prob.l;i++)
00098 if(target[i] == prob.y[i])
00099 ++total_correct;
00100 mexPrintf("Cross Validation Accuracy = %g%%\n",100.0*total_correct/prob.l);
00101 retval = 100.0*total_correct/prob.l;
00102 }
00103 free(target);
00104 return retval;
00105 }
00106
00107
00108 int parse_command_line(int nrhs, const mxArray *prhs[], char *model_file_name)
00109 {
00110 int i, argc = 1;
00111 char cmd[CMD_LEN];
00112 char *argv[CMD_LEN/2];
00113 void (*print_func)(const char *) = print_string_matlab;
00114
00115
00116 param.svm_type = C_SVC;
00117 param.kernel_type = RBF;
00118 param.degree = 3;
00119 param.gamma = 0;
00120 param.coef0 = 0;
00121 param.nu = 0.5;
00122 param.cache_size = 100;
00123 param.C = 1;
00124 param.eps = 1e-3;
00125 param.p = 0.1;
00126 param.shrinking = 1;
00127 param.probability = 0;
00128 param.nr_weight = 0;
00129 param.weight_label = NULL;
00130 param.weight = NULL;
00131 cross_validation = 0;
00132
00133 if(nrhs <= 1)
00134 return 1;
00135
00136 if(nrhs > 2)
00137 {
00138
00139 mxGetString(prhs[2], cmd, mxGetN(prhs[2]) + 1);
00140 if((argv[argc] = strtok(cmd, " ")) != NULL)
00141 while((argv[++argc] = strtok(NULL, " ")) != NULL)
00142 ;
00143 }
00144
00145
00146 for(i=1;i<argc;i++)
00147 {
00148 if(argv[i][0] != '-') break;
00149 ++i;
00150 if(i>=argc && argv[i-1][1] != 'q')
00151 return 1;
00152 switch(argv[i-1][1])
00153 {
00154 case 's':
00155 param.svm_type = atoi(argv[i]);
00156 break;
00157 case 't':
00158 param.kernel_type = atoi(argv[i]);
00159 break;
00160 case 'd':
00161 param.degree = atoi(argv[i]);
00162 break;
00163 case 'g':
00164 param.gamma = atof(argv[i]);
00165 break;
00166 case 'r':
00167 param.coef0 = atof(argv[i]);
00168 break;
00169 case 'n':
00170 param.nu = atof(argv[i]);
00171 break;
00172 case 'm':
00173 param.cache_size = atof(argv[i]);
00174 break;
00175 case 'c':
00176 param.C = atof(argv[i]);
00177 break;
00178 case 'e':
00179 param.eps = atof(argv[i]);
00180 break;
00181 case 'p':
00182 param.p = atof(argv[i]);
00183 break;
00184 case 'h':
00185 param.shrinking = atoi(argv[i]);
00186 break;
00187 case 'b':
00188 param.probability = atoi(argv[i]);
00189 break;
00190 case 'q':
00191 print_func = &print_null;
00192 i--;
00193 break;
00194 case 'v':
00195 cross_validation = 1;
00196 nr_fold = atoi(argv[i]);
00197 if(nr_fold < 2)
00198 {
00199 mexPrintf("n-fold cross validation: n must >= 2\n");
00200 return 1;
00201 }
00202 break;
00203 case 'w':
00204 ++param.nr_weight;
00205 param.weight_label = (int *)realloc(param.weight_label,sizeof(int)*param.nr_weight);
00206 param.weight = (double *)realloc(param.weight,sizeof(double)*param.nr_weight);
00207 param.weight_label[param.nr_weight-1] = atoi(&argv[i-1][2]);
00208 param.weight[param.nr_weight-1] = atof(argv[i]);
00209 break;
00210 default:
00211 mexPrintf("Unknown option -%c\n", argv[i-1][1]);
00212 return 1;
00213 }
00214 }
00215
00216 svm_set_print_string_function(print_func);
00217
00218 return 0;
00219 }
00220
00221
/* Convert a dense MATLAB label vector + instance matrix (one instance per
 * row) into the global libsvm problem (prob, x_space).
 * Returns 0 on success, -1 on a format error (message already printed).
 * Side effects: allocates prob.y/prob.x/x_space (caller frees) and sets
 * param.gamma to 1/num_features when it is still 0. */
int read_problem_dense(const mxArray *label_vec, const mxArray *instance_mat)
{
	/* size_t to match the return types of the mx* dimension queries */
	size_t i, j, k, l;
	size_t elements, max_index, sc, label_vector_row_num;
	double *samples, *labels;

	prob.x = NULL;
	prob.y = NULL;
	x_space = NULL;

	labels = mxGetPr(label_vec);
	samples = mxGetPr(instance_mat);
	sc = mxGetN(instance_mat);	/* number of features (columns) */

	elements = 0;
	/* number of instances (rows) */
	l = mxGetM(instance_mat);
	label_vector_row_num = mxGetM(label_vec);
	prob.l = (int)l;

	if(label_vector_row_num!=l)
	{
		mexPrintf("Length of label vector does not match # of instances.\n");
		return -1;
	}

	if(param.kernel_type == PRECOMPUTED)
		elements = l * (sc + 1);	/* keep every entry plus one terminator per row */
	else
	{
		/* count nonzeros; each instance also needs an index=-1 terminator */
		for(i = 0; i < l; i++)
		{
			for(k = 0; k < sc; k++)
				if(samples[k * l + i] != 0)	/* MATLAB arrays are column-major */
					elements++;
			/* room for the terminator node */
			elements++;
		}
	}

	prob.y = Malloc(double,l);
	prob.x = Malloc(struct svm_node *,l);
	x_space = Malloc(struct svm_node, elements);

	max_index = sc;
	j = 0;
	for(i = 0; i < l; i++)
	{
		prob.x[i] = &x_space[j];
		prob.y[i] = labels[i];

		for(k = 0; k < sc; k++)
		{
			/* precomputed kernels keep zero entries too (values are kernel entries) */
			if(param.kernel_type == PRECOMPUTED || samples[k * l + i] != 0)
			{
				x_space[j].index = (int)k + 1;	/* libsvm feature indices are 1-based */
				x_space[j].value = samples[k * l + i];
				j++;
			}
		}
		x_space[j++].index = -1;	/* instance terminator */
	}

	if(param.gamma == 0 && max_index > 0)
		param.gamma = (double)(1.0/max_index);	/* default gamma = 1/num_features */

	if(param.kernel_type == PRECOMPUTED)
		for(i=0;i<l;i++)
		{
			/* first entry of each precomputed row must be the 1-based sample serial number */
			if((int)prob.x[i][0].value <= 0 || (int)prob.x[i][0].value > (int)max_index)
			{
				mexPrintf("Wrong input format: sample_serial_number out of range\n");
				return -1;
			}
		}

	return 0;
}
00301
/* Convert a MATLAB label vector + sparse instance matrix into the global
 * libsvm problem (prob, x_space).
 * The matrix is first transposed (via mexCallMATLAB) so each instance
 * becomes a column, letting us walk MATLAB's compressed-column (ir/jc)
 * storage one instance at a time.
 * Returns 0 on success, -1 on error (message already printed).
 * Side effects: allocates prob.y/prob.x/x_space (caller frees) and sets
 * param.gamma to 1/num_features when it is still 0. */
int read_problem_sparse(const mxArray *label_vec, const mxArray *instance_mat)
{
	mwIndex *ir, *jc, low, high, k;
	/* size_t to match the return types of the mx* dimension queries */
	size_t i, j, l, elements, max_index, label_vector_row_num;
	mwSize num_samples;
	double *samples, *labels;
	mxArray *instance_mat_col;	/* transposed instance matrix: instances as columns */

	prob.x = NULL;
	prob.y = NULL;
	x_space = NULL;

	/* transpose instance matrix so each instance is one sparse column */
	{
		mxArray *prhs[1], *plhs[1];
		prhs[0] = mxDuplicateArray(instance_mat);
		if(mexCallMATLAB(1, plhs, 1, prhs, "transpose"))
		{
			mexPrintf("Error: cannot transpose training instance matrix\n");
			return -1;
		}
		instance_mat_col = plhs[0];
		mxDestroyArray(prhs[0]);
	}

	/* each column is one instance */
	labels = mxGetPr(label_vec);
	samples = mxGetPr(instance_mat_col);
	ir = mxGetIr(instance_mat_col);	/* row index of each stored nonzero */
	jc = mxGetJc(instance_mat_col);	/* per-column start offsets into ir/samples */

	num_samples = mxGetNzmax(instance_mat_col);

	/* number of instances */
	l = mxGetN(instance_mat_col);
	label_vector_row_num = mxGetM(label_vec);
	prob.l = (int) l;

	if(label_vector_row_num!=l)
	{
		mexPrintf("Length of label vector does not match # of instances.\n");
		return -1;
	}

	elements = num_samples + l;	/* nonzeros plus one index=-1 terminator per instance */
	max_index = mxGetM(instance_mat_col);	/* feature count (rows after transpose) */

	prob.y = Malloc(double,l);
	prob.x = Malloc(struct svm_node *,l);
	x_space = Malloc(struct svm_node, elements);

	j = 0;
	for(i=0;i<l;i++)
	{
		prob.x[i] = &x_space[j];
		prob.y[i] = labels[i];
		low = jc[i], high = jc[i+1];	/* nonzero span of column i */
		for(k=low;k<high;k++)
		{
			x_space[j].index = (int)ir[k] + 1;	/* libsvm feature indices are 1-based */
			x_space[j].value = samples[k];
			j++;
		}
		x_space[j++].index = -1;	/* instance terminator */
	}

	if(param.gamma == 0 && max_index > 0)
		param.gamma = (double)(1.0/max_index);	/* default gamma = 1/num_features */

	return 0;
}
00374
00375 static void fake_answer(int nlhs, mxArray *plhs[])
00376 {
00377 int i;
00378 for(i=0;i<nlhs;i++)
00379 plhs[i] = mxCreateDoubleMatrix(0, 0, mxREAL);
00380 }
00381
00382
00383
00384 void mexFunction( int nlhs, mxArray *plhs[],
00385 int nrhs, const mxArray *prhs[] )
00386 {
00387 const char *error_msg;
00388
00389
00390
00391 srand(1);
00392
00393 if(nlhs > 1)
00394 {
00395 exit_with_help();
00396 fake_answer(nlhs, plhs);
00397 return;
00398 }
00399
00400
00401 if(nrhs > 1 && nrhs < 4)
00402 {
00403 int err;
00404
00405 if(!mxIsDouble(prhs[0]) || !mxIsDouble(prhs[1]))
00406 {
00407 mexPrintf("Error: label vector and instance matrix must be double\n");
00408 fake_answer(nlhs, plhs);
00409 return;
00410 }
00411
00412 if(mxIsSparse(prhs[0]))
00413 {
00414 mexPrintf("Error: label vector should not be in sparse format\n");
00415 fake_answer(nlhs, plhs);
00416 return;
00417 }
00418
00419 if(parse_command_line(nrhs, prhs, NULL))
00420 {
00421 exit_with_help();
00422 svm_destroy_param(¶m);
00423 fake_answer(nlhs, plhs);
00424 return;
00425 }
00426
00427 if(mxIsSparse(prhs[1]))
00428 {
00429 if(param.kernel_type == PRECOMPUTED)
00430 {
00431
00432 mxArray *rhs[1], *lhs[1];
00433
00434 rhs[0] = mxDuplicateArray(prhs[1]);
00435 if(mexCallMATLAB(1, lhs, 1, rhs, "full"))
00436 {
00437 mexPrintf("Error: cannot generate a full training instance matrix\n");
00438 svm_destroy_param(¶m);
00439 fake_answer(nlhs, plhs);
00440 return;
00441 }
00442 err = read_problem_dense(prhs[0], lhs[0]);
00443 mxDestroyArray(lhs[0]);
00444 mxDestroyArray(rhs[0]);
00445 }
00446 else
00447 err = read_problem_sparse(prhs[0], prhs[1]);
00448 }
00449 else
00450 err = read_problem_dense(prhs[0], prhs[1]);
00451
00452
00453 error_msg = svm_check_parameter(&prob, ¶m);
00454
00455 if(err || error_msg)
00456 {
00457 if (error_msg != NULL)
00458 mexPrintf("Error: %s\n", error_msg);
00459 svm_destroy_param(¶m);
00460 free(prob.y);
00461 free(prob.x);
00462 free(x_space);
00463 fake_answer(nlhs, plhs);
00464 return;
00465 }
00466
00467 if(cross_validation)
00468 {
00469 double *ptr;
00470 plhs[0] = mxCreateDoubleMatrix(1, 1, mxREAL);
00471 ptr = mxGetPr(plhs[0]);
00472 ptr[0] = do_cross_validation();
00473 }
00474 else
00475 {
00476 int nr_feat = (int)mxGetN(prhs[1]);
00477 const char *error_msg;
00478 model = svm_train(&prob, ¶m);
00479 error_msg = model_to_matlab_structure(plhs, nr_feat, model);
00480 if(error_msg)
00481 mexPrintf("Error: can't convert libsvm model to matrix structure: %s\n", error_msg);
00482 svm_free_and_destroy_model(&model);
00483 }
00484 svm_destroy_param(¶m);
00485 free(prob.y);
00486 free(prob.x);
00487 free(x_space);
00488 }
00489 else
00490 {
00491 exit_with_help();
00492 fake_answer(nlhs, plhs);
00493 return;
00494 }
00495 }