#if MX_API_VER < 0x07030000
#define Malloc(type,n) (type *)malloc((n)*sizeof(type))
25 "Usage: model = svmtrain(training_label_vector, training_instance_matrix, 'libsvm_options');\n"
27 "-s svm_type : set type of SVM (default 0)\n"
30 " 2 -- one-class SVM\n"
33 "-t kernel_type : set type of kernel function (default 2)\n"
34 " 0 -- linear: u'*v\n"
35 " 1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
36 " 2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
37 " 3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
38 " 4 -- precomputed kernel (kernel values in training_instance_matrix)\n"
39 "-d degree : set degree in kernel function (default 3)\n"
40 "-g gamma : set gamma in kernel function (default 1/num_features)\n"
41 "-r coef0 : set coef0 in kernel function (default 0)\n"
42 "-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
43 "-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
44 "-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
45 "-m cachesize : set cache memory size in MB (default 100)\n"
46 "-e epsilon : set tolerance of termination criterion (default 0.001)\n"
47 "-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
48 "-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
49 "-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
50 "-v n : n-fold cross validation mode\n"
51 "-q : quiet mode (no outputs)\n"
	int total_correct = 0;
	double total_error = 0;
	double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
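	/* These accumulators drive the regression cross-validation summary below:
	   total_error collects squared residuals for the mean squared error, and
	   sumv/sumy/sumvv/sumyy/sumvy are the running sums needed for the squared
	   correlation coefficient between predicted values v and true targets y. */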
		total_error += (v-y)*(v-y);
		mexPrintf("Cross Validation Mean squared error = %g\n",total_error/prob.l);
		mexPrintf("Cross Validation Squared correlation coefficient = %g\n",
			((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/
			((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy))
			);
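		/* The value printed above is the squared Pearson correlation between
		   the cross-validation predictions v and the targets y:
		   r^2 = (n*Svy - Sv*Sy)^2 / ((n*Svv - Sv^2) * (n*Syy - Sy^2)), with n = prob.l. */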
		retval = total_error/prob.l;
			if(target[i] == prob.y[i])
				++total_correct;
		mexPrintf("Cross Validation Accuracy = %g%%\n",100.0*total_correct/prob.l);
		retval = 100.0*total_correct/prob.l;
	mxGetString(prhs[2], cmd, mxGetN(prhs[2]) + 1);
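	/* Split the libsvm_options string on spaces into an argv[]-style array so
	   the options can be parsed the same way as svm-train's command line. */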
	if((argv[argc] = strtok(cmd, " ")) != NULL)
		while((argv[++argc] = strtok(NULL, " ")) != NULL)
			;
		if(argv[i][0] != '-') break;
		if(i>=argc && argv[i-1][1] != 'q')	// option -q takes no argument
				mexPrintf("n-fold cross validation: n must be >= 2\n");
				mexPrintf("Unknown option -%c\n", argv[i-1][1]);
	int elements, max_index, sc, label_vector_row_num;
	double *samples, *labels;
	labels = mxGetPr(label_vec);
	samples = mxGetPr(instance_mat);
	sc = (int)mxGetN(instance_mat);
	prob.l = (int)mxGetM(instance_mat);
	label_vector_row_num = (int)mxGetM(label_vec);
	if(label_vector_row_num != prob.l)
		mexPrintf("Length of label vector does not match # of instances.\n");
	elements = prob.l * (sc + 1);
	for(i = 0; i < prob.l; i++)
	{
		for(k = 0; k < sc; k++)
			if(samples[k * prob.l + i] != 0)
				elements++;
		elements++;	// one more node per instance for the index = -1 terminator
	}
	for(i = 0; i < prob.l; i++)
	{
		prob.y[i] = labels[i];

		for(k = 0; k < sc; k++)
	for(i=0;i<prob.l;i++)
			mexPrintf("Wrong input format: sample_serial_number out of range\n");
	int i, j, k, low, high;
	int elements, max_index, num_samples, label_vector_row_num;
	double *samples, *labels;
	mxArray *instance_mat_col;
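	/* MATLAB stores sparse matrices in compressed sparse column (CSC) form, so
	   the instance matrix is transposed first: each training instance then
	   occupies one column and its nonzeros sit contiguously in ir/samples. */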
	mxArray *prhs[1], *plhs[1];
	prhs[0] = mxDuplicateArray(instance_mat);
	if(mexCallMATLAB(1, plhs, 1, prhs, "transpose"))
	{
		mexPrintf("Error: cannot transpose training instance matrix\n");
		return -1;
	}
	instance_mat_col = plhs[0];
	mxDestroyArray(prhs[0]);
	labels = mxGetPr(label_vec);
	samples = mxGetPr(instance_mat_col);
	ir = mxGetIr(instance_mat_col);
	jc = mxGetJc(instance_mat_col);

	num_samples = (int)mxGetNzmax(instance_mat_col);
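	/* In CSC form, ir[k] is the row index of the k-th stored value in samples[],
	   jc[i]..jc[i+1]-1 index the stored values of column i, and nzmax is an
	   upper bound on the number of stored entries. */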
	prob.l = (int)mxGetN(instance_mat_col);
	label_vector_row_num = (int)mxGetM(label_vec);
	if(label_vector_row_num != prob.l)
		mexPrintf("Length of label vector does not match # of instances.\n");
	elements = num_samples + prob.l;
	max_index = (int)mxGetM(instance_mat_col);
	for(i=0;i<prob.l;i++)
	{
		prob.y[i] = labels[i];
		low = (int)jc[i], high = (int)jc[i+1];
		for(k=low;k<high;k++)
	plhs[0] = mxCreateDoubleMatrix(0, 0, mxREAL);
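	/* Error path: the line above hands back an empty 0x0 matrix in place of a
	   trained model, so a failed call still produces a well-defined output. */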
void mexFunction( int nlhs, mxArray *plhs[],
		int nrhs, const mxArray *prhs[] )
{
	const char *error_msg;
	if(nrhs > 1 && nrhs < 4)
		if(!mxIsDouble(prhs[0]) || !mxIsDouble(prhs[1])) {
			mexPrintf("Error: label vector and instance matrix must be double\n");
		if(mxIsSparse(prhs[1]))
			mxArray *rhs[1], *lhs[1];
			rhs[0] = mxDuplicateArray(prhs[1]);
			if(mexCallMATLAB(1, lhs, 1, rhs, "full"))
				mexPrintf("Error: cannot generate a full training instance matrix\n");
			mxDestroyArray(lhs[0]);
			mxDestroyArray(rhs[0]);
		if (error_msg != NULL)
			mexPrintf("Error: %s\n", error_msg);
			plhs[0] = mxCreateDoubleMatrix(1, 1, mxREAL);
			ptr = mxGetPr(plhs[0]);
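			/* In cross-validation mode (-v) the MEX file returns a single scalar:
			   accuracy for classification, mean squared error for regression,
			   rather than a trained model structure. */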
			int nr_feat = (int)mxGetN(prhs[1]);
			const char *error_msg;
				mexPrintf("Error: can't convert libsvm model to matrix structure: %s\n", error_msg);