GestureRecognitionToolkit  Version: 0.2.0
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source, c++ machine learning library for real-time gesture recognition.
SVM.cpp
1 /*
2 GRT MIT License
3 Copyright (c) <2012> <Nicholas Gillian, Media Lab, MIT>
4 
5 Permission is hereby granted, free of charge, to any person obtaining a copy of this software
6 and associated documentation files (the "Software"), to deal in the Software without restriction,
7 including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
9 subject to the following conditions:
10 
11 The above copyright notice and this permission notice shall be included in all copies or substantial
12 portions of the Software.
13 
14 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
15 LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
16 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
17 WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
18 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 */
20 
21 #define GRT_DLL_EXPORTS
22 #include "SVM.h"
23 
24 GRT_BEGIN_NAMESPACE
25 
26 //Register the SVM module with the Classifier base class
27 RegisterClassifierModule< SVM > SVM::registerModule("SVM");
28 
//Default constructor.  Primes the LIBSVM structures (model, param, prob) with
//safe NULL/zero defaults so that clear()/init() can run, then forwards all of
//the user-supplied settings to init(...) for validation and assignment.
SVM::SVM(UINT kernelType,UINT svmType,bool useScaling,bool useNullRejection,bool useAutoGamma,Float gamma,UINT degree,Float coef0,Float nu,Float C,bool useCrossValidation,UINT kFoldValue){
    
    //Setup the default SVM parameters
    model = NULL;
    param.weight_label = NULL;
    param.weight = NULL;
    prob.l = 0;
    prob.x = NULL;
    prob.y = NULL;
    trained = false;
    problemSet = false;
    param.svm_type = C_SVC;
    param.kernel_type = LINEAR_KERNEL;
    param.degree = 3;
    param.gamma = 0;
    param.coef0 = 0;
    param.nu = 0.5;
    param.cache_size = 100; //LIBSVM kernel cache size in MB
    param.C = 1;
    param.eps = 1e-3; //LIBSVM stopping tolerance
    param.p = 0.1;
    param.shrinking = 1;
    param.probability = 1; //Request probability estimates from LIBSVM by default
    param.nr_weight = 0;
    param.weight_label = NULL;
    param.weight = NULL;
    this->useScaling = false;
    this->useCrossValidation = false;
    this->useNullRejection = false;
    this->useAutoGamma = true;
    classificationThreshold = 0.5;
    crossValidationResult = 0;
    
    classType = "SVM";
    classifierType = classType;
    classifierMode = STANDARD_CLASSIFIER_MODE;
    debugLog.setProceedingText("[DEBUG SVM]");
    errorLog.setProceedingText("[ERROR SVM]");
    trainingLog.setProceedingText("[TRAINING SVM]");
    warningLog.setProceedingText("[WARNING SVM]");
    
    //init(...) validates the kernelType/svmType and applies the constructor arguments
    init(kernelType,svmType,useScaling,useNullRejection,useAutoGamma,gamma,degree,coef0,nu,C,useCrossValidation,kFoldValue);
}
72 
//Copy constructor.  The LIBSVM pointers are NULLed first so that the
//assignment operator (which calls clear()) can run safely, then the
//deep copy is delegated to operator=.
SVM::SVM(const SVM &rhs){
    model = NULL;
    param.weight_label = NULL;
    param.weight = NULL;
    prob.l = 0;
    prob.x = NULL;
    prob.y = NULL;
    classType = "SVM";
    classifierType = classType;
    classifierMode = STANDARD_CLASSIFIER_MODE;
    debugLog.setProceedingText("[DEBUG SVM]");
    errorLog.setProceedingText("[ERROR SVM]");
    trainingLog.setProceedingText("[TRAINING SVM]");
    warningLog.setProceedingText("[WARNING SVM]");
    *this = rhs; //Delegate the deep copy to the assignment operator
}
89 
90 
92  clear();
93 }
94 
//Assignment operator: performs a deep copy of the rhs SVM.
//The LIBSVM training problem (prob) is intentionally NOT copied - only the
//trained model, the parameters and the classifier settings are duplicated.
SVM& SVM::operator=(const SVM &rhs){
    if( this != &rhs ){
        
        this->clear();
        
        //SVM variables
        this->problemSet = false; //We do not copy the problem set
        this->model = rhs.deepCopyModel();
        this->deepCopyParam( rhs.param, this->param );
        this->numInputDimensions = rhs.numInputDimensions;
        this->kFoldValue = rhs.kFoldValue;
        this->classificationThreshold = rhs.classificationThreshold;
        this->crossValidationResult = rhs.crossValidationResult;
        this->useAutoGamma = rhs.useAutoGamma;
        this->useCrossValidation = rhs.useCrossValidation;
        
        //Classifier variables
        copyBaseVariables( (Classifier*)&rhs );
    }
    return *this;
}
116 
117 bool SVM::deepCopyFrom(const Classifier *classifier){
118 
119  if( classifier == NULL ) return false;
120 
121  if( this->getClassifierType() == classifier->getClassifierType() ){
122  SVM *ptr = (SVM*)classifier;
123 
124  this->clear();
125 
126  //SVM variables
127  this->problemSet = false;
128  this->model = ptr->deepCopyModel();
129  this->deepCopyParam( ptr->param, this->param );
130  this->numInputDimensions = ptr->numInputDimensions;
131  this->kFoldValue = ptr->kFoldValue;
132  this->classificationThreshold = ptr->classificationThreshold;
133  this->crossValidationResult = ptr->crossValidationResult;
134  this->useAutoGamma = ptr->useAutoGamma;
135  this->useCrossValidation = ptr->useCrossValidation;
136 
137  //Classifier variables
138  return copyBaseVariables( classifier );
139  }
140 
141  return false;
142 }
143 
144 bool SVM::train_(ClassificationData &trainingData){
145 
146  //Clear any previous model
147  clear();
148 
149  if( trainingData.getNumSamples() == 0 ){
150  errorLog << "train_(ClassificationData &trainingData) - Training data has zero samples!" << std::endl;
151  return false;
152  }
153 
154  //Convert the labelled classification data into the LIBSVM data format
155  if( !convertClassificationDataToLIBSVMFormat(trainingData) ){
156  errorLog << "train_(ClassificationData &trainingData) - Failed To Convert Labelled Classification Data To LIBSVM Format!" << std::endl;
157  return false;
158  }
159 
160  if( useAutoGamma ) param.gamma = 1.0/numInputDimensions;
161 
162  //Train the model
163  bool trainingResult = trainSVM();
164 
165  if(! trainingResult ){
166  errorLog << "train_(ClassificationData &trainingData) - Failed To Train SVM Model!" << std::endl;
167  return false;
168  }
169 
170  return true;
171 }
172 
173 bool SVM::predict_(VectorFloat &inputVector){
174 
175  if( !trained ){
176  errorLog << "predict_(VectorFloat &inputVector) - The SVM model has not been trained!" << std::endl;
177  return false;
178  }
179 
180  if( inputVector.size() != numInputDimensions ){
181  errorLog << "predict_(VectorFloat &inputVector) - The size of the input vector (" << inputVector.size() << ") does not match the number of features of the model (" << numInputDimensions << ")" << std::endl;
182  return false;
183  }
184 
185  if( param.probability == 1 ){
186  if( !predictSVM( inputVector, maxLikelihood, classLikelihoods ) ){
187  errorLog << "predict(VectorFloat inputVector) - Prediction Failed!" << std::endl;
188  return false;
189  }
190  }else{
191  if( !predictSVM( inputVector ) ){
192  errorLog << "predict(VectorFloat inputVector) - Prediction Failed!" << std::endl;
193  return false;
194  }
195  }
196 
197  return true;
198 }
199 
200 bool SVM::init(UINT kernelType,UINT svmType,bool useScaling,bool useNullRejection,bool useAutoGamma,Float gamma,UINT degree,Float coef0,Float nu,Float C,bool useCrossValidation,UINT kFoldValue){
201 
202  //Clear any previous models or problems
203  clear();
204 
205  //Validate the kernerlType
206  if( !validateKernelType(kernelType) ){
207  errorLog << "init(...) - Unknown kernelType!\n";
208  return false;
209  }
210 
211  if( !validateSVMType(svmType) ){
212  errorLog << "init(...) - Unknown kernelType!\n";
213  return false;
214  }
215 
216  param.svm_type = (int)svmType;
217  param.kernel_type = (int)kernelType;
218  param.degree = (int)degree;
219  param.gamma = gamma;
220  param.coef0 = coef0;
221  param.nu = nu;
222  param.cache_size = 100;
223  param.C = C;
224  param.eps = 1e-3;
225  param.p = 0.1;
226  param.shrinking = 1;
227  param.probability = 1;
228  param.nr_weight = 0;
229  param.weight_label = NULL;
230  param.weight = NULL;
231  this->useScaling = useScaling;
232  this->useCrossValidation = useCrossValidation;
233  this->useNullRejection = useNullRejection;
234  this->useAutoGamma = useAutoGamma;
235  classificationThreshold = 0.5;
236  crossValidationResult = 0;
237 
238  return true;
239 }
240 
241 void SVM::deleteProblemSet(){
242  if( problemSet ){
243  for(int i=0; i<prob.l; i++){
244  delete[] prob.x[i];
245  prob.x[i] = NULL;
246  }
247  delete[] prob.x;
248  delete[] prob.y;
249  prob.l = 0;
250  prob.x = NULL;
251  prob.y = NULL;
252  problemSet = false;
253  }
254 }
255 
257 
258  //Clear any previous models, parameters or probelms
259  clear();
260 
261  //Setup the SVM parameters
262  param.svm_type = C_SVC;
263  param.kernel_type = LINEAR_KERNEL;
264  param.degree = 3;
265  param.gamma = 0;
266  param.coef0 = 0;
267  param.nu = 0.5;
268  param.cache_size = 100;
269  param.C = 1;
270  param.eps = 1e-3;
271  param.p = 0.1;
272  param.shrinking = 1;
273  param.probability = 1;
274  param.nr_weight = 0;
275  param.weight_label = NULL;
276  param.weight = NULL;
277  useCrossValidation = false;
278  kFoldValue = 10;
279  useAutoGamma = true;
280 }
281 
282 bool SVM::validateProblemAndParameters(){
283  //Check the parameters match the problem
284  const char *errorMsg = svm_check_parameter(&prob,&param);
285 
286  if( errorMsg ){
287  errorLog << "validateProblemAndParameters() - Parameters do not match problem!" << std::endl;
288  return false;
289  }
290 
291  return true;
292 }
293 
//Trains the LIBSVM model from the problem stored in prob/param.
//Optionally runs k-fold cross validation first (storing the score in
//crossValidationResult) before the final full training pass.
//Requires that convertClassificationDataToLIBSVMFormat() has set the problem.
bool SVM::trainSVM(){
    
    crossValidationResult = 0;
    
    //Erase any previous models
    if( trained ){
        svm_free_and_destroy_model(&model);
        trained = false;
    }
    
    //Check to make sure the problem has been set
    if( !problemSet ){
        errorLog << "trainSVM() - Problem not set!" << std::endl;
        return false;
    }
    
    //Verify the problem and the parameters
    if( !validateProblemAndParameters() ) return false;
    
    //Scale the training data if needed (in place, into the LIBSVM nodes)
    if( useScaling ){
        for(int i=0; i<prob.l; i++)
        for(UINT j=0; j<numInputDimensions; j++)
        prob.x[i][j].value = grt_scale(prob.x[i][j].value,ranges[j].minValue,ranges[j].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
    }
    
    if( useCrossValidation ){
        int i;
        Float total_correct = 0;
        Float total_error = 0;
        Float sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
        Float *target = new Float[prob.l];
        
        svm_cross_validation(&prob,&param,kFoldValue,target);
        if( param.svm_type == EPSILON_SVR || param.svm_type == NU_SVR )
        {
            //Regression: the CV result is the mean squared error over the folds
            for(i=0;i<prob.l;i++)
            {
                Float y = prob.y[i];
                Float v = target[i];
                total_error += (v-y)*(v-y);
                sumv += v;
                sumy += y;
                sumvv += v*v;
                sumyy += y*y;
                sumvy += v*y;
            }
            crossValidationResult = total_error/prob.l;
        }
        else
        {
            //Classification: the CV result is the accuracy (%) over the folds
            for(i=0;i<prob.l;i++){
                if(target[i] == prob.y[i]){
                    ++total_correct;
                }
            }
            crossValidationResult = total_correct/prob.l*100.0;
        }
        delete[] target;
    }
    
    //Train the SVM - if we are running cross validation then the CV will be run first followed by a full train
    model = svm_train(&prob,&param);
    
    if( model == NULL ){
        errorLog << "trainSVM() - Failed to train SVM Model!" << std::endl;
        return false;
    }
    
    if( model != NULL ){
        trained = true;
        numClasses = getNumClasses();
        //Copy the class labels out of the trained LIBSVM model
        classLabels.resize( getNumClasses() );
        for(UINT k=0; k<getNumClasses(); k++){
            classLabels[k] = model->label[k];
        }
        //Pre-size the prediction result buffers for realtime prediction
        classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
        classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
    }
    
    return trained;
}
376 
377 bool SVM::predictSVM(VectorFloat &inputVector){
378 
379  if( !trained || inputVector.size() != numInputDimensions ) return false;
380 
381  svm_node *x = NULL;
382 
383  //Copy the input data into the SVM format
384  x = new svm_node[numInputDimensions+1];
385  for(UINT j=0; j<numInputDimensions; j++){
386  x[j].index = (int)j+1;
387  x[j].value = inputVector[j];
388  }
389  //The last value in the input vector must be set to -1
390  x[numInputDimensions].index = -1;
391  x[numInputDimensions].value = 0;
392 
393  //Scale the input data if required
394  if( useScaling ){
395  for(UINT i=0; i<numInputDimensions; i++)
396  x[i].value = grt_scale(x[i].value,ranges[i].minValue,ranges[i].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
397  }
398 
399  //Perform the SVM prediction
400  Float predict_label = svm_predict(model,x);
401 
402  //We can't do null rejection without the probabilities, so just set the predicted class
403  predictedClassLabel = (UINT)predict_label;
404 
405  //Clean up the memory
406  delete[] x;
407 
408  return true;
409 }
410 
//Runs a probabilistic LIBSVM prediction on the input vector.
//On success: maxProbability holds the largest class probability, probabilites
//holds one probability per class (in the model's internal class order), and
//predictedClassLabel is set - honouring null rejection when it is enabled.
bool SVM::predictSVM(VectorFloat &inputVector,Float &maxProbability, VectorFloat &probabilites){
    
    if( !trained || param.probability == 0 || inputVector.size() != numInputDimensions ) return false;
    
    Float *prob_estimates = NULL;
    svm_node *x = NULL;
    
    //Setup the memory for the probability estimates
    prob_estimates = new Float[ model->nr_class ];
    
    //Copy the input data into the SVM format (1-based indices)
    x = new svm_node[numInputDimensions+1];
    for(UINT j=0; j<numInputDimensions; j++){
        x[j].index = (int)j+1;
        x[j].value = inputVector[j];
    }
    //The last value in the input vector must be set to -1 (LIBSVM terminator)
    x[numInputDimensions].index = -1;
    x[numInputDimensions].value = 0;
    
    //Scale the input data if required
    if( useScaling ){
        for(UINT j=0; j<numInputDimensions; j++)
        x[j].value = grt_scale(x[j].value,ranges[j].minValue,ranges[j].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
    }
    
    //Perform the SVM prediction
    Float predict_label = svm_predict_probability(model,x,prob_estimates);
    
    //Find the most likely class while copying out all of the probabilities.
    //NOTE: the k+1 label assigned here is provisional - it is overwritten below.
    predictedClassLabel = 0;
    maxProbability = 0;
    probabilites.resize(model->nr_class);
    for(int k=0; k<model->nr_class; k++){
        if( maxProbability < prob_estimates[k] ){
            maxProbability = prob_estimates[k];
            predictedClassLabel = k+1;
            maxLikelihood = maxProbability;
        }
        probabilites[k] = prob_estimates[k];
    }
    
    //Without null rejection the LIBSVM label always wins; with null rejection
    //it must also reach the classification threshold, otherwise the null
    //class label is reported
    if( !useNullRejection ) predictedClassLabel = (UINT)predict_label;
    else{
        if( maxProbability >= classificationThreshold ){
            predictedClassLabel = (UINT)predict_label;
        }else predictedClassLabel = GRT_DEFAULT_NULL_CLASS_LABEL;
    }
    
    //Clean up the memory
    delete[] prob_estimates;
    delete[] x;
    
    return true;
}
465 
//Converts GRT classification data into the LIBSVM problem format stored in
//prob (one svm_node array per example, 1-based indices, -1 terminator node).
//Also caches the per-dimension ranges so the data can be scaled later.
bool SVM::convertClassificationDataToLIBSVMFormat(ClassificationData &trainingData){
    
    //clear any previous problems
    deleteProblemSet();
    
    const UINT numTrainingExamples = trainingData.getNumSamples();
    numInputDimensions = trainingData.getNumDimensions();
    
    //Compute the ranges encase the data should be scaled
    ranges = trainingData.getRanges();
    
    //Init the memory
    prob.l = numTrainingExamples;
    prob.x = new svm_node*[numTrainingExamples];
    prob.y = new Float[numTrainingExamples];
    problemSet = true;
    
    for(UINT i=0; i<numTrainingExamples; i++){
        //Set the class ID
        prob.y[i] = trainingData[i].getClassLabel();
        
        //Assign the memory for this training example, note that a dummy node is needed at the end of the vector
        prob.x[i] = new svm_node[numInputDimensions+1];
        for(UINT j=0; j<numInputDimensions; j++){
            prob.x[i][j].index = j+1;
            prob.x[i][j].value = trainingData[i].getSample()[j];
        }
        prob.x[i][numInputDimensions].index = -1; //Assign the final node value
        prob.x[i][numInputDimensions].value = 0;
    }
    
    return true;
}
499 
500 bool SVM::save( std::fstream &file ) const{
501 
502  if( !file.is_open() ){
503  return false;
504  }
505 
506  file << "SVM_MODEL_FILE_V2.0\n";
507 
508  //Write the classifier settings to the file
510  errorLog <<"save(fstream &file) - Failed to save classifier base settings to file!" << std::endl;
511  return false;
512  }
513 
514  const svm_parameter& param = trained ? model->param : this->param;
515 
516  file << "ModelType: ";
517  switch( param.svm_type ){
518  case(C_SVC):
519  file << "C_SVC";
520  break;
521  case(NU_SVC):
522  file << "NU_SVC";
523  break;
524  case(ONE_CLASS):
525  file << "ONE_CLASS";
526  break;
527  case(EPSILON_SVR):
528  file << "EPSILON_SVR";
529  break;
530  case(NU_SVR):
531  file << "NU_SVR";
532  break;
533  default:
534  errorLog << "save(fstream &file) - Invalid model type: " << param.svm_type << std::endl;
535  return false;
536  break;
537  }
538  file << std::endl;
539 
540  file << "KernelType: ";
541  switch(param.kernel_type){
542  case(LINEAR):
543  file << "LINEAR";
544  break;
545  case(POLY):
546  file << "POLYNOMIAL";
547  break;
548  case(RBF):
549  file << "RBF";
550  break;
551  case(SIGMOID):
552  file << "SIGMOID";
553  break;
554  case(PRECOMPUTED):
555  file << "PRECOMPUTED";
556  break;
557  default:
558  errorLog << "save(fstream &file) - Invalid kernel type: " << param.kernel_type << std::endl;
559  return false;
560  break;
561  }
562  file << std::endl;
563  file << "Degree: " << param.degree << std::endl;
564  file << "Gamma: " << param.gamma << std::endl;
565  file << "Coef0: " << param.coef0 << std::endl;
566  file << "NumberOfFeatures: " << numInputDimensions << std::endl;
567  file << "UseShrinking: " << param.shrinking << std::endl;
568  file << "UseProbability: " << param.probability << std::endl;
569 
570  if( trained ){
571  UINT numClasses = (UINT)model->nr_class;
572  UINT numSV = (UINT)model->l;
573  file << "NumberOfSupportVectors: " << numSV << std::endl;
574 
575  file << "RHO: \n";
576  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->rho[i] << "\t";
577  file << "\n";
578 
579  if(model->label){
580  file << "Label: \n";
581  for(UINT i=0;i<numClasses;i++) file << model->label[i] << "\t";
582  file << "\n";
583  }
584 
585  if(model->probA){ // regression has probA only
586  file << "ProbA: \n";
587  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->probA[i] << "\t";
588  file << "\n";
589  }
590 
591  if(model->probB){
592  file << "ProbB: \n";
593  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->probB[i] << "\t";
594  file << "\n";
595  }
596 
597  if(model->nSV){
598  file << "NumSupportVectorsPerClass: \n";
599  for(UINT i=0;i<numClasses;i++) file << model->nSV[i] << "\t";
600  file << "\n";
601  }
602 
603  file << "SupportVectors: \n";
604 
605  const Float * const *sv_coef = model->sv_coef;
606  const svm_node * const *SV = model->SV;
607 
608  for(UINT i=0;i<numSV;i++){
609  for(UINT j=0;j<numClasses-1;j++)
610  file << sv_coef[j][i] << "\t";
611 
612  const svm_node *p = SV[i];
613 
614  if(param.kernel_type == PRECOMPUTED) file << (int) p->value << "\t";
615  else{
616  while(p->index != -1){
617  file << p->index << "\t" << p->value << "\t";
618  p++;
619  }
620  file << "\n";
621  }
622  }
623  }
624 
625  return true;
626 }
627 
628 bool SVM::load( std::fstream &file ){
629 
630  std::string word;
631  UINT numSV = 0;
632  UINT halfNumClasses = 0;
633 
634  //Clear any previous models, parameters or problems
635  clear();
636 
637  if( !file.is_open() ){
638  errorLog << "load(fstream &file) - The file is not open!" << std::endl;
639  return false;
640  }
641 
642  //Read the file header
643  file >> word;
644 
645  //Check to see if we should load a legacy file
646  if( word == "SVM_MODEL_FILE_V1.0" ){
647  return loadLegacyModelFromFile( file );
648  }
649 
650  //Check to make sure this is a file with the correct File Format
651  if( word != "SVM_MODEL_FILE_V2.0" ){
652  errorLog << "load(fstream &file) - Invalid file format!" << std::endl;
653  clear();
654  return false;
655  }
656 
657  //Load the base settings from the file
659  errorLog << "load(string filename) - Failed to load base settings from file!" << std::endl;
660  return false;
661  }
662 
663  //Init the memory for the model
664  model = new svm_model;
665  model->nr_class = 0;
666  model->l = 0;
667  model->SV = NULL;
668  model->sv_coef = NULL;
669  model->rho = NULL;
670  model->probA = NULL;
671  model->probB = NULL;
672  model->label = NULL;
673  model->nSV = NULL;
674  model->label = NULL;
675  model->nSV = NULL;
676  model->free_sv = 0; //This will be set to 1 if everything is loaded OK
677 
678  //Init the memory for the parameters
679  model->param.svm_type = 0;
680  model->param.kernel_type = 0;
681  model->param.degree = 0;
682  model->param.gamma = 0;
683  model->param.coef0 = 0;
684  model->param.cache_size = 0;
685  model->param.eps = 0;
686  model->param.C = 0;
687  model->param.nr_weight = 0;
688  model->param.weight_label = NULL;
689  model->param.weight = NULL;
690  model->param.nu = 0;
691  model->param.p = 0;
692  model->param.shrinking = 0;
693  model->param.probability = 1;
694 
695  //Load the model type
696  file >> word;
697  if(word != "ModelType:"){
698  errorLog << "load(fstream &file) - Failed to find ModelType header!" << std::endl;
699  clear();
700  return false;
701  }
702  file >> word;
703  if( word == "C_SVC" ){
704  model->param.svm_type = C_SVC;
705  }else{
706  if( word == "NU_SVC" ){
707  model->param.svm_type = NU_SVC;
708  }else{
709  if( word == "ONE_CLASS" ){
710  model->param.svm_type = ONE_CLASS;
711  }else{
712  if( word == "EPSILON_SVR" ){
713  model->param.svm_type = EPSILON_SVR;
714  }else{
715  if( word == "NU_SVR" ){
716  model->param.svm_type = NU_SVR;
717  }else{
718  errorLog << "load(fstream &file) - Failed to find SVM type!" << std::endl;
719  clear();
720  return false;
721  }
722  }
723  }
724  }
725  }
726 
727  //Load the model type
728  file >> word;
729  if(word != "KernelType:"){
730  errorLog << "load(fstream &file) - Failed to find kernel type!" << std::endl;
731  clear();
732  return false;
733  }
734  file >> word;
735  if( word == "LINEAR" ){
736  model->param.kernel_type = LINEAR;
737  }else{
738  if( word == "POLYNOMIAL" ){
739  model->param.kernel_type = POLY;
740  }else{
741  if( word == "RBF" ){
742  model->param.kernel_type = RBF;
743  }else{
744  if( word == "SIGMOID" ){
745  model->param.kernel_type = SIGMOID;
746  }else{
747  if( word == "PRECOMPUTED" ){
748  model->param.kernel_type = PRECOMPUTED;
749  }else{
750  errorLog << "load(fstream &file) - Failed to find kernel type!" << std::endl;
751  clear();
752  return false;
753  }
754  }
755  }
756  }
757  }
758 
759  //Load the degree
760  file >> word;
761  if(word != "Degree:"){
762  errorLog << "load(fstream &file) - Failed to find Degree header!" << std::endl;
763  clear();
764  return false;
765  }
766  file >> model->param.degree;
767 
768  //Load the gamma
769  file >> word;
770  if(word != "Gamma:"){
771  errorLog << "load(fstream &file) - Failed to find Gamma header!" << std::endl;
772  clear();
773  return false;
774  }
775  file >> model->param.gamma;
776 
777  //Load the Coef0
778  file >> word;
779  if(word != "Coef0:"){
780  errorLog << "load(fstream &file) - Failed to find Coef0 header!" << std::endl;
781  clear();
782  return false;
783  }
784  file >> model->param.coef0;
785 
786  //Load the NumberOfFeatures
787  file >> word;
788  if(word != "NumberOfFeatures:"){
789  errorLog << "load(fstream &file) - Failed to find NumberOfFeatures header!" << std::endl;
790  clear();
791  return false;
792  }
793  file >> numInputDimensions;
794 
795  //Load the UseShrinking
796  file >> word;
797  if(word != "UseShrinking:"){
798  errorLog << "load(fstream &file) - Failed to find UseShrinking header!" << std::endl;
799  clear();
800  return false;
801  }
802  file >> model->param.shrinking;
803 
804  //Load the UseProbability
805  file >> word;
806  if(word != "UseProbability:"){
807  errorLog << "load(fstream &file) - Failed to find UseProbability header!" << std::endl;
808  clear();
809  return false;
810  }
811  file >> model->param.probability;
812 
813  if( trained ){
814  //Load the NumberOfSupportVectors
815  file >> word;
816  if(word != "NumberOfSupportVectors:"){
817  errorLog << "load(fstream &file) - Failed to find NumberOfSupportVectors header!" << std::endl;
818  clear();
819  return false;
820  }
821  file >> numSV;
822 
823  //Setup the values
824  halfNumClasses = numClasses*(numClasses-1)/2;
825  model->nr_class = numClasses;
826  model->l = numSV;
827 
828  //Load the RHO
829  file >> word;
830  if(word != "RHO:"){
831  errorLog << "load(fstream &file) - Failed to find RHO header!" << std::endl;
832  clear();
833  return false;
834  }
835  model->rho = new Float[ halfNumClasses ];
836  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->rho[i];
837 
838  //See if we can load the Labels
839  file >> word;
840  if(word != "Label:"){
841  model->label = NULL;
842  }else{
843  model->label = new int[ numClasses ];
844  for(UINT i=0;i<numClasses;i++) file >> model->label[i];
845  //We only need to read a new line if we found the label!
846  file >> word;
847  }
848 
849  //See if we can load the ProbA
850  //We don't need to read another line here
851  if(word != "ProbA:"){
852  model->probA = NULL;
853  }else{
854  model->probA = new Float[ halfNumClasses ];
855  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probA[i];
856  //We only need to read a new line if we found the label!
857  file >> word;
858  }
859 
860  //See if we can load the ProbB
861  //We don't need to read another line here
862  if(word != "ProbB:"){
863  model->probB = NULL;
864  }else{
865  model->probB = new Float[ halfNumClasses ];
866  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probB[i];
867  //We only need to read a new line if we found the label!
868  file >> word;
869  }
870 
871  //See if we can load the NumSupportVectorsPerClass
872  //We don't need to read another line here
873  if( word == "NumSupportVectorsPerClass:" ){
874  model->nSV = new int[ numClasses ];
875  for(UINT i=0; i<numClasses; i++) file >> model->nSV[i];
876  //We only need to read a new line if we found the label!
877  file >> word;
878  }else{
879  model->nSV = NULL;
880  }
881 
882  //Load the SupportVectors
883  //We don't need to read another line here
884  if(word != "SupportVectors:"){
885  errorLog << "load(fstream &file) - Failed to find SupportVectors header!" << std::endl;
886  clear();
887  return false;
888  }
889 
890  //Setup the memory
891  model->sv_coef = new Float*[numClasses-1];
892  for(UINT j=0;j<numClasses-1;j++) model->sv_coef[j] = new Float[numSV];
893  model->SV = new svm_node*[numSV];
894 
895  for(UINT i=0; i<numSV; i++){
896  for(UINT j=0; j<numClasses-1; j++){
897  file >> model->sv_coef[j][i];
898  }
899 
900  model->SV[i] = new svm_node[numInputDimensions+1];
901 
902  if(model->param.kernel_type == PRECOMPUTED) file >> model->SV[i][0].value;
903  else{
904  for(UINT j=0; j<numInputDimensions; j++){
905  file >> model->SV[i][j].index;
906  file >> model->SV[i][j].value;
907  }
908  model->SV[i][numInputDimensions].index = -1; //Assign the final node value
909  model->SV[i][numInputDimensions].value = 0;
910  }
911  }
912 
913  //Set the class labels
914  this->numClasses = getNumClasses();
915  classLabels.resize(getNumClasses());
916  for(UINT k=0; k<getNumClasses(); k++){
917  classLabels[k] = model->label[k];
918  }
919 
920  //The SV have now been loaded so flag that they should be deleted
921  model->free_sv = 1;
922 
923  //Resize the prediction results to make sure it is setup for realtime prediction
924  maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
925  bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
926  classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
927  classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
928  }
929 
930  return true;
931 }
932 
933 bool SVM::clear(){
934 
935  //Clear the base class
937 
938  crossValidationResult = 0;
939  trained = false;
940  svm_free_and_destroy_model(&model);
941  svm_destroy_param(&param);
942  deleteProblemSet();
943 
944  return true;
945 }
946 
948  return useCrossValidation;
949 }
950 
952  return useAutoGamma;
953 }
954 
955 std::string SVM::getSVMType() const{
956 
957  const struct svm_parameter *paramPtr = NULL;
958  std::string modelName = "UNKNOWN";
959  if( trained ){
960  paramPtr = &model->param;
961  }else paramPtr = &param;
962 
963  switch(paramPtr->svm_type){
964  case(C_SVC):
965  modelName = "C_SVC";
966  break;
967  case(NU_SVC):
968  modelName = "NU_SVC";
969  break;
970  case(ONE_CLASS):
971  modelName = "ONE_CLASS";
972  break;
973  case(EPSILON_SVR):
974  modelName = "EPSILON_SVR";
975  break;
976  case(NU_SVR):
977  modelName = "NU_SVR";
978  break;
979  default:
980  break;
981  }
982 
983  return modelName;
984 }
985 
986 std::string SVM::getKernelType() const{
987  const struct svm_parameter *paramPtr = NULL;
988  std::string modelName = "UNKNOWN";
989  if( trained ){
990  paramPtr = &model->param;
991  }else paramPtr = &param;
992 
993  switch(paramPtr->kernel_type){
994  case(LINEAR_KERNEL):
995  modelName = "LINEAR_KERNEL";
996  break;
997  case(POLY_KERNEL):
998  modelName = "POLY_KERNEL";
999  break;
1000  case(RBF_KERNEL):
1001  modelName = "RBF_KERNEL";
1002  break;
1003  case(SIGMOID_KERNEL):
1004  modelName = "SIGMOID_KERNEL";
1005  break;
1006  case(PRECOMPUTED_KERNEL):
1007  modelName = "PRECOMPUTED_KERNEL";
1008  break;
1009  default:
1010  break;
1011  }
1012  return modelName;
1013 }
1014 
1015 UINT SVM::getNumClasses() const{
1016  if( !trained ) return 0;
1017  return (UINT) model->nr_class;
1018 }
1019 
1020 UINT SVM::getDegree() const{
1021  if( trained ){
1022  return (UINT)model->param.degree;
1023  }
1024  return (UINT)param.gamma;
1025 }
1026 
1027 Float SVM::getGamma() const{
1028  if( trained ){
1029  return model->param.gamma;
1030  }
1031  return param.gamma;
1032 }
1033 
1034 Float SVM::getNu() const{
1035  if( trained ){
1036  return model->param.nu;
1037  }
1038  return param.gamma;
1039 }
1040 
1041 Float SVM::getCoef0() const{
1042  if( trained ){
1043  return model->param.coef0;
1044  }
1045  return param.gamma;
1046 }
1047 
1048 Float SVM::getC() const{
1049  if( trained ){
1050  return model->param.C;
1051  }
1052  return param.gamma;
1053 }
1054 
1055 Float SVM::getCrossValidationResult() const{ return crossValidationResult; }
1056 
1057 bool SVM::setSVMType(const UINT svmType){
1058  if( validateSVMType(svmType) ){
1059  param.svm_type = (int)svmType;
1060  return true;
1061  }
1062  return false;
1063 }
1064 
1065 bool SVM::setKernelType(const UINT kernelType){
1066  if( validateKernelType(kernelType) ){
1067  param.kernel_type = (int)kernelType;
1068  return true;
1069  }
1070  warningLog << "setKernelType(UINT kernelType) - Failed to set kernel type, unknown kernelType!" << std::endl;
1071  return false;
1072 }
1073 
1074 bool SVM::setGamma(const Float gamma){
1075  if( !useAutoGamma ){
1076  this->param.gamma = gamma;
1077  return true;
1078  }
1079  warningLog << "setGamma(Float gamma) - Failed to set gamma, useAutoGamma is enabled, setUseAutoGamma to false first!" << std::endl;
1080  return false;
1081 }
1082 
1083 bool SVM::setDegree(const UINT degree){
1084  this->param.degree = (int)degree;
1085  return true;
1086 }
1087 
1088 bool SVM::setNu(const Float nu){
1089  this->param.nu = nu;
1090  return true;
1091 }
1092 
1093 bool SVM::setCoef0(const Float coef0){
1094  this->param.coef0 = coef0;
1095  return true;
1096 }
1097 
1098 bool SVM::setC(const Float C){
1099  this->param.C = C;
1100  return true;
1101 }
1102 
1103 bool SVM::setKFoldCrossValidationValue(const UINT kFoldValue){
1104  if( kFoldValue > 0 ){
1105  this->kFoldValue = kFoldValue;
1106  return true;
1107  }
1108  warningLog << "setKFoldCrossValidationValue(const UINT kFoldValue) - Failed to set kFoldValue, the kFoldValue must be greater than 0!" << std::endl;
1109  return false;
1110 }
1111 
1112 bool SVM::enableAutoGamma(const bool useAutoGamma){
1113  this->useAutoGamma = useAutoGamma;
1114  return true;
1115 }
1116 
1117 bool SVM::enableCrossValidationTraining(const bool useCrossValidation){
1118  this->useCrossValidation = useCrossValidation;
1119  return true;
1120 }
1121 
1122 bool SVM::validateSVMType(const UINT svmType){
1123  if( svmType == C_SVC ){
1124  return true;
1125  }
1126  if( svmType == NU_SVC ){
1127  return true;
1128  }
1129  if( svmType == ONE_CLASS ){
1130  return true;
1131  }
1132  if( svmType == EPSILON_SVR ){
1133  return true;
1134  }
1135  if( svmType == NU_SVR ){
1136  return true;
1137  }
1138  return false;
1139 }
1140 
1141 bool SVM::validateKernelType(const UINT kernelType){
1142  if( kernelType == LINEAR_KERNEL ){
1143  return true;
1144  }
1145  if( kernelType == POLY_KERNEL ){
1146  return true;
1147  }
1148  if( kernelType == RBF_KERNEL ){
1149  return true;
1150  }
1151  if( kernelType == SIGMOID_KERNEL ){
1152  return true;
1153  }
1154  if( kernelType == PRECOMPUTED_KERNEL ){
1155  return true;
1156  }
1157  return false;
1158 }
1159 
1160 struct svm_model* SVM::deepCopyModel() const{
1161 
1162  if( model == NULL ) return NULL;
1163 
1164  UINT halfNumClasses = 0;
1165 
1166  //Init the memory for the model
1167  struct svm_model *m = new svm_model;
1168  m->nr_class = 0;
1169  m->l = 0;
1170  m->SV = NULL;
1171  m->sv_coef = NULL;
1172  m->rho = NULL;
1173  m->probA = NULL;
1174  m->probB = NULL;
1175  m->label = NULL;
1176  m->nSV = NULL;
1177  m->label = NULL;
1178  m->nSV = NULL;
1179  m->free_sv = 0; //This will be set to 1 if everything is loaded OK
1180 
1181  //Init the memory for the parameters
1182  m->param.svm_type = 0;
1183  m->param.kernel_type = 0;
1184  m->param.degree = 0;
1185  m->param.gamma = 0;
1186  m->param.coef0 = 0;
1187  m->param.cache_size = 0;
1188  m->param.eps = 0;
1189  m->param.C = 0;
1190  m->param.nr_weight = 0;
1191  m->param.weight_label = NULL;
1192  m->param.weight = NULL;
1193  m->param.nu = 0;
1194  m->param.p = 0;
1195  m->param.shrinking = 0;
1196  m->param.probability = 1;
1197 
1198  //Copy the parameters
1199  m->param.svm_type = model->param.svm_type;
1200  m->param.kernel_type = model->param.kernel_type ;
1201  m->param.degree = model->param.degree;
1202  m->param.gamma = model->param.gamma;
1203  m->param.coef0 = model->param.coef0;
1204  m->nr_class = model->nr_class;
1205  m->l = model->l;
1206  m->param.shrinking = model->param.shrinking;
1207  m->param.probability = model->param.probability;
1208 
1209  //Setup the values
1210  halfNumClasses = model->nr_class*(model->nr_class-1)/2;
1211 
1212  m->rho = new Float[ halfNumClasses ];
1213  for(int i=0;i <model->nr_class*(model->nr_class-1)/2; i++) m->rho[i] = model->rho[i];
1214 
1215  if( model->label != NULL ){
1216  m->label = new int[ model->nr_class ];
1217  for(int i=0;i<model->nr_class;i++) m->label[i] = model->label[i];
1218  }
1219 
1220  if( model->probA != NULL ){
1221  m->probA = new Float[ halfNumClasses ];
1222  for(UINT i=0;i<halfNumClasses; i++) m->probA[i] = model->probA[i];
1223  }
1224 
1225  if( model->probB != NULL ){
1226  m->probB = new Float[ halfNumClasses ];
1227  for(UINT i=0; i<halfNumClasses; i++) m->probB[i] = model->probB[i];
1228  }
1229 
1230  if( model->nSV != NULL ){
1231  m->nSV = new int[ model->nr_class ];
1232  for(int i=0; i<model->nr_class; i++) m->nSV[i] = model->nSV[i];
1233  }
1234 
1235  //Setup the memory
1236  m->sv_coef = new Float*[numClasses-1];
1237  for(UINT j=0;j<numClasses-1;j++) m->sv_coef[j] = new Float[model->l];
1238  m->SV = new svm_node*[model->l];
1239 
1240  for(int i=0; i<model->l; i++){
1241  for(int j=0; j<model->nr_class-1; j++){
1242  m->sv_coef[j][i] = model->sv_coef[j][i];
1243  }
1244 
1245  m->SV[i] = new svm_node[numInputDimensions+1];
1246 
1247  if(model->param.kernel_type == PRECOMPUTED) m->SV[i][0].value = model->SV[i][0].value;
1248  else{
1249  for(UINT j=0; j<numInputDimensions; j++){
1250  m->SV[i][j].index = model->SV[i][j].index;
1251  m->SV[i][j].value = model->SV[i][j].value;
1252  }
1253  m->SV[i][numInputDimensions].index = -1; //Assign the final node value
1254  m->SV[i][numInputDimensions].value = 0;
1255  }
1256  }
1257 
1258  //The SV have now been loaded so flag that they should be deleted
1259  m->free_sv = 1;
1260 
1261  return m;
1262 }
1263 
1264 bool SVM::deepCopyProblem( const struct svm_problem &source, struct svm_problem &target, const unsigned int numInputDimensions ) const{
1265 
1266  //Cleanup the target memory
1267  if( target.y != NULL ){
1268  delete[] target.y;
1269  target.y = NULL;
1270  }
1271  if( target.x != NULL ){
1272  for(int i=0; i<target.l; i++){
1273  delete[] target.x[i];
1274  target.x[i] = NULL;
1275  }
1276  }
1277 
1278  //Deep copy the source to the target
1279  target.l = source.l;
1280 
1281  if( source.x != NULL ){
1282  target.x = new svm_node*[ target.l ];
1283  for(int i=0; i<target.l; i++){
1284  target.x[i] = new svm_node[ numInputDimensions+1 ];
1285  for(unsigned int j=0; j<numInputDimensions+1; j++){
1286  target.x[i][j] = source.x[i][j];
1287  }
1288  }
1289  }
1290 
1291  if( source.y != NULL ){
1292  target.y = new Float[ target.l ];
1293  for(int i=0; i<target.l; i++){
1294  target.y[i] = source.y[i];
1295  }
1296  }
1297 
1298  return true;
1299 }
1300 
1301 bool SVM::deepCopyParam( const svm_parameter &source_param, svm_parameter &target_param ) const{
1302 
1303  //Cleanup any dynamic memory in the target
1304  if( target_param.weight_label != NULL ){
1305  delete[] target_param.weight_label;
1306  target_param.weight_label = NULL;
1307  }
1308  if( target_param.weight != NULL ){
1309  delete[] target_param.weight;
1310  target_param.weight = NULL;
1311  }
1312 
1313  //Copy the non dynamic variables
1314  target_param.svm_type = source_param.svm_type;
1315  target_param.kernel_type = source_param.kernel_type;
1316  target_param.degree = source_param.degree;
1317  target_param.gamma = source_param.gamma;
1318  target_param.coef0 = source_param.coef0;
1319  target_param.cache_size = source_param.cache_size;
1320  target_param.eps = source_param.eps;
1321  target_param.C = source_param.C;
1322  target_param.nr_weight = source_param.nr_weight;
1323  target_param.nu = source_param.nu;
1324  target_param.p = source_param.p;
1325  target_param.shrinking = source_param.shrinking;
1326  target_param.probability = source_param.probability;
1327 
1328  //Copy any dynamic memory
1329  if( source_param.weight_label != NULL ){
1330 
1331  }
1332  if( source_param.weight != NULL ){
1333 
1334  }
1335 
1336  return true;
1337 }
1338 
1339 bool SVM::loadLegacyModelFromFile( std::fstream &file ){
1340 
1341  std::string word;
1342 
1343  UINT numSV = 0;
1344  UINT halfNumClasses = 0;
1345  numInputDimensions = 0;
1346 
1347  //Init the memory for the model
1348  model = new svm_model;
1349  model->nr_class = 0;
1350  model->l = 0;
1351  model->SV = NULL;
1352  model->sv_coef = NULL;
1353  model->rho = NULL;
1354  model->probA = NULL;
1355  model->probB = NULL;
1356  model->label = NULL;
1357  model->nSV = NULL;
1358  model->label = NULL;
1359  model->nSV = NULL;
1360  model->free_sv = 0; //This will be set to 1 if everything is loaded OK
1361 
1362  //Init the memory for the parameters
1363  model->param.svm_type = 0;
1364  model->param.kernel_type = 0;
1365  model->param.degree = 0;
1366  model->param.gamma = 0;
1367  model->param.coef0 = 0;
1368  model->param.cache_size = 0;
1369  model->param.eps = 0;
1370  model->param.C = 0;
1371  model->param.nr_weight = 0;
1372  model->param.weight_label = NULL;
1373  model->param.weight = NULL;
1374  model->param.nu = 0;
1375  model->param.p = 0;
1376  model->param.shrinking = 0;
1377  model->param.probability = 1;
1378 
1379  //Load the model type
1380  file >> word;
1381  if(word != "ModelType:"){
1382  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find ModelType header!" << std::endl;
1383  clear();
1384  return false;
1385  }
1386  file >> word;
1387  if( word == "C_SVC" ){
1388  model->param.svm_type = C_SVC;
1389  }else{
1390  if( word == "NU_SVC" ){
1391  model->param.svm_type = NU_SVC;
1392  }else{
1393  if( word == "ONE_CLASS" ){
1394  model->param.svm_type = ONE_CLASS;
1395  }else{
1396  if( word == "EPSILON_SVR" ){
1397  model->param.svm_type = EPSILON_SVR;
1398  }else{
1399  if( word == "NU_SVR" ){
1400  model->param.svm_type = NU_SVR;
1401  }else{
1402  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find SVM type!" << std::endl;
1403  clear();
1404  return false;
1405  }
1406  }
1407  }
1408  }
1409  }
1410 
1411  //Load the model type
1412  file >> word;
1413  if(word != "KernelType:"){
1414  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find kernel type!" << std::endl;
1415  clear();
1416  return false;
1417  }
1418  file >> word;
1419  if( word == "LINEAR" ){
1420  model->param.kernel_type = LINEAR;
1421  }else{
1422  if( word == "POLYNOMIAL" ){
1423  model->param.kernel_type = POLY;
1424  }else{
1425  if( word == "RBF" ){
1426  model->param.kernel_type = RBF;
1427  }else{
1428  if( word == "SIGMOID" ){
1429  model->param.kernel_type = SIGMOID;
1430  }else{
1431  if( word == "PRECOMPUTED" ){
1432  model->param.kernel_type = PRECOMPUTED;
1433  }else{
1434  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find kernel type!" << std::endl;
1435  clear();
1436  return false;
1437  }
1438  }
1439  }
1440  }
1441  }
1442 
1443  //Load the degree
1444  file >> word;
1445  if(word != "Degree:"){
1446  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find Degree header!" << std::endl;
1447  clear();
1448  return false;
1449  }
1450  file >> model->param.degree;
1451 
1452  //Load the gamma
1453  file >> word;
1454  if(word != "Gamma:"){
1455  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find Gamma header!" << std::endl;
1456  clear();
1457  return false;
1458  }
1459  file >> model->param.gamma;
1460 
1461  //Load the Coef0
1462  file >> word;
1463  if(word != "Coef0:"){
1464  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find Coef0 header!" << std::endl;
1465  clear();
1466  return false;
1467  }
1468  file >> model->param.coef0;
1469 
1470  //Load the NumberOfClasses
1471  file >> word;
1472  if(word != "NumberOfClasses:"){
1473  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find NumberOfClasses header!" << std::endl;
1474  clear();
1475  return false;
1476  }
1477  file >> numClasses;
1478 
1479  //Load the NumberOfSupportVectors
1480  file >> word;
1481  if(word != "NumberOfSupportVectors:"){
1482  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find NumberOfSupportVectors header!" << std::endl;
1483  clear();
1484  return false;
1485  }
1486  file >> numSV;
1487 
1488  //Load the NumberOfFeatures
1489  file >> word;
1490  if(word != "NumberOfFeatures:"){
1491  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find NumberOfFeatures header!" << std::endl;
1492  clear();
1493  return false;
1494  }
1495  file >> numInputDimensions;
1496 
1497  //Load the UseShrinking
1498  file >> word;
1499  if(word != "UseShrinking:"){
1500  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find UseShrinking header!" << std::endl;
1501  clear();
1502  return false;
1503  }
1504  file >> model->param.shrinking;
1505 
1506  //Load the UseProbability
1507  file >> word;
1508  if(word != "UseProbability:"){
1509  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find UseProbability header!" << std::endl;
1510  clear();
1511  return false;
1512  }
1513  file >> model->param.probability;
1514 
1515  //Load the UseScaling
1516  file >> word;
1517  if(word != "UseScaling:"){
1518  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find UseScaling header!" << std::endl;
1519  clear();
1520  return false;
1521  }
1522  file >> useScaling;
1523 
1524  //Load the Ranges
1525  file >> word;
1526  if(word != "Ranges:"){
1527  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find Ranges header!" << std::endl;
1528  clear();
1529  return false;
1530  }
1531  //Setup the memory for the ranges
1532  ranges.clear();
1533  ranges.resize(numInputDimensions);
1534 
1536  for(UINT i=0; i<ranges.size(); i++){
1537  file >> ranges[i].minValue;
1538  file >> ranges[i].maxValue;
1539  }
1540 
1541  //Setup the values
1542  halfNumClasses = numClasses*(numClasses-1)/2;
1543  model->nr_class = numClasses;
1544  model->l = numSV;
1545 
1546  //Load the RHO
1547  file >> word;
1548  if(word != "RHO:"){
1549  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find RHO header!" << std::endl;
1550  clear();
1551  return false;
1552  }
1553  model->rho = new Float[ halfNumClasses ];
1554  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->rho[i];
1555 
1556  //See if we can load the Labels
1557  file >> word;
1558  if(word != "Label:"){
1559  model->label = NULL;
1560  }else{
1561  model->label = new int[ numClasses ];
1562  for(UINT i=0;i<numClasses;i++) file >> model->label[i];
1563  //We only need to read a new line if we found the label!
1564  file >> word;
1565  }
1566 
1567  //See if we can load the ProbA
1568  //We don't need to read another line here
1569  if(word != "ProbA:"){
1570  model->probA = NULL;
1571  }else{
1572  model->probA = new Float[ halfNumClasses ];
1573  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probA[i];
1574  //We only need to read a new line if we found the label!
1575  file >> word;
1576  }
1577 
1578  //See if we can load the ProbB
1579  //We don't need to read another line here
1580  if(word != "ProbB:"){
1581  model->probB = NULL;
1582  }else{
1583  model->probB = new Float[ halfNumClasses ];
1584  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probB[i];
1585  //We only need to read a new line if we found the label!
1586  file >> word;
1587  }
1588 
1589  //See if we can load the NumSupportVectorsPerClass
1590  //We don't need to read another line here
1591  if(word != "NumSupportVectorsPerClass:"){
1592  model->nSV = NULL;
1593  }else{
1594  model->nSV = new int[ numClasses ];
1595  for(UINT i=0;i<numClasses;i++) file >> model->nSV[i];
1596  //We only need to read a new line if we found the label!
1597  file >> word;
1598  }
1599 
1600  //Load the SupportVectors
1601  //We don't need to read another line here
1602  if(word != "SupportVectors:"){
1603  errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find SupportVectors header!" << std::endl;
1604  clear();
1605  return false;
1606  }
1607 
1608  //Setup the memory
1609  model->sv_coef = new Float*[numClasses-1];
1610  for(UINT j=0;j<numClasses-1;j++) model->sv_coef[j] = new Float[numSV];
1611  model->SV = new svm_node*[numSV];
1612 
1613  for(UINT i=0; i<numSV; i++){
1614  for(UINT j=0; j<numClasses-1; j++){
1615  file >> model->sv_coef[j][i];
1616  }
1617 
1618  model->SV[i] = new svm_node[numInputDimensions+1];
1619 
1620  if(model->param.kernel_type == PRECOMPUTED) file >> model->SV[i][0].value;
1621  else{
1622  for(UINT j=0; j<numInputDimensions; j++){
1623  file >> model->SV[i][j].index;
1624  file >> model->SV[i][j].value;
1625  }
1626  model->SV[i][numInputDimensions].index = -1; //Assign the final node value
1627  model->SV[i][numInputDimensions].value = 0;
1628  }
1629  }
1630 
1631  //Set the class labels
1632  this->numClasses = getNumClasses();
1633  classLabels.resize(getNumClasses());
1634  for(UINT k=0; k<getNumClasses(); k++){
1635  classLabels[k] = model->label[k];
1636  }
1637 
1638  //The SV have now been loaded so flag that they should be deleted
1639  model->free_sv = 1;
1640 
1641  //Finally, flag that the model has been trained to show it has been loaded and can be used for prediction
1642  trained = true;
1643 
1644  return true;
1645 }
1646 
1647 GRT_END_NAMESPACE
bool saveBaseSettingsToFile(std::fstream &file) const
Definition: Classifier.cpp:256
std::string getKernelType() const
Definition: SVM.cpp:986
bool setCoef0(const Float coef0)
Definition: SVM.cpp:1093
#define DEFAULT_NULL_LIKELIHOOD_VALUE
Definition: Classifier.h:38
Float getCrossValidationResult() const
Definition: SVM.cpp:1055
bool enableCrossValidationTraining(const bool useCrossValidation)
Definition: SVM.cpp:1117
bool setC(const Float C)
Definition: SVM.cpp:1098
virtual bool clear()
Definition: SVM.cpp:933
bool loadLegacyModelFromFile(std::fstream &file)
Definition: SVM.cpp:1339
virtual ~SVM()
Definition: SVM.cpp:91
bool enableAutoGamma(const bool useAutoGamma)
Definition: SVM.cpp:1112
bool getIsCrossValidationTrainingEnabled() const
Definition: SVM.cpp:947
bool setSVMType(const UINT svmType)
Definition: SVM.cpp:1057
Float getNu() const
Definition: SVM.cpp:1034
std::string getClassifierType() const
Definition: Classifier.cpp:161
SVM(UINT kernelType=LINEAR_KERNEL, UINT svmType=C_SVC, bool useScaling=true, bool useNullRejection=false, bool useAutoGamma=true, Float gamma=0.1, UINT degree=3, Float coef0=0, Float nu=0.5, Float C=1, bool useCrossValidation=false, UINT kFoldValue=10)
Definition: SVM.cpp:29
std::string getSVMType() const
Definition: SVM.cpp:955
bool getIsAutoGammaEnabled() const
Definition: SVM.cpp:951
virtual UINT getNumClasses() const
Definition: SVM.cpp:1015
virtual bool resize(const unsigned int size)
Definition: Vector.h:133
virtual bool load(std::fstream &file)
Definition: SVM.cpp:628
This class acts as a front end for the LIBSVM library (http://www.csie.ntu.edu.tw/~cjlin/libsvm/). It implements a Support Vector Machine (SVM) classifier, a powerful classifier that works well on a wide range of classification problems, particularly on more complex problems that other classifiers (such as the KNN, GMM or ANBC algorithms) might not be able to solve.
void initDefaultSVMSettings()
Definition: SVM.cpp:256
virtual bool save(std::fstream &file) const
Definition: SVM.cpp:500
Float getCoef0() const
Definition: SVM.cpp:1041
UINT getNumSamples() const
Definition: SVM.h:49
bool init(UINT kernelType, UINT svmType, bool useScaling, bool useNullRejection, bool useAutoGamma, Float gamma, UINT degree, Float coef0, Float nu, Float C, bool useCrossValidation, UINT kFoldValue)
Definition: SVM.cpp:200
bool setDegree(const UINT degree)
Definition: SVM.cpp:1083
bool setKernelType(const UINT kernelType)
Definition: SVM.cpp:1065
bool copyBaseVariables(const Classifier *classifier)
Definition: Classifier.cpp:93
bool loadBaseSettingsFromFile(std::fstream &file)
Definition: Classifier.cpp:303
Float getGamma() const
Definition: SVM.cpp:1027
UINT getNumDimensions() const
virtual bool train_(ClassificationData &trainingData)
Definition: SVM.cpp:144
bool setKFoldCrossValidationValue(const UINT kFoldValue)
Definition: SVM.cpp:1103
Vector< MinMax > getRanges() const
bool setNu(const Float nu)
Definition: SVM.cpp:1088
bool setGamma(const Float gamma)
Definition: SVM.cpp:1074
UINT getDegree() const
Definition: SVM.cpp:1020
virtual bool deepCopyFrom(const Classifier *classifier)
Definition: SVM.cpp:117
Float getC() const
Definition: SVM.cpp:1048
virtual bool clear()
Definition: Classifier.cpp:142
SVM & operator=(const SVM &rhs)
Definition: SVM.cpp:95
virtual bool predict_(VectorFloat &inputVector)
Definition: SVM.cpp:173