GestureRecognitionToolkit  Version: 0.1.0
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source, c++ machine learning library for real-time gesture recognition.
SVM.cpp
1 /*
2  GRT MIT License
3  Copyright (c) <2012> <Nicholas Gillian, Media Lab, MIT>
4 
5  Permission is hereby granted, free of charge, to any person obtaining a copy of this software
6  and associated documentation files (the "Software"), to deal in the Software without restriction,
7  including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
9  subject to the following conditions:
10 
11  The above copyright notice and this permission notice shall be included in all copies or substantial
12  portions of the Software.
13 
14  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
15  LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
16  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
17  WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
18  SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19  */
20 
#include "SVM.h"

#include <vector>
22 
23 GRT_BEGIN_NAMESPACE
24 
//Register the SVM module with the Classifier base class so it can be created dynamically by name via the "SVM" string
RegisterClassifierModule< SVM > SVM::registerModule("SVM");
27 
//Default constructor: seeds the LIBSVM parameter struct and base-class state with safe defaults,
//then hands the caller-supplied settings to init(), which validates and applies them.
SVM::SVM(UINT kernelType,UINT svmType,bool useScaling,bool useNullRejection,bool useAutoGamma,Float gamma,UINT degree,Float coef0,Float nu,Float C,bool useCrossValidation,UINT kFoldValue){

    //Setup the default SVM parameters
    model = NULL;
    param.weight_label = NULL;
    param.weight = NULL;
    prob.l = 0;
    prob.x = NULL;
    prob.y = NULL;
    trained = false;
    problemSet = false;
    param.svm_type = C_SVC;
    param.kernel_type = LINEAR_KERNEL;
    param.degree = 3;
    param.gamma = 0;
    param.coef0 = 0;
    param.nu = 0.5;
    param.cache_size = 100;  //LIBSVM kernel cache size in MB
    param.C = 1;
    param.eps = 1e-3;        //Stopping tolerance for the LIBSVM solver
    param.p = 0.1;
    param.shrinking = 1;
    param.probability = 1;   //Probability estimates on by default so predict_ can fill classLikelihoods
    param.nr_weight = 0;
    param.weight_label = NULL;
    param.weight = NULL;
    this->useScaling = false;
    this->useCrossValidation = false;
    this->useNullRejection = false;
    this->useAutoGamma = true;
    classificationThreshold = 0.5;
    crossValidationResult = 0;

    classType = "SVM";
    classifierType = classType;
    classifierMode = STANDARD_CLASSIFIER_MODE;
    debugLog.setProceedingText("[DEBUG SVM]");
    errorLog.setProceedingText("[ERROR SVM]");
    trainingLog.setProceedingText("[TRAINING SVM]");
    warningLog.setProceedingText("[WARNING SVM]");

    //init(...) validates the kernel/svm types and overrides the defaults above with the caller's settings
    init(kernelType,svmType,useScaling,useNullRejection,useAutoGamma,gamma,degree,coef0,nu,C,useCrossValidation,kFoldValue);
}
71 
//Copy constructor: null all owned pointers first so operator= can safely call clear(), then deep copy rhs
SVM::SVM(const SVM &rhs){
    model = NULL;
    param.weight_label = NULL;
    param.weight = NULL;
    prob.l = 0;
    prob.x = NULL;
    prob.y = NULL;
    classType = "SVM";
    classifierType = classType;
    classifierMode = STANDARD_CLASSIFIER_MODE;
    debugLog.setProceedingText("[DEBUG SVM]");
    errorLog.setProceedingText("[ERROR SVM]");
    trainingLog.setProceedingText("[TRAINING SVM]");
    warningLog.setProceedingText("[WARNING SVM]");
    //Delegate the actual deep copy to operator=
    *this = rhs;
}
88 
89 
91  clear();
92 }
93 
//Deep-copy assignment. The LIBSVM problem set is intentionally NOT copied — only the
//trained model, parameters and classifier settings.
SVM& SVM::operator=(const SVM &rhs){
    //Self-assignment guard: clearing first would otherwise destroy the model we are copying from
    if( this != &rhs ){

        this->clear();

        //SVM variables
        this->problemSet = false; //We do not copy the problem set
        this->model = rhs.deepCopyModel();
        this->deepCopyParam( rhs.param, this->param );
        this->numInputDimensions = rhs.numInputDimensions;
        this->kFoldValue = rhs.kFoldValue;
        this->classificationThreshold = rhs.classificationThreshold;
        this->crossValidationResult = rhs.crossValidationResult;
        this->useAutoGamma = rhs.useAutoGamma;
        this->useCrossValidation = rhs.useCrossValidation;

        //Classifier variables
        copyBaseVariables( (Classifier*)&rhs );
    }
    return *this;
}
115 
116 bool SVM::deepCopyFrom(const Classifier *classifier){
117 
118  if( classifier == NULL ) return false;
119 
120  if( this->getClassifierType() == classifier->getClassifierType() ){
121  SVM *ptr = (SVM*)classifier;
122 
123  this->clear();
124 
125  //SVM variables
126  this->problemSet = false;
127  this->model = ptr->deepCopyModel();
128  this->deepCopyParam( ptr->param, this->param );
129  this->numInputDimensions = ptr->numInputDimensions;
130  this->kFoldValue = ptr->kFoldValue;
131  this->classificationThreshold = ptr->classificationThreshold;
132  this->crossValidationResult = ptr->crossValidationResult;
133  this->useAutoGamma = ptr->useAutoGamma;
134  this->useCrossValidation = ptr->useCrossValidation;
135 
136  //Classifier variables
137  return copyBaseVariables( classifier );
138  }
139 
140  return false;
141 }
142 
143 bool SVM::train_(ClassificationData &trainingData){
144 
145  //Clear any previous model
146  clear();
147 
148  if( trainingData.getNumSamples() == 0 ){
149  errorLog << "train_(ClassificationData &trainingData) - Training data has zero samples!" << std::endl;
150  return false;
151  }
152 
153  //Convert the labelled classification data into the LIBSVM data format
154  if( !convertClassificationDataToLIBSVMFormat(trainingData) ){
155  errorLog << "train_(ClassificationData &trainingData) - Failed To Convert Labelled Classification Data To LIBSVM Format!" << std::endl;
156  return false;
157  }
158 
159  if( useAutoGamma ) param.gamma = 1.0/numInputDimensions;
160 
161  //Train the model
162  bool trainingResult = trainSVM();
163 
164  if(! trainingResult ){
165  errorLog << "train_(ClassificationData &trainingData) - Failed To Train SVM Model!" << std::endl;
166  return false;
167  }
168 
169  return true;
170 }
171 
172 bool SVM::predict_(VectorFloat &inputVector){
173 
174  if( !trained ){
175  errorLog << "predict_(VectorFloat &inputVector) - The SVM model has not been trained!" << std::endl;
176  return false;
177  }
178 
179  if( inputVector.size() != numInputDimensions ){
180  errorLog << "predict_(VectorFloat &inputVector) - The size of the input vector (" << inputVector.size() << ") does not match the number of features of the model (" << numInputDimensions << ")" << std::endl;
181  return false;
182  }
183 
184  if( param.probability == 1 ){
185  if( !predictSVM( inputVector, maxLikelihood, classLikelihoods ) ){
186  errorLog << "predict(VectorFloat inputVector) - Prediction Failed!" << std::endl;
187  return false;
188  }
189  }else{
190  if( !predictSVM( inputVector ) ){
191  errorLog << "predict(VectorFloat inputVector) - Prediction Failed!" << std::endl;
192  return false;
193  }
194  }
195 
196  return true;
197 }
198 
199 bool SVM::init(UINT kernelType,UINT svmType,bool useScaling,bool useNullRejection,bool useAutoGamma,Float gamma,UINT degree,Float coef0,Float nu,Float C,bool useCrossValidation,UINT kFoldValue){
200 
201  //Clear any previous models or problems
202  clear();
203 
204  //Validate the kernerlType
205  if( !validateKernelType(kernelType) ){
206  errorLog << "init(...) - Unknown kernelType!\n";
207  return false;
208  }
209 
210  if( !validateSVMType(svmType) ){
211  errorLog << "init(...) - Unknown kernelType!\n";
212  return false;
213  }
214 
215  param.svm_type = (int)svmType;
216  param.kernel_type = (int)kernelType;
217  param.degree = (int)degree;
218  param.gamma = gamma;
219  param.coef0 = coef0;
220  param.nu = nu;
221  param.cache_size = 100;
222  param.C = C;
223  param.eps = 1e-3;
224  param.p = 0.1;
225  param.shrinking = 1;
226  param.probability = 1;
227  param.nr_weight = 0;
228  param.weight_label = NULL;
229  param.weight = NULL;
230  this->useScaling = useScaling;
231  this->useCrossValidation = useCrossValidation;
232  this->useNullRejection = useNullRejection;
233  this->useAutoGamma = useAutoGamma;
234  classificationThreshold = 0.5;
235  crossValidationResult = 0;
236 
237  return true;
238 }
239 
240 void SVM::deleteProblemSet(){
241  if( problemSet ){
242  for(int i=0; i<prob.l; i++){
243  delete[] prob.x[i];
244  prob.x[i] = NULL;
245  }
246  delete[] prob.x;
247  delete[] prob.y;
248  prob.l = 0;
249  prob.x = NULL;
250  prob.y = NULL;
251  problemSet = false;
252  }
253 }
254 
256 
257  //Clear any previous models, parameters or probelms
258  clear();
259 
260  //Setup the SVM parameters
261  param.svm_type = C_SVC;
262  param.kernel_type = LINEAR_KERNEL;
263  param.degree = 3;
264  param.gamma = 0;
265  param.coef0 = 0;
266  param.nu = 0.5;
267  param.cache_size = 100;
268  param.C = 1;
269  param.eps = 1e-3;
270  param.p = 0.1;
271  param.shrinking = 1;
272  param.probability = 1;
273  param.nr_weight = 0;
274  param.weight_label = NULL;
275  param.weight = NULL;
276  useCrossValidation = false;
277  kFoldValue = 10;
278  useAutoGamma = true;
279 }
280 
281 bool SVM::validateProblemAndParameters(){
282  //Check the parameters match the problem
283  const char *errorMsg = svm_check_parameter(&prob,&param);
284 
285  if( errorMsg ){
286  errorLog << "validateProblemAndParameters() - Parameters do not match problem!" << std::endl;
287  return false;
288  }
289 
290  return true;
291 }
292 
//Core trainer: scales the problem if required, optionally runs k-fold cross validation
//(storing the CV accuracy/error in crossValidationResult), then trains the final model
//on the full problem set and caches the class labels/likelihood buffers.
bool SVM::trainSVM(){

    crossValidationResult = 0;

    //Erase any previous models
    if( trained ){
        svm_free_and_destroy_model(&model);
        trained = false;
    }

    //Check to make sure the problem has been set
    if( !problemSet ){
        errorLog << "trainSVM() - Problem not set!" << std::endl;
        return false;
    }

    //Verify the problem and the parameters
    if( !validateProblemAndParameters() ) return false;

    //Scale the training data if needed (scaling is applied in place to the problem set)
    if( useScaling ){
        for(int i=0; i<prob.l; i++)
            for(UINT j=0; j<numInputDimensions; j++)
                prob.x[i][j].value = grt_scale(prob.x[i][j].value,ranges[j].minValue,ranges[j].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
    }

    if( useCrossValidation ){
        int i;
        Float total_correct = 0;
        Float total_error = 0;
        //sumv/sumy/sumvv/sumyy/sumvy are accumulated but not reported — presumably kept
        //from LIBSVM's reference CV code (which uses them for a correlation coefficient)
        Float sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
        Float *target = new Float[prob.l];

        svm_cross_validation(&prob,&param,kFoldValue,target);
        if( param.svm_type == EPSILON_SVR || param.svm_type == NU_SVR )
        {
            //Regression: CV result is the mean squared error
            for(i=0;i<prob.l;i++)
            {
                Float y = prob.y[i];
                Float v = target[i];
                total_error += (v-y)*(v-y);
                sumv += v;
                sumy += y;
                sumvv += v*v;
                sumyy += y*y;
                sumvy += v*y;
            }
            crossValidationResult = total_error/prob.l;
        }
        else
        {
            //Classification: CV result is the accuracy as a percentage
            for(i=0;i<prob.l;i++){
                if(target[i] == prob.y[i]){
                    ++total_correct;
                }
            }
            crossValidationResult = total_correct/prob.l*100.0;
        }
        delete[] target;
    }

    //Train the SVM - if we are running cross validation then the CV will be run first followed by a full train
    model = svm_train(&prob,&param);

    if( model == NULL ){
        errorLog << "trainSVM() - Failed to train SVM Model!" << std::endl;
        return false;
    }

    if( model != NULL ){
        //Cache the class labels from the model and size the prediction buffers
        trained = true;
        numClasses = getNumClasses();
        classLabels.resize( getNumClasses() );
        for(UINT k=0; k<getNumClasses(); k++){
            classLabels[k] = model->label[k];
        }
        classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
        classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
    }

    return trained;
}
375 
376 bool SVM::predictSVM(VectorFloat &inputVector){
377 
378  if( !trained || inputVector.size() != numInputDimensions ) return false;
379 
380  svm_node *x = NULL;
381 
382  //Copy the input data into the SVM format
383  x = new svm_node[numInputDimensions+1];
384  for(UINT j=0; j<numInputDimensions; j++){
385  x[j].index = (int)j+1;
386  x[j].value = inputVector[j];
387  }
388  //The last value in the input vector must be set to -1
389  x[numInputDimensions].index = -1;
390  x[numInputDimensions].value = 0;
391 
392  //Scale the input data if required
393  if( useScaling ){
394  for(UINT i=0; i<numInputDimensions; i++)
395  x[i].value = grt_scale(x[i].value,ranges[i].minValue,ranges[i].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
396  }
397 
398  //Perform the SVM prediction
399  Float predict_label = svm_predict(model,x);
400 
401  //We can't do null rejection without the probabilities, so just set the predicted class
402  predictedClassLabel = (UINT)predict_label;
403 
404  //Clean up the memory
405  delete[] x;
406 
407  return true;
408 }
409 
410 bool SVM::predictSVM(VectorFloat &inputVector,Float &maxProbability, VectorFloat &probabilites){
411 
412  if( !trained || param.probability == 0 || inputVector.size() != numInputDimensions ) return false;
413 
414  Float *prob_estimates = NULL;
415  svm_node *x = NULL;
416 
417  //Setup the memory for the probability estimates
418  prob_estimates = new Float[ model->nr_class ];
419 
420  //Copy the input data into the SVM format
421  x = new svm_node[numInputDimensions+1];
422  for(UINT j=0; j<numInputDimensions; j++){
423  x[j].index = (int)j+1;
424  x[j].value = inputVector[j];
425  }
426  //The last value in the input vector must be set to -1
427  x[numInputDimensions].index = -1;
428  x[numInputDimensions].value = 0;
429 
430  //Scale the input data if required
431  if( useScaling ){
432  for(UINT j=0; j<numInputDimensions; j++)
433  x[j].value = grt_scale(x[j].value,ranges[j].minValue,ranges[j].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
434  }
435 
436  //Perform the SVM prediction
437  Float predict_label = svm_predict_probability(model,x,prob_estimates);
438 
439  predictedClassLabel = 0;
440  maxProbability = 0;
441  probabilites.resize(model->nr_class);
442  for(int k=0; k<model->nr_class; k++){
443  if( maxProbability < prob_estimates[k] ){
444  maxProbability = prob_estimates[k];
445  predictedClassLabel = k+1;
446  maxLikelihood = maxProbability;
447  }
448  probabilites[k] = prob_estimates[k];
449  }
450 
451  if( !useNullRejection ) predictedClassLabel = (UINT)predict_label;
452  else{
453  if( maxProbability >= classificationThreshold ){
454  predictedClassLabel = (UINT)predict_label;
455  }else predictedClassLabel = GRT_DEFAULT_NULL_CLASS_LABEL;
456  }
457 
458  //Clean up the memory
459  delete[] prob_estimates;
460  delete[] x;
461 
462  return true;
463 }
464 
//Converts GRT labelled classification data into the LIBSVM svm_problem format, storing it in
//the prob member and recording the per-dimension ranges for later scaling.
bool SVM::convertClassificationDataToLIBSVMFormat(ClassificationData &trainingData){

    //clear any previous problems
    deleteProblemSet();

    const UINT numTrainingExamples = trainingData.getNumSamples();
    numInputDimensions = trainingData.getNumDimensions();

    //Compute the ranges encase the data should be scaled
    ranges = trainingData.getRanges();

    //Init the memory. These must stay raw new[] allocations — deleteProblemSet() releases
    //them with matching delete[] calls.
    prob.l = numTrainingExamples;
    prob.x = new svm_node*[numTrainingExamples];
    prob.y = new Float[numTrainingExamples];
    problemSet = true;

    for(UINT i=0; i<numTrainingExamples; i++){
        //Set the class ID
        prob.y[i] = trainingData[i].getClassLabel();

        //Assign the memory for this training example, note that a dummy node is needed at the end of the vector
        prob.x[i] = new svm_node[numInputDimensions+1];
        for(UINT j=0; j<numInputDimensions; j++){
            prob.x[i][j].index = j+1; //LIBSVM feature indices are 1-based
            prob.x[i][j].value = trainingData[i].getSample()[j];
        }
        prob.x[i][numInputDimensions].index = -1; //Assign the final node value
        prob.x[i][numInputDimensions].value = 0;
    }

    return true;
}
498 
499 bool SVM::saveModelToFile( std::fstream &file ) const{
500 
501  if( !file.is_open() ){
502  return false;
503  }
504 
505  file << "SVM_MODEL_FILE_V2.0\n";
506 
507  //Write the classifier settings to the file
509  errorLog <<"saveModelToFile(fstream &file) - Failed to save classifier base settings to file!" << std::endl;
510  return false;
511  }
512 
513  const svm_parameter& param = trained ? model->param : this->param;
514 
515  file << "ModelType: ";
516  switch( param.svm_type ){
517  case(C_SVC):
518  file << "C_SVC";
519  break;
520  case(NU_SVC):
521  file << "NU_SVC";
522  break;
523  case(ONE_CLASS):
524  file << "ONE_CLASS";
525  break;
526  case(EPSILON_SVR):
527  file << "EPSILON_SVR";
528  break;
529  case(NU_SVR):
530  file << "NU_SVR";
531  break;
532  default:
533  errorLog << "saveModelToFile(fstream &file) - Invalid model type: " << param.svm_type << std::endl;
534  return false;
535  break;
536  }
537  file << std::endl;
538 
539  file << "KernelType: ";
540  switch(param.kernel_type){
541  case(LINEAR):
542  file << "LINEAR";
543  break;
544  case(POLY):
545  file << "POLYNOMIAL";
546  break;
547  case(RBF):
548  file << "RBF";
549  break;
550  case(SIGMOID):
551  file << "SIGMOID";
552  break;
553  case(PRECOMPUTED):
554  file << "PRECOMPUTED";
555  break;
556  default:
557  errorLog << "saveModelToFile(fstream &file) - Invalid kernel type: " << param.kernel_type << std::endl;
558  return false;
559  break;
560  }
561  file << std::endl;
562  file << "Degree: " << param.degree << std::endl;
563  file << "Gamma: " << param.gamma << std::endl;
564  file << "Coef0: " << param.coef0 << std::endl;
565  file << "NumberOfFeatures: " << numInputDimensions << std::endl;
566  file << "UseShrinking: " << param.shrinking << std::endl;
567  file << "UseProbability: " << param.probability << std::endl;
568 
569  if( trained ){
570  UINT numClasses = (UINT)model->nr_class;
571  UINT numSV = (UINT)model->l;
572  file << "NumberOfSupportVectors: " << numSV << std::endl;
573 
574  file << "RHO: \n";
575  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->rho[i] << "\t";
576  file << "\n";
577 
578  if(model->label){
579  file << "Label: \n";
580  for(UINT i=0;i<numClasses;i++) file << model->label[i] << "\t";
581  file << "\n";
582  }
583 
584  if(model->probA){ // regression has probA only
585  file << "ProbA: \n";
586  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->probA[i] << "\t";
587  file << "\n";
588  }
589 
590  if(model->probB){
591  file << "ProbB: \n";
592  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->probB[i] << "\t";
593  file << "\n";
594  }
595 
596  if(model->nSV){
597  file << "NumSupportVectorsPerClass: \n";
598  for(UINT i=0;i<numClasses;i++) file << model->nSV[i] << "\t";
599  file << "\n";
600  }
601 
602  file << "SupportVectors: \n";
603 
604  const Float * const *sv_coef = model->sv_coef;
605  const svm_node * const *SV = model->SV;
606 
607  for(UINT i=0;i<numSV;i++){
608  for(UINT j=0;j<numClasses-1;j++)
609  file << sv_coef[j][i] << "\t";
610 
611  const svm_node *p = SV[i];
612 
613  if(param.kernel_type == PRECOMPUTED) file << (int) p->value << "\t";
614  else{
615  while(p->index != -1){
616  file << p->index << "\t" << p->value << "\t";
617  p++;
618  }
619  file << "\n";
620  }
621  }
622  }
623 
624  return true;
625 }
626 
627 bool SVM::loadModelFromFile( std::fstream &file ){
628 
629  std::string word;
630  UINT numSV = 0;
631  UINT halfNumClasses = 0;
632 
633  //Clear any previous models, parameters or problems
634  clear();
635 
636  if( !file.is_open() ){
637  errorLog << "loadModelFromFile(fstream &file) - The file is not open!" << std::endl;
638  return false;
639  }
640 
641  //Read the file header
642  file >> word;
643 
644  //Check to see if we should load a legacy file
645  if( word == "SVM_MODEL_FILE_V1.0" ){
646  return loadLegacyModelFromFile( file );
647  }
648 
649  //Check to make sure this is a file with the correct File Format
650  if( word != "SVM_MODEL_FILE_V2.0" ){
651  errorLog << "loadModelFromFile(fstream &file) - Invalid file format!" << std::endl;
652  clear();
653  return false;
654  }
655 
656  //Load the base settings from the file
658  errorLog << "loadModelFromFile(string filename) - Failed to load base settings from file!" << std::endl;
659  return false;
660  }
661 
662  //Init the memory for the model
663  model = new svm_model;
664  model->nr_class = 0;
665  model->l = 0;
666  model->SV = NULL;
667  model->sv_coef = NULL;
668  model->rho = NULL;
669  model->probA = NULL;
670  model->probB = NULL;
671  model->label = NULL;
672  model->nSV = NULL;
673  model->label = NULL;
674  model->nSV = NULL;
675  model->free_sv = 0; //This will be set to 1 if everything is loaded OK
676 
677  //Init the memory for the parameters
678  model->param.svm_type = 0;
679  model->param.kernel_type = 0;
680  model->param.degree = 0;
681  model->param.gamma = 0;
682  model->param.coef0 = 0;
683  model->param.cache_size = 0;
684  model->param.eps = 0;
685  model->param.C = 0;
686  model->param.nr_weight = 0;
687  model->param.weight_label = NULL;
688  model->param.weight = NULL;
689  model->param.nu = 0;
690  model->param.p = 0;
691  model->param.shrinking = 0;
692  model->param.probability = 1;
693 
694  //Load the model type
695  file >> word;
696  if(word != "ModelType:"){
697  errorLog << "loadModelFromFile(fstream &file) - Failed to find ModelType header!" << std::endl;
698  clear();
699  return false;
700  }
701  file >> word;
702  if( word == "C_SVC" ){
703  model->param.svm_type = C_SVC;
704  }else{
705  if( word == "NU_SVC" ){
706  model->param.svm_type = NU_SVC;
707  }else{
708  if( word == "ONE_CLASS" ){
709  model->param.svm_type = ONE_CLASS;
710  }else{
711  if( word == "EPSILON_SVR" ){
712  model->param.svm_type = EPSILON_SVR;
713  }else{
714  if( word == "NU_SVR" ){
715  model->param.svm_type = NU_SVR;
716  }else{
717  errorLog << "loadModelFromFile(fstream &file) - Failed to find SVM type!" << std::endl;
718  clear();
719  return false;
720  }
721  }
722  }
723  }
724  }
725 
726  //Load the model type
727  file >> word;
728  if(word != "KernelType:"){
729  errorLog << "loadModelFromFile(fstream &file) - Failed to find kernel type!" << std::endl;
730  clear();
731  return false;
732  }
733  file >> word;
734  if( word == "LINEAR" ){
735  model->param.kernel_type = LINEAR;
736  }else{
737  if( word == "POLYNOMIAL" ){
738  model->param.kernel_type = POLY;
739  }else{
740  if( word == "RBF" ){
741  model->param.kernel_type = RBF;
742  }else{
743  if( word == "SIGMOID" ){
744  model->param.kernel_type = SIGMOID;
745  }else{
746  if( word == "PRECOMPUTED" ){
747  model->param.kernel_type = PRECOMPUTED;
748  }else{
749  errorLog << "loadModelFromFile(fstream &file) - Failed to find kernel type!" << std::endl;
750  clear();
751  return false;
752  }
753  }
754  }
755  }
756  }
757 
758  //Load the degree
759  file >> word;
760  if(word != "Degree:"){
761  errorLog << "loadModelFromFile(fstream &file) - Failed to find Degree header!" << std::endl;
762  clear();
763  return false;
764  }
765  file >> model->param.degree;
766 
767  //Load the gamma
768  file >> word;
769  if(word != "Gamma:"){
770  errorLog << "loadModelFromFile(fstream &file) - Failed to find Gamma header!" << std::endl;
771  clear();
772  return false;
773  }
774  file >> model->param.gamma;
775 
776  //Load the Coef0
777  file >> word;
778  if(word != "Coef0:"){
779  errorLog << "loadModelFromFile(fstream &file) - Failed to find Coef0 header!" << std::endl;
780  clear();
781  return false;
782  }
783  file >> model->param.coef0;
784 
785  //Load the NumberOfFeatures
786  file >> word;
787  if(word != "NumberOfFeatures:"){
788  errorLog << "loadModelFromFile(fstream &file) - Failed to find NumberOfFeatures header!" << std::endl;
789  clear();
790  return false;
791  }
792  file >> numInputDimensions;
793 
794  //Load the UseShrinking
795  file >> word;
796  if(word != "UseShrinking:"){
797  errorLog << "loadModelFromFile(fstream &file) - Failed to find UseShrinking header!" << std::endl;
798  clear();
799  return false;
800  }
801  file >> model->param.shrinking;
802 
803  //Load the UseProbability
804  file >> word;
805  if(word != "UseProbability:"){
806  errorLog << "loadModelFromFile(fstream &file) - Failed to find UseProbability header!" << std::endl;
807  clear();
808  return false;
809  }
810  file >> model->param.probability;
811 
812  if( trained ){
813  //Load the NumberOfSupportVectors
814  file >> word;
815  if(word != "NumberOfSupportVectors:"){
816  errorLog << "loadModelFromFile(fstream &file) - Failed to find NumberOfSupportVectors header!" << std::endl;
817  clear();
818  return false;
819  }
820  file >> numSV;
821 
822  //Setup the values
823  halfNumClasses = numClasses*(numClasses-1)/2;
824  model->nr_class = numClasses;
825  model->l = numSV;
826 
827  //Load the RHO
828  file >> word;
829  if(word != "RHO:"){
830  errorLog << "loadModelFromFile(fstream &file) - Failed to find RHO header!" << std::endl;
831  clear();
832  return false;
833  }
834  model->rho = new Float[ halfNumClasses ];
835  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->rho[i];
836 
837  //See if we can load the Labels
838  file >> word;
839  if(word != "Label:"){
840  model->label = NULL;
841  }else{
842  model->label = new int[ numClasses ];
843  for(UINT i=0;i<numClasses;i++) file >> model->label[i];
844  //We only need to read a new line if we found the label!
845  file >> word;
846  }
847 
848  //See if we can load the ProbA
849  //We don't need to read another line here
850  if(word != "ProbA:"){
851  model->probA = NULL;
852  }else{
853  model->probA = new Float[ halfNumClasses ];
854  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probA[i];
855  //We only need to read a new line if we found the label!
856  file >> word;
857  }
858 
859  //See if we can load the ProbB
860  //We don't need to read another line here
861  if(word != "ProbB:"){
862  model->probB = NULL;
863  }else{
864  model->probB = new Float[ halfNumClasses ];
865  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probB[i];
866  //We only need to read a new line if we found the label!
867  file >> word;
868  }
869 
870  //See if we can load the NumSupportVectorsPerClass
871  //We don't need to read another line here
872  if( word == "NumSupportVectorsPerClass:" ){
873  model->nSV = new int[ numClasses ];
874  for(UINT i=0; i<numClasses; i++) file >> model->nSV[i];
875  //We only need to read a new line if we found the label!
876  file >> word;
877  }else{
878  model->nSV = NULL;
879  }
880 
881  //Load the SupportVectors
882  //We don't need to read another line here
883  if(word != "SupportVectors:"){
884  errorLog << "loadModelFromFile(fstream &file) - Failed to find SupportVectors header!" << std::endl;
885  clear();
886  return false;
887  }
888 
889  //Setup the memory
890  model->sv_coef = new Float*[numClasses-1];
891  for(UINT j=0;j<numClasses-1;j++) model->sv_coef[j] = new Float[numSV];
892  model->SV = new svm_node*[numSV];
893 
894  for(UINT i=0; i<numSV; i++){
895  for(UINT j=0; j<numClasses-1; j++){
896  file >> model->sv_coef[j][i];
897  }
898 
899  model->SV[i] = new svm_node[numInputDimensions+1];
900 
901  if(model->param.kernel_type == PRECOMPUTED) file >> model->SV[i][0].value;
902  else{
903  for(UINT j=0; j<numInputDimensions; j++){
904  file >> model->SV[i][j].index;
905  file >> model->SV[i][j].value;
906  }
907  model->SV[i][numInputDimensions].index = -1; //Assign the final node value
908  model->SV[i][numInputDimensions].value = 0;
909  }
910  }
911 
912  //Set the class labels
913  this->numClasses = getNumClasses();
914  classLabels.resize(getNumClasses());
915  for(UINT k=0; k<getNumClasses(); k++){
916  classLabels[k] = model->label[k];
917  }
918 
919  //The SV have now been loaded so flag that they should be deleted
920  model->free_sv = 1;
921 
922  //Resize the prediction results to make sure it is setup for realtime prediction
923  maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
924  bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
925  classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
926  classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
927  }
928 
929  return true;
930 }
931 
932 bool SVM::clear(){
933 
934  //Clear the base class
936 
937  crossValidationResult = 0;
938  trained = false;
939  svm_free_and_destroy_model(&model);
940  svm_destroy_param(&param);
941  deleteProblemSet();
942 
943  return true;
944 }
945 
947  return useCrossValidation;
948 }
949 
951  return useAutoGamma;
952 }
953 
954 std::string SVM::getSVMType() const{
955 
956  const struct svm_parameter *paramPtr = NULL;
957  std::string modelName = "UNKNOWN";
958  if( trained ){
959  paramPtr = &model->param;
960  }else paramPtr = &param;
961 
962  switch(paramPtr->svm_type){
963  case(C_SVC):
964  modelName = "C_SVC";
965  break;
966  case(NU_SVC):
967  modelName = "NU_SVC";
968  break;
969  case(ONE_CLASS):
970  modelName = "ONE_CLASS";
971  break;
972  case(EPSILON_SVR):
973  modelName = "EPSILON_SVR";
974  break;
975  case(NU_SVR):
976  modelName = "NU_SVR";
977  break;
978  default:
979  break;
980  }
981 
982  return modelName;
983 }
984 
985 std::string SVM::getKernelType() const{
986  const struct svm_parameter *paramPtr = NULL;
987  std::string modelName = "UNKNOWN";
988  if( trained ){
989  paramPtr = &model->param;
990  }else paramPtr = &param;
991 
992  switch(paramPtr->kernel_type){
993  case(LINEAR_KERNEL):
994  modelName = "LINEAR_KERNEL";
995  break;
996  case(POLY_KERNEL):
997  modelName = "POLY_KERNEL";
998  break;
999  case(RBF_KERNEL):
1000  modelName = "RBF_KERNEL";
1001  break;
1002  case(SIGMOID_KERNEL):
1003  modelName = "SIGMOID_KERNEL";
1004  break;
1005  case(PRECOMPUTED_KERNEL):
1006  modelName = "PRECOMPUTED_KERNEL";
1007  break;
1008  default:
1009  break;
1010  }
1011  return modelName;
1012 }
1013 
1014 UINT SVM::getNumClasses() const{
1015  if( !trained ) return 0;
1016  return (UINT) model->nr_class;
1017 }
1018 
1019 UINT SVM::getDegree() const{
1020  if( trained ){
1021  return (UINT)model->param.degree;
1022  }
1023  return (UINT)param.gamma;
1024 }
1025 
1026 Float SVM::getGamma() const{
1027  if( trained ){
1028  return model->param.gamma;
1029  }
1030  return param.gamma;
1031 }
1032 
1033 Float SVM::getNu() const{
1034  if( trained ){
1035  return model->param.nu;
1036  }
1037  return param.gamma;
1038 }
1039 
1040 Float SVM::getCoef0() const{
1041  if( trained ){
1042  return model->param.coef0;
1043  }
1044  return param.gamma;
1045 }
1046 
1047 Float SVM::getC() const{
1048  if( trained ){
1049  return model->param.C;
1050  }
1051  return param.gamma;
1052 }
1053 
1054 Float SVM::getCrossValidationResult() const{ return crossValidationResult; }
1055 
1056 bool SVM::setSVMType(const UINT svmType){
1057  if( validateSVMType(svmType) ){
1058  param.svm_type = (int)svmType;
1059  return true;
1060  }
1061  return false;
1062 }
1063 
1064 bool SVM::setKernelType(const UINT kernelType){
1065  if( validateKernelType(kernelType) ){
1066  param.kernel_type = (int)kernelType;
1067  return true;
1068  }
1069  warningLog << "setKernelType(UINT kernelType) - Failed to set kernel type, unknown kernelType!" << std::endl;
1070  return false;
1071 }
1072 
1073 bool SVM::setGamma(const Float gamma){
1074  if( !useAutoGamma ){
1075  this->param.gamma = gamma;
1076  return true;
1077  }
1078  warningLog << "setGamma(Float gamma) - Failed to set gamma, useAutoGamma is enabled, setUseAutoGamma to false first!" << std::endl;
1079  return false;
1080 }
1081 
//Sets the degree parameter in the pending LIBSVM parameters; takes effect on the next train.
bool SVM::setDegree(const UINT degree){
    this->param.degree = (int)degree;
    return true;
}
1086 
//Sets the nu parameter in the pending LIBSVM parameters; takes effect on the next train.
bool SVM::setNu(const Float nu){
    this->param.nu = nu;
    return true;
}
1091 
//Sets the coef0 parameter in the pending LIBSVM parameters; takes effect on the next train.
bool SVM::setCoef0(const Float coef0){
    this->param.coef0 = coef0;
    return true;
}
1096 
//Sets the C parameter in the pending LIBSVM parameters; takes effect on the next train.
bool SVM::setC(const Float C){
    this->param.C = C;
    return true;
}
1101 
1102 bool SVM::setKFoldCrossValidationValue(const UINT kFoldValue){
1103  if( kFoldValue > 0 ){
1104  this->kFoldValue = kFoldValue;
1105  return true;
1106  }
1107  warningLog << "setKFoldCrossValidationValue(const UINT kFoldValue) - Failed to set kFoldValue, the kFoldValue must be greater than 0!" << std::endl;
1108  return false;
1109 }
1110 
1111 bool SVM::enableAutoGamma(const bool useAutoGamma){
1112  this->useAutoGamma = useAutoGamma;
1113  return true;
1114 }
1115 
1116 bool SVM::enableCrossValidationTraining(const bool useCrossValidation){
1117  this->useCrossValidation = useCrossValidation;
1118  return true;
1119 }
1120 
1121 bool SVM::validateSVMType(const UINT svmType){
1122  if( svmType == C_SVC ){
1123  return true;
1124  }
1125  if( svmType == NU_SVC ){
1126  return true;
1127  }
1128  if( svmType == ONE_CLASS ){
1129  return true;
1130  }
1131  if( svmType == EPSILON_SVR ){
1132  return true;
1133  }
1134  if( svmType == NU_SVR ){
1135  return true;
1136  }
1137  return false;
1138 }
1139 
1140 bool SVM::validateKernelType(const UINT kernelType){
1141  if( kernelType == LINEAR_KERNEL ){
1142  return true;
1143  }
1144  if( kernelType == POLY_KERNEL ){
1145  return true;
1146  }
1147  if( kernelType == RBF_KERNEL ){
1148  return true;
1149  }
1150  if( kernelType == SIGMOID_KERNEL ){
1151  return true;
1152  }
1153  if( kernelType == PRECOMPUTED_KERNEL ){
1154  return true;
1155  }
1156  return false;
1157 }
1158 
struct svm_model* SVM::deepCopyModel() const{

    //Creates and returns a deep copy of the current LIBSVM model, or NULL if no model exists.
    //The caller takes ownership of the returned model and all the memory it points to.
    if( model == NULL ) return NULL;

    UINT halfNumClasses = 0;

    //Init the memory for the model
    struct svm_model *m = new svm_model;
    m->nr_class = 0;
    m->l = 0;
    m->SV = NULL;
    m->sv_coef = NULL;
    m->rho = NULL;
    m->probA = NULL;
    m->probB = NULL;
    m->label = NULL;
    m->nSV = NULL;
    m->label = NULL; //NOTE(review): duplicate of the assignment two lines above - harmless but redundant
    m->nSV = NULL;   //NOTE(review): duplicate of the assignment two lines above - harmless but redundant
    m->free_sv = 0; //This will be set to 1 if everything is loaded OK

    //Init the memory for the parameters
    m->param.svm_type = 0;
    m->param.kernel_type = 0;
    m->param.degree = 0;
    m->param.gamma = 0;
    m->param.coef0 = 0;
    m->param.cache_size = 0;
    m->param.eps = 0;
    m->param.C = 0;
    m->param.nr_weight = 0;
    m->param.weight_label = NULL;
    m->param.weight = NULL;
    m->param.nu = 0;
    m->param.p = 0;
    m->param.shrinking = 0;
    m->param.probability = 1;

    //Copy the parameters
    m->param.svm_type = model->param.svm_type;
    m->param.kernel_type = model->param.kernel_type ;
    m->param.degree = model->param.degree;
    m->param.gamma = model->param.gamma;
    m->param.coef0 = model->param.coef0;
    m->nr_class = model->nr_class;
    m->l = model->l;
    m->param.shrinking = model->param.shrinking;
    m->param.probability = model->param.probability;

    //Setup the values
    //One value per pair of classes (nr_class choose 2), matching LIBSVM's rho/probA/probB layout
    halfNumClasses = model->nr_class*(model->nr_class-1)/2;

    m->rho = new Float[ halfNumClasses ];
    for(int i=0;i <model->nr_class*(model->nr_class-1)/2; i++) m->rho[i] = model->rho[i];

    //label, probA, probB and nSV are optional in the source model; only copy the ones that exist
    if( model->label != NULL ){
        m->label = new int[ model->nr_class ];
        for(int i=0;i<model->nr_class;i++) m->label[i] = model->label[i];
    }

    if( model->probA != NULL ){
        m->probA = new Float[ halfNumClasses ];
        for(UINT i=0;i<halfNumClasses; i++) m->probA[i] = model->probA[i];
    }

    if( model->probB != NULL ){
        m->probB = new Float[ halfNumClasses ];
        for(UINT i=0; i<halfNumClasses; i++) m->probB[i] = model->probB[i];
    }

    if( model->nSV != NULL ){
        m->nSV = new int[ model->nr_class ];
        for(int i=0; i<model->nr_class; i++) m->nSV[i] = model->nSV[i];
    }

    //Setup the memory
    //NOTE(review): the allocations below use the classifier's numClasses / numInputDimensions members,
    //while the copy loops use model->nr_class - this assumes the members agree with the model; confirm
    m->sv_coef = new Float*[numClasses-1];
    for(UINT j=0;j<numClasses-1;j++) m->sv_coef[j] = new Float[model->l];
    m->SV = new svm_node*[model->l];

    //Copy the support vector coefficients and the support vectors themselves
    for(int i=0; i<model->l; i++){
        for(int j=0; j<model->nr_class-1; j++){
            m->sv_coef[j][i] = model->sv_coef[j][i];
        }

        m->SV[i] = new svm_node[numInputDimensions+1];

        //Precomputed kernels store a single value per SV; all other kernels store one node per input dimension
        if(model->param.kernel_type == PRECOMPUTED) m->SV[i][0].value = model->SV[i][0].value;
        else{
            for(UINT j=0; j<numInputDimensions; j++){
                m->SV[i][j].index = model->SV[i][j].index;
                m->SV[i][j].value = model->SV[i][j].value;
            }
            m->SV[i][numInputDimensions].index = -1; //Assign the final node value
            m->SV[i][numInputDimensions].value = 0;
        }
    }

    //The SV have now been loaded so flag that they should be deleted
    m->free_sv = 1;

    return m;
}
1262 
1263 bool SVM::deepCopyProblem( const struct svm_problem &source, struct svm_problem &target, const unsigned int numInputDimensions ) const{
1264 
1265  //Cleanup the target memory
1266  if( target.y != NULL ){
1267  delete[] target.y;
1268  target.y = NULL;
1269  }
1270  if( target.x != NULL ){
1271  for(int i=0; i<target.l; i++){
1272  delete[] target.x[i];
1273  target.x[i] = NULL;
1274  }
1275  }
1276 
1277  //Deep copy the source to the target
1278  target.l = source.l;
1279 
1280  if( source.x != NULL ){
1281  target.x = new svm_node*[ target.l ];
1282  for(int i=0; i<target.l; i++){
1283  target.x[i] = new svm_node[ numInputDimensions+1 ];
1284  for(unsigned int j=0; j<numInputDimensions+1; j++){
1285  target.x[i][j] = source.x[i][j];
1286  }
1287  }
1288  }
1289 
1290  if( source.y != NULL ){
1291  target.y = new Float[ target.l ];
1292  for(int i=0; i<target.l; i++){
1293  target.y[i] = source.y[i];
1294  }
1295  }
1296 
1297  return true;
1298 }
1299 
1300 bool SVM::deepCopyParam( const svm_parameter &source_param, svm_parameter &target_param ) const{
1301 
1302  //Cleanup any dynamic memory in the target
1303  if( target_param.weight_label != NULL ){
1304  delete[] target_param.weight_label;
1305  target_param.weight_label = NULL;
1306  }
1307  if( target_param.weight != NULL ){
1308  delete[] target_param.weight;
1309  target_param.weight = NULL;
1310  }
1311 
1312  //Copy the non dynamic variables
1313  target_param.svm_type = source_param.svm_type;
1314  target_param.kernel_type = source_param.kernel_type;
1315  target_param.degree = source_param.degree;
1316  target_param.gamma = source_param.gamma;
1317  target_param.coef0 = source_param.coef0;
1318  target_param.cache_size = source_param.cache_size;
1319  target_param.eps = source_param.eps;
1320  target_param.C = source_param.C;
1321  target_param.nr_weight = source_param.nr_weight;
1322  target_param.nu = source_param.nu;
1323  target_param.p = source_param.p;
1324  target_param.shrinking = source_param.shrinking;
1325  target_param.probability = source_param.probability;
1326 
1327  //Copy any dynamic memory
1328  if( source_param.weight_label != NULL ){
1329 
1330  }
1331  if( source_param.weight != NULL ){
1332 
1333  }
1334 
1335  return true;
1336 }
1337 
bool SVM::loadLegacyModelFromFile( std::fstream &file ){

    //Loads an SVM model stored in the legacy GRT file format. The stream is expected to be
    //open and positioned just after the file header. On any parse error the classifier is
    //cleared and false is returned; on success the model is ready for prediction.

    std::string word;

    UINT numSV = 0;
    UINT halfNumClasses = 0;
    numInputDimensions = 0;

    //Init the memory for the model
    model = new svm_model;
    model->nr_class = 0;
    model->l = 0;
    model->SV = NULL;
    model->sv_coef = NULL;
    model->rho = NULL;
    model->probA = NULL;
    model->probB = NULL;
    model->label = NULL;
    model->nSV = NULL;
    model->label = NULL; //NOTE(review): duplicate of the assignment two lines above - harmless but redundant
    model->nSV = NULL;   //NOTE(review): duplicate of the assignment two lines above - harmless but redundant
    model->free_sv = 0; //This will be set to 1 if everything is loaded OK

    //Init the memory for the parameters
    model->param.svm_type = 0;
    model->param.kernel_type = 0;
    model->param.degree = 0;
    model->param.gamma = 0;
    model->param.coef0 = 0;
    model->param.cache_size = 0;
    model->param.eps = 0;
    model->param.C = 0;
    model->param.nr_weight = 0;
    model->param.weight_label = NULL;
    model->param.weight = NULL;
    model->param.nu = 0;
    model->param.p = 0;
    model->param.shrinking = 0;
    model->param.probability = 1;

    //Load the model type
    file >> word;
    if(word != "ModelType:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find ModelType header!" << std::endl;
        clear();
        return false;
    }
    //Map the stored type name onto the LIBSVM svm_type constant
    file >> word;
    if( word == "C_SVC" ){
        model->param.svm_type = C_SVC;
    }else{
        if( word == "NU_SVC" ){
            model->param.svm_type = NU_SVC;
        }else{
            if( word == "ONE_CLASS" ){
                model->param.svm_type = ONE_CLASS;
            }else{
                if( word == "EPSILON_SVR" ){
                    model->param.svm_type = EPSILON_SVR;
                }else{
                    if( word == "NU_SVR" ){
                        model->param.svm_type = NU_SVR;
                    }else{
                        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find SVM type!" << std::endl;
                        clear();
                        return false;
                    }
                }
            }
        }
    }

    //Load the model type
    file >> word;
    if(word != "KernelType:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find kernel type!" << std::endl;
        clear();
        return false;
    }
    //Map the stored kernel name onto the LIBSVM kernel_type constant
    file >> word;
    if( word == "LINEAR" ){
        model->param.kernel_type = LINEAR;
    }else{
        if( word == "POLYNOMIAL" ){
            model->param.kernel_type = POLY;
        }else{
            if( word == "RBF" ){
                model->param.kernel_type = RBF;
            }else{
                if( word == "SIGMOID" ){
                    model->param.kernel_type = SIGMOID;
                }else{
                    if( word == "PRECOMPUTED" ){
                        model->param.kernel_type = PRECOMPUTED;
                    }else{
                        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find kernel type!" << std::endl;
                        clear();
                        return false;
                    }
                }
            }
        }
    }

    //Load the degree
    file >> word;
    if(word != "Degree:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find Degree header!" << std::endl;
        clear();
        return false;
    }
    file >> model->param.degree;

    //Load the gamma
    file >> word;
    if(word != "Gamma:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find Gamma header!" << std::endl;
        clear();
        return false;
    }
    file >> model->param.gamma;

    //Load the Coef0
    file >> word;
    if(word != "Coef0:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find Coef0 header!" << std::endl;
        clear();
        return false;
    }
    file >> model->param.coef0;

    //Load the NumberOfClasses
    file >> word;
    if(word != "NumberOfClasses:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find NumberOfClasses header!" << std::endl;
        clear();
        return false;
    }
    file >> numClasses;

    //Load the NumberOfSupportVectors
    file >> word;
    if(word != "NumberOfSupportVectors:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find NumberOfSupportVectors header!" << std::endl;
        clear();
        return false;
    }
    file >> numSV;

    //Load the NumberOfFeatures
    file >> word;
    if(word != "NumberOfFeatures:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find NumberOfFeatures header!" << std::endl;
        clear();
        return false;
    }
    file >> numInputDimensions;

    //Load the UseShrinking
    file >> word;
    if(word != "UseShrinking:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find UseShrinking header!" << std::endl;
        clear();
        return false;
    }
    file >> model->param.shrinking;

    //Load the UseProbability
    file >> word;
    if(word != "UseProbability:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find UseProbability header!" << std::endl;
        clear();
        return false;
    }
    file >> model->param.probability;

    //Load the UseScaling
    file >> word;
    if(word != "UseScaling:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find UseScaling header!" << std::endl;
        clear();
        return false;
    }
    file >> useScaling;

    //Load the Ranges
    file >> word;
    if(word != "Ranges:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find Ranges header!" << std::endl;
        clear();
        return false;
    }
    //Setup the memory for the ranges
    ranges.clear();
    ranges.resize(numInputDimensions);

    //Read one min/max pair per input dimension
    for(UINT i=0; i<ranges.size(); i++){
        file >> ranges[i].minValue;
        file >> ranges[i].maxValue;
    }

    //Setup the values
    //halfNumClasses is the number of pairwise class combinations (nr_class choose 2),
    //matching LIBSVM's rho/probA/probB layout
    halfNumClasses = numClasses*(numClasses-1)/2;
    model->nr_class = numClasses;
    model->l = numSV;

    //Load the RHO
    file >> word;
    if(word != "RHO:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find RHO header!" << std::endl;
        clear();
        return false;
    }
    model->rho = new Float[ halfNumClasses ];
    for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->rho[i];

    //See if we can load the Labels
    //The Label, ProbA, ProbB and NumSupportVectorsPerClass sections are all optional;
    //each branch below reads one word ahead so the next check sees the next header
    file >> word;
    if(word != "Label:"){
        model->label = NULL;
    }else{
        model->label = new int[ numClasses ];
        for(UINT i=0;i<numClasses;i++) file >> model->label[i];
        //We only need to read a new line if we found the label!
        file >> word;
    }

    //See if we can load the ProbA
    //We don't need to read another line here
    if(word != "ProbA:"){
        model->probA = NULL;
    }else{
        model->probA = new Float[ halfNumClasses ];
        for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probA[i];
        //We only need to read a new line if we found the label!
        file >> word;
    }

    //See if we can load the ProbB
    //We don't need to read another line here
    if(word != "ProbB:"){
        model->probB = NULL;
    }else{
        model->probB = new Float[ halfNumClasses ];
        for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probB[i];
        //We only need to read a new line if we found the label!
        file >> word;
    }

    //See if we can load the NumSupportVectorsPerClass
    //We don't need to read another line here
    if(word != "NumSupportVectorsPerClass:"){
        model->nSV = NULL;
    }else{
        model->nSV = new int[ numClasses ];
        for(UINT i=0;i<numClasses;i++) file >> model->nSV[i];
        //We only need to read a new line if we found the label!
        file >> word;
    }

    //Load the SupportVectors
    //We don't need to read another line here
    if(word != "SupportVectors:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Failed to find SupportVectors header!" << std::endl;
        clear();
        return false;
    }

    //Setup the memory
    model->sv_coef = new Float*[numClasses-1];
    for(UINT j=0;j<numClasses-1;j++) model->sv_coef[j] = new Float[numSV];
    model->SV = new svm_node*[numSV];

    //Read the coefficients and support vector nodes for each support vector
    for(UINT i=0; i<numSV; i++){
        for(UINT j=0; j<numClasses-1; j++){
            file >> model->sv_coef[j][i];
        }

        model->SV[i] = new svm_node[numInputDimensions+1];

        //Precomputed kernels store a single value per SV; all other kernels store
        //one (index, value) pair per input dimension followed by a terminator node
        if(model->param.kernel_type == PRECOMPUTED) file >> model->SV[i][0].value;
        else{
            for(UINT j=0; j<numInputDimensions; j++){
                file >> model->SV[i][j].index;
                file >> model->SV[i][j].value;
            }
            model->SV[i][numInputDimensions].index = -1; //Assign the final node value
            model->SV[i][numInputDimensions].value = 0;
        }
    }

    //Set the class labels
    //NOTE(review): model->label can still be NULL here if the optional Label: section was
    //missing - presumably legacy files always contain it, but this loop would dereference
    //NULL otherwise; confirm against the legacy file format
    this->numClasses = getNumClasses();
    classLabels.resize(getNumClasses());
    for(UINT k=0; k<getNumClasses(); k++){
        classLabels[k] = model->label[k];
    }

    //The SV have now been loaded so flag that they should be deleted
    model->free_sv = 1;

    //Finally, flag that the model has been trained to show it has been loaded and can be used for prediction
    trained = true;

    return true;
}
1645 
1646 GRT_END_NAMESPACE
bool saveBaseSettingsToFile(std::fstream &file) const
Definition: Classifier.cpp:255
std::string getKernelType() const
Definition: SVM.cpp:985
bool setCoef0(const Float coef0)
Definition: SVM.cpp:1092
virtual bool loadModelFromFile(std::fstream &file)
Definition: SVM.cpp:627
#define DEFAULT_NULL_LIKELIHOOD_VALUE
Definition: Classifier.h:38
Float getCrossValidationResult() const
Definition: SVM.cpp:1054
bool enableCrossValidationTraining(const bool useCrossValidation)
Definition: SVM.cpp:1116
bool setC(const Float C)
Definition: SVM.cpp:1097
virtual bool clear()
Definition: SVM.cpp:932
bool loadLegacyModelFromFile(std::fstream &file)
Definition: SVM.cpp:1338
virtual ~SVM()
Definition: SVM.cpp:90
bool enableAutoGamma(const bool useAutoGamma)
Definition: SVM.cpp:1111
bool getIsCrossValidationTrainingEnabled() const
Definition: SVM.cpp:946
bool setSVMType(const UINT svmType)
Definition: SVM.cpp:1056
Float getNu() const
Definition: SVM.cpp:1033
std::string getClassifierType() const
Definition: Classifier.cpp:160
SVM(UINT kernelType=LINEAR_KERNEL, UINT svmType=C_SVC, bool useScaling=true, bool useNullRejection=false, bool useAutoGamma=true, Float gamma=0.1, UINT degree=3, Float coef0=0, Float nu=0.5, Float C=1, bool useCrossValidation=false, UINT kFoldValue=10)
Definition: SVM.cpp:28
std::string getSVMType() const
Definition: SVM.cpp:954
bool getIsAutoGammaEnabled() const
Definition: SVM.cpp:950
virtual UINT getNumClasses() const
Definition: SVM.cpp:1014
virtual bool resize(const unsigned int size)
Definition: Vector.h:133
This class acts as a front end for the LIBSVM library (http://www.csie.ntu.edu.tw/~cjlin/libsvm/). It implements a Support Vector Machine (SVM) classifier, a powerful classifier that works well on a wide range of classification problems, particularly on more complex problems that other classifiers (such as the KNN, GMM or ANBC algorithms) might not be able to solve.
void initDefaultSVMSettings()
Definition: SVM.cpp:255
Float getCoef0() const
Definition: SVM.cpp:1040
UINT getNumSamples() const
Definition: SVM.h:49
bool init(UINT kernelType, UINT svmType, bool useScaling, bool useNullRejection, bool useAutoGamma, Float gamma, UINT degree, Float coef0, Float nu, Float C, bool useCrossValidation, UINT kFoldValue)
Definition: SVM.cpp:199
bool setDegree(const UINT degree)
Definition: SVM.cpp:1082
bool setKernelType(const UINT kernelType)
Definition: SVM.cpp:1064
bool copyBaseVariables(const Classifier *classifier)
Definition: Classifier.cpp:92
bool loadBaseSettingsFromFile(std::fstream &file)
Definition: Classifier.cpp:302
Float getGamma() const
Definition: SVM.cpp:1026
UINT getNumDimensions() const
virtual bool train_(ClassificationData &trainingData)
Definition: SVM.cpp:143
bool setKFoldCrossValidationValue(const UINT kFoldValue)
Definition: SVM.cpp:1102
Vector< MinMax > getRanges() const
bool setNu(const Float nu)
Definition: SVM.cpp:1087
virtual bool saveModelToFile(std::fstream &file) const
Definition: SVM.cpp:499
bool setGamma(const Float gamma)
Definition: SVM.cpp:1073
UINT getDegree() const
Definition: SVM.cpp:1019
virtual bool deepCopyFrom(const Classifier *classifier)
Definition: SVM.cpp:116
Float getC() const
Definition: SVM.cpp:1047
virtual bool clear()
Definition: Classifier.cpp:141
SVM & operator=(const SVM &rhs)
Definition: SVM.cpp:94
virtual bool predict_(VectorFloat &inputVector)
Definition: SVM.cpp:172