GestureRecognitionToolkit  Version: 0.2.5
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source, c++ machine learning library for real-time gesture recognition.
SVM.cpp
1 /*
2 GRT MIT License
3 Copyright (c) <2012> <Nicholas Gillian, Media Lab, MIT>
4 
5 Permission is hereby granted, free of charge, to any person obtaining a copy of this software
6 and associated documentation files (the "Software"), to deal in the Software without restriction,
7 including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
9 subject to the following conditions:
10 
11 The above copyright notice and this permission notice shall be included in all copies or substantial
12 portions of the Software.
13 
14 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
15 LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
16 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
17 WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
18 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 */
20 
21 #define GRT_DLL_EXPORTS
22 #include "SVM.h"
23 
24 using namespace LIBSVM;
25 
26 GRT_BEGIN_NAMESPACE
27 
28 //Define the string that will be used to identify the object
29 const std::string SVM::id = "SVM";
30 std::string SVM::getId() { return SVM::id; }
31 
32 //Register the SVM module with the Classifier base class
33 RegisterClassifierModule< SVM > SVM::registerModule( SVM::getId() );
34 
35 SVM::SVM(KernelType kernelType,SVMType svmType,bool useScaling,bool useNullRejection,bool useAutoGamma,Float gamma,UINT degree,Float coef0,Float nu,Float C,bool useCrossValidation,UINT kFoldValue) : Classifier( SVM::getId() )
36 {
37 
38  //Setup the default SVM parameters
39  model = NULL;
40  param.weight_label = NULL;
41  param.weight = NULL;
42  prob.l = 0;
43  prob.x = NULL;
44  prob.y = NULL;
45  trained = false;
46  problemSet = false;
47  param.svm_type = C_SVC;
48  param.kernel_type = LINEAR_KERNEL;
49  param.degree = 3;
50  param.gamma = 0;
51  param.coef0 = 0;
52  param.nu = 0.5;
53  param.cache_size = 100;
54  param.C = 1;
55  param.eps = 1e-3;
56  param.p = 0.1;
57  param.shrinking = 1;
58  param.probability = 1;
59  param.nr_weight = 0;
60  param.weight_label = NULL;
61  param.weight = NULL;
62  this->useScaling = false;
63  this->useCrossValidation = false;
64  this->useNullRejection = false;
65  this->useAutoGamma = true;
66  classificationThreshold = 0.5;
67  crossValidationResult = 0;
68 
69  classifierMode = STANDARD_CLASSIFIER_MODE;
70 
71  init(kernelType,svmType,useScaling,useNullRejection,useAutoGamma,gamma,degree,coef0,nu,C,useCrossValidation,kFoldValue);
72 }
73 
74 SVM::SVM(const SVM &rhs) : Classifier( SVM::getId() )
75 {
76  model = NULL;
77  param.weight_label = NULL;
78  param.weight = NULL;
79  prob.l = 0;
80  prob.x = NULL;
81  prob.y = NULL;
82  classifierMode = STANDARD_CLASSIFIER_MODE;
83  *this = rhs;
84 }
85 
86 
88  clear();
89 }
90 
91 SVM& SVM::operator=(const SVM &rhs){
92  if( this != &rhs ){
93 
94  this->clear();
95 
96  //SVM variables
97  this->problemSet = false; //We do not copy the problem set
98  this->model = rhs.deepCopyModel();
99  this->deepCopyParam( rhs.param, this->param );
100  this->numInputDimensions = rhs.numInputDimensions;
101  this->kFoldValue = rhs.kFoldValue;
102  this->classificationThreshold = rhs.classificationThreshold;
103  this->crossValidationResult = rhs.crossValidationResult;
104  this->useAutoGamma = rhs.useAutoGamma;
105  this->useCrossValidation = rhs.useCrossValidation;
106 
107  //Classifier variables
108  copyBaseVariables( (Classifier*)&rhs );
109  }
110  return *this;
111 }
112 
113 bool SVM::deepCopyFrom(const Classifier *classifier){
114 
115  if( classifier == NULL ) return false;
116 
117  if( this->getId() == classifier->getId() ){
118  const SVM *ptr = dynamic_cast<const SVM*>(classifier);
119 
120  this->clear();
121 
122  //SVM variables
123  this->problemSet = false;
124  this->model = ptr->deepCopyModel();
125  this->deepCopyParam( ptr->param, this->param );
126  this->numInputDimensions = ptr->numInputDimensions;
127  this->kFoldValue = ptr->kFoldValue;
128  this->classificationThreshold = ptr->classificationThreshold;
129  this->crossValidationResult = ptr->crossValidationResult;
130  this->useAutoGamma = ptr->useAutoGamma;
131  this->useCrossValidation = ptr->useCrossValidation;
132 
133  //Classifier variables
134  return copyBaseVariables( classifier );
135  }
136 
137  return false;
138 }
139 
bool SVM::train_(ClassificationData &trainingData){

    //Trains the SVM model from the given classification data. Returns true
    //if training (and the post-train accuracy checks) succeeded.

    //Clear any previous model
    clear();

    if( trainingData.getNumSamples() == 0 ){
        errorLog << __GRT_LOG__ << " Training data has zero samples!" << std::endl;
        return false;
    }

    //Record the ranges of the unscaled data so future inputs can be scaled consistently
    ranges = trainingData.getRanges();
    ClassificationData validationData;

    //Scale the training data if needed
    if( useScaling ){
        //Scale the training data into the SVM scale range
        trainingData.scale(SVM_MIN_SCALE_RANGE, SVM_MAX_SCALE_RANGE);
    }

    if( useValidationSet ){
        //Split off validationSetSize percent of the (already scaled) data for validation
        validationData = trainingData.split( 100-validationSetSize );
    }

    //Convert the labelled classification data into the LIBSVM data format
    if( !convertClassificationDataToLIBSVMFormat(trainingData) ){
        errorLog << __GRT_LOG__ << " Failed To Convert Classification Data To LIBSVM Format!" << std::endl;
        return false;
    }

    //Auto gamma uses the common 1/numFeatures heuristic
    if( useAutoGamma ) param.gamma = 1.0/numInputDimensions;

    //Train the model
    bool trainingResult = trainSVM();

    if(! trainingResult ){
        errorLog << __GRT_LOG__ << " Failed To Train SVM Model!" << std::endl;
        return false;
    }

    //Flag that the models have been trained
    trained = true;
    converged = true;

    //Compute the final training stats
    trainingSetAccuracy = 0;
    validationSetAccuracy = 0;

    //If scaling was on, then the data will already be scaled, so turn it off temporarily so we can test the model accuracy
    bool scalingState = useScaling;
    useScaling = false;
    if( !computeAccuracy( trainingData, trainingSetAccuracy ) ){
        trained = false;
        errorLog << __GRT_LOG__ << " Failed to compute training set accuracy! Failed to fully train model!" << std::endl;
        return false;
    }

    if( useValidationSet ){
        if( !computeAccuracy( validationData, validationSetAccuracy ) ){
            trained = false;
            errorLog << __GRT_LOG__ << " Failed to compute validation set accuracy! Failed to fully train model!" << std::endl;
            return false;
        }
    }

    trainingLog << "Training set accuracy: " << trainingSetAccuracy << std::endl;

    if( useValidationSet ){
        trainingLog << "Validation set accuracy: " << validationSetAccuracy << std::endl;
    }

    //Reset the scaling state for future prediction
    useScaling = scalingState;

    return trained;
}
215 
216 bool SVM::predict_(VectorFloat &inputVector){
217 
218  if( !trained ){
219  errorLog << __GRT_LOG__ << " The SVM model has not been trained!" << std::endl;
220  return false;
221  }
222 
223  if( inputVector.getSize() != numInputDimensions ){
224  errorLog << __GRT_LOG__ << " The size of the input vector (" << inputVector.getSize() << ") does not match the number of features of the model (" << numInputDimensions << ")" << std::endl;
225  return false;
226  }
227 
228  if( param.probability == 1 ){
229  if( !predictSVM( inputVector, maxLikelihood, classLikelihoods ) ){
230  errorLog << __GRT_LOG__ << " Prediction Failed!" << std::endl;
231  return false;
232  }
233  }else{
234  if( !predictSVM( inputVector ) ){
235  errorLog << __GRT_LOG__ << " Prediction Failed!" << std::endl;
236  return false;
237  }
238  }
239 
240  return true;
241 }
242 
243 bool SVM::init(KernelType kernelType,SVMType svmType,bool useScaling,bool useNullRejection,bool useAutoGamma,Float gamma,UINT degree,Float coef0,Float nu,Float C,bool useCrossValidation,UINT kFoldValue){
244 
245  //Clear any previous models or problems
246  clear();
247 
248  //Validate the kernerlType
249  if( !validateKernelType(kernelType) ){
250  errorLog << __GRT_LOG__ << " Unknown kernelType!\n";
251  return false;
252  }
253 
254  if( !validateSVMType(svmType) ){
255  errorLog << __GRT_LOG__ << " Unknown kernelType!\n";
256  return false;
257  }
258 
259  param.svm_type = (int)svmType;
260  param.kernel_type = (int)kernelType;
261  param.degree = (int)degree;
262  param.gamma = gamma;
263  param.coef0 = coef0;
264  param.nu = nu;
265  param.cache_size = 100;
266  param.C = C;
267  param.eps = 1e-3;
268  param.p = 0.1;
269  param.shrinking = 1;
270  param.probability = 1;
271  param.nr_weight = 0;
272  param.weight_label = NULL;
273  param.weight = NULL;
274  this->useScaling = useScaling;
275  this->useCrossValidation = useCrossValidation;
276  this->useNullRejection = useNullRejection;
277  this->useAutoGamma = useAutoGamma;
278  classificationThreshold = 0.5;
279  crossValidationResult = 0;
280 
281  return true;
282 }
283 
284 void SVM::deleteProblemSet(){
285  if( problemSet ){
286  for(int i=0; i<prob.l; i++){
287  delete[] prob.x[i];
288  prob.x[i] = NULL;
289  }
290  delete[] prob.x;
291  delete[] prob.y;
292  prob.l = 0;
293  prob.x = NULL;
294  prob.y = NULL;
295  problemSet = false;
296  }
297 }
298 
300 
301  //Clear any previous models, parameters or probelms
302  clear();
303 
304  //Setup the SVM parameters
305  param.svm_type = C_SVC;
306  param.kernel_type = LINEAR_KERNEL;
307  param.degree = 3;
308  param.gamma = 0;
309  param.coef0 = 0;
310  param.nu = 0.5;
311  param.cache_size = 100;
312  param.C = 1;
313  param.eps = 1e-3;
314  param.p = 0.1;
315  param.shrinking = 1;
316  param.probability = 1;
317  param.nr_weight = 0;
318  param.weight_label = NULL;
319  param.weight = NULL;
320  useCrossValidation = false;
321  kFoldValue = 10;
322  useAutoGamma = true;
323 }
324 
325 bool SVM::validateProblemAndParameters(){
326  //Check the parameters match the problem
327  const char *errorMsg = svm_check_parameter(&prob,&param);
328 
329  if( errorMsg ){
330  errorLog << __GRT_LOG__ << " Parameters do not match problem! error: " << errorMsg << std::endl;
331  return false;
332  }
333 
334  return true;
335 }
336 
337 bool SVM::trainSVM(){
338 
339  crossValidationResult = 0;
340 
341  //Erase any previous models
342  if( trained ){
343  svm_free_and_destroy_model(&model);
344  trained = false;
345  }
346 
347  //Check to make sure the problem has been set
348  if( !problemSet ){
349  errorLog << __GRT_LOG__ << " Problem not set!" << std::endl;
350  return false;
351  }
352 
353  //Verify the problem and the parameters
354  if( !validateProblemAndParameters() ) return false;
355 
356  if( useCrossValidation ){
357  int i;
358  Float total_correct = 0;
359  Float total_error = 0;
360  Float sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
361  Float *target = new Float[prob.l];
362 
363  svm_cross_validation(&prob,&param,kFoldValue,target);
364  if( param.svm_type == EPSILON_SVR || param.svm_type == NU_SVR )
365  {
366  for(i=0;i<prob.l;i++)
367  {
368  Float y = prob.y[i];
369  Float v = target[i];
370  total_error += (v-y)*(v-y);
371  sumv += v;
372  sumy += y;
373  sumvv += v*v;
374  sumyy += y*y;
375  sumvy += v*y;
376  }
377  crossValidationResult = total_error/prob.l;
378  }
379  else
380  {
381  for(i=0;i<prob.l;i++){
382  if(target[i] == prob.y[i]){
383  ++total_correct;
384  }
385  }
386  crossValidationResult = total_correct/prob.l*100.0;
387  }
388  delete[] target;
389  }
390 
391  //Train the SVM - if we are running cross validation then the CV will be run first followed by a full train
392  model = svm_train(&prob,&param);
393 
394  if( model == NULL ){
395  errorLog << __GRT_LOG__ << " Failed to train SVM Model!" << std::endl;
396  return false;
397  }
398 
399  if( model != NULL ){
400  trained = true;
401  numClasses = getNumClasses();
402  classLabels.resize( getNumClasses() );
403  for(UINT k=0; k<getNumClasses(); k++){
404  classLabels[k] = model->label[k];
405  }
406  classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
407  classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
408  }
409 
410  return trained;
411 }
412 
413 bool SVM::predictSVM(VectorFloat &inputVector){
414 
415  if( !trained || inputVector.size() != numInputDimensions ) return false;
416 
417  svm_node *x = NULL;
418 
419  //Copy the input data into the SVM format
420  x = new svm_node[numInputDimensions+1];
421  for(UINT j=0; j<numInputDimensions; j++){
422  x[j].index = (int)j+1;
423  x[j].value = inputVector[j];
424  }
425  //The last value in the input vector must be set to -1
426  x[numInputDimensions].index = -1;
427  x[numInputDimensions].value = 0;
428 
429  //Scale the input data if required
430  if( useScaling ){
431  for(UINT i=0; i<numInputDimensions; i++)
432  x[i].value = grt_scale(x[i].value,ranges[i].minValue,ranges[i].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
433  }
434 
435  //Perform the SVM prediction
436  Float predict_label = svm_predict(model,x);
437 
438  //We can't do null rejection without the probabilities, so just set the predicted class
439  predictedClassLabel = (UINT)predict_label;
440 
441  //Clean up the memory
442  delete[] x;
443 
444  return true;
445 }
446 
447 bool SVM::predictSVM(VectorFloat &inputVector,Float &maxProbability, VectorFloat &probabilites){
448 
449  if( !trained || param.probability == 0 || inputVector.size() != numInputDimensions ) return false;
450 
451  Float *prob_estimates = NULL;
452  svm_node *x = NULL;
453 
454  //Setup the memory for the probability estimates
455  prob_estimates = new Float[ model->nr_class ];
456 
457  //Copy the input data into the SVM format
458  x = new svm_node[numInputDimensions+1];
459  for(UINT j=0; j<numInputDimensions; j++){
460  x[j].index = (int)j+1;
461  x[j].value = inputVector[j];
462  }
463  //The last value in the input vector must be set to -1
464  x[numInputDimensions].index = -1;
465  x[numInputDimensions].value = 0;
466 
467  //Scale the input data if required
468  if( useScaling ){
469  for(UINT j=0; j<numInputDimensions; j++)
470  x[j].value = grt_scale(x[j].value,ranges[j].minValue,ranges[j].maxValue,SVM_MIN_SCALE_RANGE,SVM_MAX_SCALE_RANGE);
471  }
472 
473  //Perform the SVM prediction
474  Float predict_label = svm_predict_probability(model,x,prob_estimates);
475 
476  predictedClassLabel = 0;
477  maxProbability = 0;
478  probabilites.resize(model->nr_class);
479  for(int k=0; k<model->nr_class; k++){
480  if( maxProbability < prob_estimates[k] ){
481  maxProbability = prob_estimates[k];
482  predictedClassLabel = k+1;
483  maxLikelihood = maxProbability;
484  }
485  probabilites[k] = prob_estimates[k];
486  }
487 
488  if( !useNullRejection ) predictedClassLabel = (UINT)predict_label;
489  else{
490  if( maxProbability >= classificationThreshold ){
491  predictedClassLabel = (UINT)predict_label;
492  }else predictedClassLabel = GRT_DEFAULT_NULL_CLASS_LABEL;
493  }
494 
495  //Clean up the memory
496  delete[] prob_estimates;
497  delete[] x;
498 
499  return true;
500 }
501 
502 bool SVM::convertClassificationDataToLIBSVMFormat(ClassificationData &trainingData){
503 
504  //clear any previous problems
505  deleteProblemSet();
506 
507  const UINT numTrainingExamples = trainingData.getNumSamples();
508  numInputDimensions = trainingData.getNumDimensions();
509  numOutputDimensions = trainingData.getNumClasses();
510 
511  //Init the memory
512  prob.l = numTrainingExamples;
513  prob.x = new svm_node*[numTrainingExamples];
514  prob.y = new Float[numTrainingExamples];
515  problemSet = true;
516 
517  for(UINT i=0; i<numTrainingExamples; i++){
518  //Set the class ID
519  prob.y[i] = trainingData[i].getClassLabel();
520 
521  //Assign the memory for this training example, note that a dummy node is needed at the end of the vector
522  prob.x[i] = new svm_node[numInputDimensions+1];
523  for(UINT j=0; j<numInputDimensions; j++){
524  prob.x[i][j].index = j+1;
525  prob.x[i][j].value = trainingData[i].getSample()[j];
526  }
527  prob.x[i][numInputDimensions].index = -1; //Assign the final node value
528  prob.x[i][numInputDimensions].value = 0;
529  }
530 
531  return true;
532 }
533 
534 bool SVM::save( std::fstream &file ) const{
535 
536  if( !file.is_open() ){
537  return false;
538  }
539 
540  file << "SVM_MODEL_FILE_V2.0\n";
541 
542  //Write the classifier settings to the file
544  errorLog << __GRT_LOG__ << " Failed to save classifier base settings to file!" << std::endl;
545  return false;
546  }
547 
548  const svm_parameter& param = trained ? model->param : this->param;
549 
550  file << "ModelType: ";
551  switch( param.svm_type ){
552  case(C_SVC):
553  file << "C_SVC";
554  break;
555  case(NU_SVC):
556  file << "NU_SVC";
557  break;
558  case(ONE_CLASS):
559  file << "ONE_CLASS";
560  break;
561  case(EPSILON_SVR):
562  file << "EPSILON_SVR";
563  break;
564  case(NU_SVR):
565  file << "NU_SVR";
566  break;
567  default:
568  errorLog << __GRT_LOG__ << " Invalid model type: " << param.svm_type << std::endl;
569  return false;
570  break;
571  }
572  file << std::endl;
573 
574  file << "KernelType: ";
575  switch(param.kernel_type){
576  case(LINEAR):
577  file << "LINEAR";
578  break;
579  case(POLY):
580  file << "POLYNOMIAL";
581  break;
582  case(RBF):
583  file << "RBF";
584  break;
585  case(SIGMOID):
586  file << "SIGMOID";
587  break;
588  case(PRECOMPUTED):
589  file << "PRECOMPUTED";
590  break;
591  default:
592  errorLog << __GRT_LOG__ << " Invalid kernel type: " << param.kernel_type << std::endl;
593  return false;
594  break;
595  }
596  file << std::endl;
597  file << "Degree: " << param.degree << std::endl;
598  file << "Gamma: " << param.gamma << std::endl;
599  file << "Coef0: " << param.coef0 << std::endl;
600  file << "NumberOfFeatures: " << numInputDimensions << std::endl;
601  file << "UseShrinking: " << param.shrinking << std::endl;
602  file << "UseProbability: " << param.probability << std::endl;
603 
604  if( trained ){
605  UINT numClasses = (UINT)model->nr_class;
606  UINT numSV = (UINT)model->l;
607  file << "NumberOfSupportVectors: " << numSV << std::endl;
608 
609  file << "RHO: \n";
610  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->rho[i] << "\t";
611  file << "\n";
612 
613  if(model->label){
614  file << "Label: \n";
615  for(UINT i=0;i<numClasses;i++) file << model->label[i] << "\t";
616  file << "\n";
617  }
618 
619  if(model->probA){ // regression has probA only
620  file << "ProbA: \n";
621  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->probA[i] << "\t";
622  file << "\n";
623  }
624 
625  if(model->probB){
626  file << "ProbB: \n";
627  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file << model->probB[i] << "\t";
628  file << "\n";
629  }
630 
631  if(model->nSV){
632  file << "NumSupportVectorsPerClass: \n";
633  for(UINT i=0;i<numClasses;i++) file << model->nSV[i] << "\t";
634  file << "\n";
635  }
636 
637  file << "SupportVectors: \n";
638 
639  const Float * const *sv_coef = model->sv_coef;
640  const svm_node * const *SV = model->SV;
641 
642  for(UINT i=0;i<numSV;i++){
643  for(UINT j=0;j<numClasses-1;j++)
644  file << sv_coef[j][i] << "\t";
645 
646  const svm_node *p = SV[i];
647 
648  if(param.kernel_type == PRECOMPUTED) file << (int) p->value << "\t";
649  else{
650  while(p->index != -1){
651  file << p->index << "\t" << p->value << "\t";
652  p++;
653  }
654  file << "\n";
655  }
656  }
657  }
658 
659  return true;
660 }
661 
662 bool SVM::load( std::fstream &file ){
663 
664  std::string word;
665  UINT numSV = 0;
666  UINT halfNumClasses = 0;
667 
668  //Clear any previous models, parameters or problems
669  clear();
670 
671  if( !file.is_open() ){
672  errorLog << __GRT_LOG__ << " The file is not open!" << std::endl;
673  return false;
674  }
675 
676  //Read the file header
677  file >> word;
678 
679  //Check to see if we should load a legacy file
680  if( word == "SVM_MODEL_FILE_V1.0" ){
681  return loadLegacyModelFromFile( file );
682  }
683 
684  //Check to make sure this is a file with the correct File Format
685  if( word != "SVM_MODEL_FILE_V2.0" ){
686  errorLog << __GRT_LOG__ << " Invalid file format!" << std::endl;
687  clear();
688  return false;
689  }
690 
691  //Load the base settings from the file
693  errorLog << __GRT_LOG__ << " Failed to load base settings from file!" << std::endl;
694  return false;
695  }
696 
697  //Init the memory for the model
698  model = new svm_model;
699  model->nr_class = 0;
700  model->l = 0;
701  model->SV = NULL;
702  model->sv_coef = NULL;
703  model->rho = NULL;
704  model->probA = NULL;
705  model->probB = NULL;
706  model->label = NULL;
707  model->nSV = NULL;
708  model->label = NULL;
709  model->nSV = NULL;
710  model->free_sv = 0; //This will be set to 1 if everything is loaded OK
711 
712  //Init the memory for the parameters
713  model->param.svm_type = 0;
714  model->param.kernel_type = 0;
715  model->param.degree = 0;
716  model->param.gamma = 0;
717  model->param.coef0 = 0;
718  model->param.cache_size = 0;
719  model->param.eps = 0;
720  model->param.C = 0;
721  model->param.nr_weight = 0;
722  model->param.weight_label = NULL;
723  model->param.weight = NULL;
724  model->param.nu = 0;
725  model->param.p = 0;
726  model->param.shrinking = 0;
727  model->param.probability = 1;
728 
729  //Load the model type
730  file >> word;
731  if(word != "ModelType:"){
732  errorLog << __GRT_LOG__ << " Failed to find ModelType header!" << std::endl;
733  clear();
734  return false;
735  }
736  file >> word;
737  if( word == "C_SVC" ){
738  model->param.svm_type = C_SVC;
739  }else{
740  if( word == "NU_SVC" ){
741  model->param.svm_type = NU_SVC;
742  }else{
743  if( word == "ONE_CLASS" ){
744  model->param.svm_type = ONE_CLASS;
745  }else{
746  if( word == "EPSILON_SVR" ){
747  model->param.svm_type = EPSILON_SVR;
748  }else{
749  if( word == "NU_SVR" ){
750  model->param.svm_type = NU_SVR;
751  }else{
752  errorLog << __GRT_LOG__ << " Failed to find SVM type!" << std::endl;
753  clear();
754  return false;
755  }
756  }
757  }
758  }
759  }
760 
761  //Load the model type
762  file >> word;
763  if(word != "KernelType:"){
764  errorLog << __GRT_LOG__ << " Failed to find kernel type!" << std::endl;
765  clear();
766  return false;
767  }
768  file >> word;
769  if( word == "LINEAR" ){
770  model->param.kernel_type = LINEAR;
771  }else{
772  if( word == "POLYNOMIAL" ){
773  model->param.kernel_type = POLY;
774  }else{
775  if( word == "RBF" ){
776  model->param.kernel_type = RBF;
777  }else{
778  if( word == "SIGMOID" ){
779  model->param.kernel_type = SIGMOID;
780  }else{
781  if( word == "PRECOMPUTED" ){
782  model->param.kernel_type = PRECOMPUTED;
783  }else{
784  errorLog << __GRT_LOG__ << " Failed to find kernel type!" << std::endl;
785  clear();
786  return false;
787  }
788  }
789  }
790  }
791  }
792 
793  //Load the degree
794  file >> word;
795  if(word != "Degree:"){
796  errorLog << __GRT_LOG__ << " Failed to find Degree header!" << std::endl;
797  clear();
798  return false;
799  }
800  file >> model->param.degree;
801 
802  //Load the gamma
803  file >> word;
804  if(word != "Gamma:"){
805  errorLog << __GRT_LOG__ << " Failed to find Gamma header!" << std::endl;
806  clear();
807  return false;
808  }
809  file >> model->param.gamma;
810 
811  //Load the Coef0
812  file >> word;
813  if(word != "Coef0:"){
814  errorLog << __GRT_LOG__ << " Failed to find Coef0 header!" << std::endl;
815  clear();
816  return false;
817  }
818  file >> model->param.coef0;
819 
820  //Load the NumberOfFeatures
821  file >> word;
822  if(word != "NumberOfFeatures:"){
823  errorLog << __GRT_LOG__ << " Failed to find NumberOfFeatures header!" << std::endl;
824  clear();
825  return false;
826  }
827  file >> numInputDimensions;
828 
829  //Load the UseShrinking
830  file >> word;
831  if(word != "UseShrinking:"){
832  errorLog << __GRT_LOG__ << " Failed to find UseShrinking header!" << std::endl;
833  clear();
834  return false;
835  }
836  file >> model->param.shrinking;
837 
838  //Load the UseProbability
839  file >> word;
840  if(word != "UseProbability:"){
841  errorLog << __GRT_LOG__ << " Failed to find UseProbability header!" << std::endl;
842  clear();
843  return false;
844  }
845  file >> model->param.probability;
846 
847  if( trained ){
848  //Load the NumberOfSupportVectors
849  file >> word;
850  if(word != "NumberOfSupportVectors:"){
851  errorLog << __GRT_LOG__ << " Failed to find NumberOfSupportVectors header!" << std::endl;
852  clear();
853  return false;
854  }
855  file >> numSV;
856 
857  //Setup the values
858  halfNumClasses = numClasses*(numClasses-1)/2;
859  model->nr_class = numClasses;
860  model->l = numSV;
861 
862  //Load the RHO
863  file >> word;
864  if(word != "RHO:"){
865  errorLog << __GRT_LOG__ << " Failed to find RHO header!" << std::endl;
866  clear();
867  return false;
868  }
869  model->rho = new Float[ halfNumClasses ];
870  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->rho[i];
871 
872  //See if we can load the Labels
873  file >> word;
874  if(word != "Label:"){
875  model->label = NULL;
876  }else{
877  model->label = new int[ numClasses ];
878  for(UINT i=0;i<numClasses;i++) file >> model->label[i];
879  //We only need to read a new line if we found the label!
880  file >> word;
881  }
882 
883  //See if we can load the ProbA
884  //We don't need to read another line here
885  if(word != "ProbA:"){
886  model->probA = NULL;
887  }else{
888  model->probA = new Float[ halfNumClasses ];
889  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probA[i];
890  //We only need to read a new line if we found the label!
891  file >> word;
892  }
893 
894  //See if we can load the ProbB
895  //We don't need to read another line here
896  if(word != "ProbB:"){
897  model->probB = NULL;
898  }else{
899  model->probB = new Float[ halfNumClasses ];
900  for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probB[i];
901  //We only need to read a new line if we found the label!
902  file >> word;
903  }
904 
905  //See if we can load the NumSupportVectorsPerClass
906  //We don't need to read another line here
907  if( word == "NumSupportVectorsPerClass:" ){
908  model->nSV = new int[ numClasses ];
909  for(UINT i=0; i<numClasses; i++) file >> model->nSV[i];
910  //We only need to read a new line if we found the label!
911  file >> word;
912  }else{
913  model->nSV = NULL;
914  }
915 
916  //Load the SupportVectors
917  //We don't need to read another line here
918  if(word != "SupportVectors:"){
919  errorLog << __GRT_LOG__ << " Failed to find SupportVectors header!" << std::endl;
920  clear();
921  return false;
922  }
923 
924  //Setup the memory
925  model->sv_coef = new Float*[numClasses-1];
926  for(UINT j=0;j<numClasses-1;j++) model->sv_coef[j] = new Float[numSV];
927  model->SV = new svm_node*[numSV];
928 
929  for(UINT i=0; i<numSV; i++){
930  for(UINT j=0; j<numClasses-1; j++){
931  file >> model->sv_coef[j][i];
932  }
933 
934  model->SV[i] = new svm_node[numInputDimensions+1];
935 
936  if(model->param.kernel_type == PRECOMPUTED) file >> model->SV[i][0].value;
937  else{
938  for(UINT j=0; j<numInputDimensions; j++){
939  file >> model->SV[i][j].index;
940  file >> model->SV[i][j].value;
941  }
942  model->SV[i][numInputDimensions].index = -1; //Assign the final node value
943  model->SV[i][numInputDimensions].value = 0;
944  }
945  }
946 
947  //Set the class labels
948  this->numClasses = getNumClasses();
949  classLabels.resize(getNumClasses());
950  for(UINT k=0; k<getNumClasses(); k++){
951  classLabels[k] = model->label[k];
952  }
953 
954  //The SV have now been loaded so flag that they should be deleted
955  model->free_sv = 1;
956 
957  //Resize the prediction results to make sure it is setup for realtime prediction
958  maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
959  bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
960  classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
961  classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
962  }
963 
964  return true;
965 }
966 
967 bool SVM::clear(){
968 
969  //Clear the base class
971 
972  crossValidationResult = 0;
973  trained = false;
974  svm_free_and_destroy_model(&model);
975  svm_destroy_param(&param);
976  deleteProblemSet();
977 
978  return true;
979 }
980 
982  return useCrossValidation;
983 }
984 
986  return useAutoGamma;
987 }
988 
989 std::string SVM::getSVMType() const{
990 
991  const struct svm_parameter *paramPtr = NULL;
992  std::string modelName = "UNKNOWN";
993  if( trained ){
994  paramPtr = &model->param;
995  }else paramPtr = &param;
996 
997  switch(paramPtr->svm_type){
998  case(C_SVC):
999  modelName = "C_SVC";
1000  break;
1001  case(NU_SVC):
1002  modelName = "NU_SVC";
1003  break;
1004  case(ONE_CLASS):
1005  modelName = "ONE_CLASS";
1006  break;
1007  case(EPSILON_SVR):
1008  modelName = "EPSILON_SVR";
1009  break;
1010  case(NU_SVR):
1011  modelName = "NU_SVR";
1012  break;
1013  default:
1014  break;
1015  }
1016 
1017  return modelName;
1018 }
1019 
1020 std::string SVM::getKernelType() const{
1021  const struct svm_parameter *paramPtr = NULL;
1022  std::string modelName = "UNKNOWN";
1023  if( trained ){
1024  paramPtr = &model->param;
1025  }else paramPtr = &param;
1026 
1027  switch(paramPtr->kernel_type){
1028  case(LINEAR_KERNEL):
1029  modelName = "LINEAR_KERNEL";
1030  break;
1031  case(POLY_KERNEL):
1032  modelName = "POLY_KERNEL";
1033  break;
1034  case(RBF_KERNEL):
1035  modelName = "RBF_KERNEL";
1036  break;
1037  case(SIGMOID_KERNEL):
1038  modelName = "SIGMOID_KERNEL";
1039  break;
1040  case(PRECOMPUTED_KERNEL):
1041  modelName = "PRECOMPUTED_KERNEL";
1042  break;
1043  default:
1044  break;
1045  }
1046  return modelName;
1047 }
1048 
1049 UINT SVM::getNumClasses() const{
1050  if( !trained ) return 0;
1051  return (UINT) model->nr_class;
1052 }
1053 
1054 UINT SVM::getDegree() const{
1055  if( trained ){
1056  return (UINT)model->param.degree;
1057  }
1058  return (UINT)param.gamma;
1059 }
1060 
1061 Float SVM::getGamma() const{
1062  if( trained ){
1063  return model->param.gamma;
1064  }
1065  return param.gamma;
1066 }
1067 
1068 Float SVM::getNu() const{
1069  if( trained ){
1070  return model->param.nu;
1071  }
1072  return param.gamma;
1073 }
1074 
1075 Float SVM::getCoef0() const{
1076  if( trained ){
1077  return model->param.coef0;
1078  }
1079  return param.gamma;
1080 }
1081 
1082 Float SVM::getC() const{
1083  if( trained ){
1084  return model->param.C;
1085  }
1086  return param.gamma;
1087 }
1088 
1089 Float SVM::getCrossValidationResult() const{ return crossValidationResult; }
1090 
1091 const struct LIBSVM::svm_model* SVM::getLIBSVMModel() const { return model; }
1092 
1093 bool SVM::setSVMType(const SVMType svmType){
1094  if( validateSVMType(svmType) ){
1095  param.svm_type = (int)svmType;
1096  return true;
1097  }
1098  return false;
1099 }
1100 
1101 bool SVM::setKernelType(const KernelType kernelType){
1102 
1103  if( validateKernelType(kernelType) ){
1104  param.kernel_type = (int)kernelType;
1105  return true;
1106  }
1107  warningLog << __GRT_LOG__ << " Failed to set kernel type, unknown kernelType!" << std::endl;
1108  return false;
1109 }
1110 
1111 bool SVM::setGamma(const Float gamma){
1112  if( !useAutoGamma ){
1113  this->param.gamma = gamma;
1114  return true;
1115  }
1116  warningLog << __GRT_LOG__ << " Failed to set gamma, useAutoGamma is enabled, setUseAutoGamma to false first!" << std::endl;
1117  return false;
1118 }
1119 
1120 bool SVM::setDegree(const UINT degree){
1121  this->param.degree = (int)degree;
1122  return true;
1123 }
1124 
1125 bool SVM::setNu(const Float nu){
1126  this->param.nu = nu;
1127  return true;
1128 }
1129 
1130 bool SVM::setCoef0(const Float coef0){
1131  this->param.coef0 = coef0;
1132  return true;
1133 }
1134 
1135 bool SVM::setC(const Float C){
1136  this->param.C = C;
1137  return true;
1138 }
1139 
1140 bool SVM::setKFoldCrossValidationValue(const UINT kFoldValue){
1141  if( kFoldValue > 0 ){
1142  this->kFoldValue = kFoldValue;
1143  return true;
1144  }
1145  warningLog << __GRT_LOG__ << " Failed to set kFoldValue, the kFoldValue must be greater than 0!" << std::endl;
1146  return false;
1147 }
1148 
1149 bool SVM::enableAutoGamma(const bool useAutoGamma){
1150  this->useAutoGamma = useAutoGamma;
1151  return true;
1152 }
1153 
1154 bool SVM::enableCrossValidationTraining(const bool useCrossValidation){
1155  this->useCrossValidation = useCrossValidation;
1156  return true;
1157 }
1158 
1159 bool SVM::validateSVMType(const SVMType svmType){
1160  if( svmType == C_SVC ){
1161  return true;
1162  }
1163  if( svmType == NU_SVC ){
1164  return true;
1165  }
1166  if( svmType == ONE_CLASS ){
1167  return true;
1168  }
1169  if( svmType == EPSILON_SVR ){
1170  return true;
1171  }
1172  if( svmType == NU_SVR ){
1173  return true;
1174  }
1175  return false;
1176 }
1177 
1178 bool SVM::validateKernelType(const KernelType kernelType){
1179  if( kernelType == LINEAR_KERNEL ){
1180  return true;
1181  }
1182  if( kernelType == POLY_KERNEL ){
1183  return true;
1184  }
1185  if( kernelType == RBF_KERNEL ){
1186  return true;
1187  }
1188  if( kernelType == SIGMOID_KERNEL ){
1189  return true;
1190  }
1191  if( kernelType == PRECOMPUTED_KERNEL ){
1192  return true;
1193  }
1194  return false;
1195 }
1196 
//Creates and returns a deep copy of the internal LIBSVM model, or NULL if no model exists.
//The caller takes ownership of the returned struct and all the heap arrays hung off it
//(free_sv is set to 1 so the standard cleanup path will release the support vectors).
struct svm_model* SVM::deepCopyModel() const{

    if( model == NULL ) return NULL;

    UINT halfNumClasses = 0;

    //Init the memory for the model
    struct svm_model *m = new svm_model;
    m->nr_class = 0;
    m->l = 0;
    m->SV = NULL;
    m->sv_coef = NULL;
    m->rho = NULL;
    m->probA = NULL;
    m->probB = NULL;
    m->label = NULL;
    m->nSV = NULL;
    m->label = NULL;  //NOTE(review): label and nSV are assigned NULL twice - harmless, but duplicates
    m->nSV = NULL;
    m->free_sv = 0; //This will be set to 1 if everything is loaded OK

    //Init the memory for the parameters
    m->param.svm_type = 0;
    m->param.kernel_type = 0;
    m->param.degree = 0;
    m->param.gamma = 0;
    m->param.coef0 = 0;
    m->param.cache_size = 0;
    m->param.eps = 0;
    m->param.C = 0;
    m->param.nr_weight = 0;
    m->param.weight_label = NULL;
    m->param.weight = NULL;
    m->param.nu = 0;
    m->param.p = 0;
    m->param.shrinking = 0;
    m->param.probability = 1;

    //Copy the parameters
    //NOTE(review): cache_size, eps, C, nr_weight, nu and p are NOT copied from the source
    //model here and keep their zero defaults - presumably acceptable for prediction-only
    //use of the copy, but verify if the copy is ever retrained
    m->param.svm_type = model->param.svm_type;
    m->param.kernel_type = model->param.kernel_type ;
    m->param.degree = model->param.degree;
    m->param.gamma = model->param.gamma;
    m->param.coef0 = model->param.coef0;
    m->nr_class = model->nr_class;
    m->l = model->l;
    m->param.shrinking = model->param.shrinking;
    m->param.probability = model->param.probability;

    //Setup the values
    //For a k-class model LIBSVM stores k*(k-1)/2 pairwise values (rho, probA, probB)
    halfNumClasses = model->nr_class*(model->nr_class-1)/2;

    m->rho = new Float[ halfNumClasses ];
    for(int i=0;i <model->nr_class*(model->nr_class-1)/2; i++) m->rho[i] = model->rho[i];

    if( model->label != NULL ){
        m->label = new int[ model->nr_class ];
        for(int i=0;i<model->nr_class;i++) m->label[i] = model->label[i];
    }

    if( model->probA != NULL ){
        m->probA = new Float[ halfNumClasses ];
        for(UINT i=0;i<halfNumClasses; i++) m->probA[i] = model->probA[i];
    }

    if( model->probB != NULL ){
        m->probB = new Float[ halfNumClasses ];
        for(UINT i=0; i<halfNumClasses; i++) m->probB[i] = model->probB[i];
    }

    if( model->nSV != NULL ){
        m->nSV = new int[ model->nr_class ];
        for(int i=0; i<model->nr_class; i++) m->nSV[i] = model->nSV[i];
    }

    //Setup the memory
    //NOTE(review): allocation uses the classifier's numClasses/numInputDimensions members
    //while the copy loops below use model->nr_class/model->l - these are assumed to agree
    //for a trained instance; confirm they cannot diverge (e.g. after a partial load)
    m->sv_coef = new Float*[numClasses-1];
    for(UINT j=0;j<numClasses-1;j++) m->sv_coef[j] = new Float[model->l];
    m->SV = new svm_node*[model->l];

    for(int i=0; i<model->l; i++){
        for(int j=0; j<model->nr_class-1; j++){
            m->sv_coef[j][i] = model->sv_coef[j][i];
        }

        m->SV[i] = new svm_node[numInputDimensions+1];

        //Precomputed kernels store only the kernel value in node 0; otherwise copy the
        //dense index/value pairs and terminate the node list with index -1 as LIBSVM expects
        if(model->param.kernel_type == PRECOMPUTED) m->SV[i][0].value = model->SV[i][0].value;
        else{
            for(UINT j=0; j<numInputDimensions; j++){
                m->SV[i][j].index = model->SV[i][j].index;
                m->SV[i][j].value = model->SV[i][j].value;
            }
            m->SV[i][numInputDimensions].index = -1; //Assign the final node value
            m->SV[i][numInputDimensions].value = 0;
        }
    }

    //The SV have now been loaded so flag that they should be deleted
    m->free_sv = 1;

    return m;
}
1300 
1301 bool SVM::deepCopyProblem( const struct svm_problem &source, struct svm_problem &target, const unsigned int numInputDimensions ) const{
1302 
1303  //Cleanup the target memory
1304  if( target.y != NULL ){
1305  delete[] target.y;
1306  target.y = NULL;
1307  }
1308  if( target.x != NULL ){
1309  for(int i=0; i<target.l; i++){
1310  delete[] target.x[i];
1311  target.x[i] = NULL;
1312  }
1313  }
1314 
1315  //Deep copy the source to the target
1316  target.l = source.l;
1317 
1318  if( source.x != NULL ){
1319  target.x = new svm_node*[ target.l ];
1320  for(int i=0; i<target.l; i++){
1321  target.x[i] = new svm_node[ numInputDimensions+1 ];
1322  for(unsigned int j=0; j<numInputDimensions+1; j++){
1323  target.x[i][j] = source.x[i][j];
1324  }
1325  }
1326  }
1327 
1328  if( source.y != NULL ){
1329  target.y = new Float[ target.l ];
1330  for(int i=0; i<target.l; i++){
1331  target.y[i] = source.y[i];
1332  }
1333  }
1334 
1335  return true;
1336 }
1337 
1338 bool SVM::deepCopyParam( const svm_parameter &source_param, svm_parameter &target_param ) const{
1339 
1340  //Cleanup any dynamic memory in the target
1341  if( target_param.weight_label != NULL ){
1342  delete[] target_param.weight_label;
1343  target_param.weight_label = NULL;
1344  }
1345  if( target_param.weight != NULL ){
1346  delete[] target_param.weight;
1347  target_param.weight = NULL;
1348  }
1349 
1350  //Copy the non dynamic variables
1351  target_param.svm_type = source_param.svm_type;
1352  target_param.kernel_type = source_param.kernel_type;
1353  target_param.degree = source_param.degree;
1354  target_param.gamma = source_param.gamma;
1355  target_param.coef0 = source_param.coef0;
1356  target_param.cache_size = source_param.cache_size;
1357  target_param.eps = source_param.eps;
1358  target_param.C = source_param.C;
1359  target_param.nr_weight = source_param.nr_weight;
1360  target_param.nu = source_param.nu;
1361  target_param.p = source_param.p;
1362  target_param.shrinking = source_param.shrinking;
1363  target_param.probability = source_param.probability;
1364 
1365  //Copy any dynamic memory
1366  if( source_param.weight_label != NULL ){
1367 
1368  }
1369  if( source_param.weight != NULL ){
1370 
1371  }
1372 
1373  return true;
1374 }
1375 
//Loads an SVM model stored in the legacy (pre-refactor) GRT file format.
//Parses a sequence of "Header: value" tokens from the stream, rebuilding the LIBSVM
//svm_model by hand. On any parse failure the classifier is cleared and false is returned.
//NOTE(review): the `word` variable is reused as a one-token lookahead between the
//optional sections (Label/ProbA/ProbB/NumSupportVectorsPerClass) - the read/compare
//order there is load-bearing, do not reorder.
bool SVM::loadLegacyModelFromFile( std::fstream &file ){

    std::string word;

    UINT numSV = 0;
    UINT halfNumClasses = 0;
    numInputDimensions = 0;

    //Init the memory for the model
    model = new svm_model;
    model->nr_class = 0;
    model->l = 0;
    model->SV = NULL;
    model->sv_coef = NULL;
    model->rho = NULL;
    model->probA = NULL;
    model->probB = NULL;
    model->label = NULL;
    model->nSV = NULL;
    model->label = NULL;  //NOTE(review): label and nSV are assigned NULL twice - harmless duplicates
    model->nSV = NULL;
    model->free_sv = 0; //This will be set to 1 if everything is loaded OK

    //Init the memory for the parameters
    model->param.svm_type = 0;
    model->param.kernel_type = 0;
    model->param.degree = 0;
    model->param.gamma = 0;
    model->param.coef0 = 0;
    model->param.cache_size = 0;
    model->param.eps = 0;
    model->param.C = 0;
    model->param.nr_weight = 0;
    model->param.weight_label = NULL;
    model->param.weight = NULL;
    model->param.nu = 0;
    model->param.p = 0;
    model->param.shrinking = 0;
    model->param.probability = 1;

    //Load the model type
    file >> word;
    if(word != "ModelType:"){
        errorLog << __GRT_LOG__ << " Failed to find ModelType header!" << std::endl;
        clear();
        return false;
    }
    file >> word;
    //Map the stored SVM-type name onto the LIBSVM enum value
    if( word == "C_SVC" ){
        model->param.svm_type = C_SVC;
    }else{
        if( word == "NU_SVC" ){
            model->param.svm_type = NU_SVC;
        }else{
            if( word == "ONE_CLASS" ){
                model->param.svm_type = ONE_CLASS;
            }else{
                if( word == "EPSILON_SVR" ){
                    model->param.svm_type = EPSILON_SVR;
                }else{
                    if( word == "NU_SVR" ){
                        model->param.svm_type = NU_SVR;
                    }else{
                        errorLog << __GRT_LOG__ << " Failed to find SVM type!" << std::endl;
                        clear();
                        return false;
                    }
                }
            }
        }
    }

    //Load the model type
    file >> word;
    if(word != "KernelType:"){
        errorLog << __GRT_LOG__ << " Failed to find kernel type!" << std::endl;
        clear();
        return false;
    }
    file >> word;
    //Map the stored kernel name onto the LIBSVM enum value
    if( word == "LINEAR" ){
        model->param.kernel_type = LINEAR;
    }else{
        if( word == "POLYNOMIAL" ){
            model->param.kernel_type = POLY;
        }else{
            if( word == "RBF" ){
                model->param.kernel_type = RBF;
            }else{
                if( word == "SIGMOID" ){
                    model->param.kernel_type = SIGMOID;
                }else{
                    if( word == "PRECOMPUTED" ){
                        model->param.kernel_type = PRECOMPUTED;
                    }else{
                        errorLog << __GRT_LOG__ << " Failed to find kernel type!" << std::endl;
                        clear();
                        return false;
                    }
                }
            }
        }
    }

    //Load the degree
    file >> word;
    if(word != "Degree:"){
        errorLog << __GRT_LOG__ << " Failed to find Degree header!" << std::endl;
        clear();
        return false;
    }
    file >> model->param.degree;

    //Load the gamma
    file >> word;
    if(word != "Gamma:"){
        errorLog << __GRT_LOG__ << " Failed to find Gamma header!" << std::endl;
        clear();
        return false;
    }
    file >> model->param.gamma;

    //Load the Coef0
    file >> word;
    if(word != "Coef0:"){
        errorLog << __GRT_LOG__ << " Failed to find Coef0 header!" << std::endl;
        clear();
        return false;
    }
    file >> model->param.coef0;

    //Load the NumberOfClasses
    file >> word;
    if(word != "NumberOfClasses:"){
        errorLog << __GRT_LOG__ << " Failed to find NumberOfClasses header!" << std::endl;
        clear();
        return false;
    }
    file >> numClasses;

    //Load the NumberOfSupportVectors
    file >> word;
    if(word != "NumberOfSupportVectors:"){
        errorLog << __GRT_LOG__ << " Failed to find NumberOfSupportVectors header!" << std::endl;
        clear();
        return false;
    }
    file >> numSV;

    //Load the NumberOfFeatures
    file >> word;
    if(word != "NumberOfFeatures:"){
        errorLog << __GRT_LOG__ << " Failed to find NumberOfFeatures header!" << std::endl;
        clear();
        return false;
    }
    file >> numInputDimensions;

    //Load the UseShrinking
    file >> word;
    if(word != "UseShrinking:"){
        errorLog << __GRT_LOG__ << " Failed to find UseShrinking header!" << std::endl;
        clear();
        return false;
    }
    file >> model->param.shrinking;

    //Load the UseProbability
    file >> word;
    if(word != "UseProbability:"){
        errorLog << __GRT_LOG__ << " Failed to find UseProbability header!" << std::endl;
        clear();
        return false;
    }
    file >> model->param.probability;

    //Load the UseScaling
    file >> word;
    if(word != "UseScaling:"){
        errorLog << __GRT_LOG__ << " Failed to find UseScaling header!" << std::endl;
        clear();
        return false;
    }
    file >> useScaling;

    //Load the Ranges
    file >> word;
    if(word != "Ranges:"){
        errorLog << __GRT_LOG__ << " Failed to find Ranges header!" << std::endl;
        clear();
        return false;
    }
    //Setup the memory for the ranges
    ranges.clear();
    ranges.resize(numInputDimensions);

    //One [min max] pair per input dimension, used when useScaling is enabled
    for(UINT i=0; i<ranges.size(); i++){
        file >> ranges[i].minValue;
        file >> ranges[i].maxValue;
    }

    //Setup the values
    //LIBSVM stores k*(k-1)/2 pairwise values for a k-class model
    halfNumClasses = numClasses*(numClasses-1)/2;
    model->nr_class = numClasses;
    model->l = numSV;

    //Load the RHO
    file >> word;
    if(word != "RHO:"){
        errorLog << __GRT_LOG__ << " Failed to find RHO header!" << std::endl;
        clear();
        return false;
    }
    model->rho = new Float[ halfNumClasses ];
    for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->rho[i];

    //See if we can load the Labels
    //The Label/ProbA/ProbB/NumSupportVectorsPerClass sections are all optional; each
    //branch leaves the next section's header in `word` for the following check
    file >> word;
    if(word != "Label:"){
        model->label = NULL;
    }else{
        model->label = new int[ numClasses ];
        for(UINT i=0;i<numClasses;i++) file >> model->label[i];
        //We only need to read a new line if we found the label!
        file >> word;
    }

    //See if we can load the ProbA
    //We don't need to read another line here
    if(word != "ProbA:"){
        model->probA = NULL;
    }else{
        model->probA = new Float[ halfNumClasses ];
        for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probA[i];
        //We only need to read a new line if we found the label!
        file >> word;
    }

    //See if we can load the ProbB
    //We don't need to read another line here
    if(word != "ProbB:"){
        model->probB = NULL;
    }else{
        model->probB = new Float[ halfNumClasses ];
        for(UINT i=0;i<numClasses*(numClasses-1)/2;i++) file >> model->probB[i];
        //We only need to read a new line if we found the label!
        file >> word;
    }

    //See if we can load the NumSupportVectorsPerClass
    //We don't need to read another line here
    if(word != "NumSupportVectorsPerClass:"){
        model->nSV = NULL;
    }else{
        model->nSV = new int[ numClasses ];
        for(UINT i=0;i<numClasses;i++) file >> model->nSV[i];
        //We only need to read a new line if we found the label!
        file >> word;
    }

    //Load the SupportVectors
    //We don't need to read another line here
    if(word != "SupportVectors:"){
        errorLog << __GRT_LOG__ << " Failed to find SupportVectors header!" << std::endl;
        clear();
        return false;
    }

    //Setup the memory
    model->sv_coef = new Float*[numClasses-1];
    for(UINT j=0;j<numClasses-1;j++) model->sv_coef[j] = new Float[numSV];
    model->SV = new svm_node*[numSV];

    for(UINT i=0; i<numSV; i++){
        for(UINT j=0; j<numClasses-1; j++){
            file >> model->sv_coef[j][i];
        }

        model->SV[i] = new svm_node[numInputDimensions+1];

        //Precomputed kernels store a single value per SV; otherwise read dense
        //index/value pairs and terminate the node list with index -1 as LIBSVM expects
        if(model->param.kernel_type == PRECOMPUTED) file >> model->SV[i][0].value;
        else{
            for(UINT j=0; j<numInputDimensions; j++){
                file >> model->SV[i][j].index;
                file >> model->SV[i][j].value;
            }
            model->SV[i][numInputDimensions].index = -1; //Assign the final node value
            model->SV[i][numInputDimensions].value = 0;
        }
    }

    //Set the class labels
    //NOTE(review): model->label is dereferenced here but is NULL when the optional
    //Label section was absent from the file - confirm legacy files always contain it
    this->numClasses = getNumClasses();
    classLabels.resize(getNumClasses());
    for(UINT k=0; k<getNumClasses(); k++){
        classLabels[k] = model->label[k];
    }

    //The SV have now been loaded so flag that they should be deleted
    model->free_sv = 1;

    //Finally, flag that the model has been trained to show it has been loaded and can be used for prediction
    trained = true;

    return true;
}
1683 
1684 GRT_END_NAMESPACE
bool saveBaseSettingsToFile(std::fstream &file) const
Definition: Classifier.cpp:274
std::string getKernelType() const
Definition: SVM.cpp:1020
std::string getId() const
Definition: GRTBase.cpp:85
bool setCoef0(const Float coef0)
Definition: SVM.cpp:1130
#define DEFAULT_NULL_LIKELIHOOD_VALUE
Definition: Classifier.h:33
Float getCrossValidationResult() const
Definition: SVM.cpp:1089
bool enableCrossValidationTraining(const bool useCrossValidation)
Definition: SVM.cpp:1154
bool setC(const Float C)
Definition: SVM.cpp:1135
virtual bool clear()
Definition: SVM.cpp:967
bool loadLegacyModelFromFile(std::fstream &file)
Definition: SVM.cpp:1376
const struct LIBSVM::svm_model * getLIBSVMModel() const
Definition: SVM.cpp:1091
bool init(KernelType kernelType, SVMType svmType, bool useScaling, bool useNullRejection, bool useAutoGamma, Float gamma, UINT degree, Float coef0, Float nu, Float C, bool useCrossValidation, UINT kFoldValue)
Definition: SVM.cpp:243
virtual ~SVM()
Definition: SVM.cpp:87
bool enableAutoGamma(const bool useAutoGamma)
Definition: SVM.cpp:1149
bool getIsCrossValidationTrainingEnabled() const
Definition: SVM.cpp:981
Float getNu() const
Definition: SVM.cpp:1068
#define SVM_MIN_SCALE_RANGE
Definition: SVM.h:34
std::string getSVMType() const
Definition: SVM.cpp:989
bool getIsAutoGammaEnabled() const
Definition: SVM.cpp:985
virtual UINT getNumClasses() const
Definition: SVM.cpp:1049
SVM(KernelType kernelType=LINEAR_KERNEL, SVMType svmType=C_SVC, bool useScaling=true, bool useNullRejection=false, bool useAutoGamma=true, Float gamma=0.1, UINT degree=3, Float coef0=0, Float nu=0.5, Float C=1, bool useCrossValidation=false, UINT kFoldValue=10)
Definition: SVM.cpp:35
virtual bool resize(const unsigned int size)
Definition: Vector.h:133
virtual bool load(std::fstream &file)
Definition: SVM.cpp:662
UINT getSize() const
Definition: Vector.h:201
void initDefaultSVMSettings()
Definition: SVM.cpp:299
virtual bool save(std::fstream &file) const
Definition: SVM.cpp:534
Float getCoef0() const
Definition: SVM.cpp:1075
virtual bool computeAccuracy(const ClassificationData &data, Float &accuracy)
Definition: Classifier.cpp:171
UINT getNumSamples() const
Definition: SVM.h:47
bool setDegree(const UINT degree)
Definition: SVM.cpp:1120
static std::string getId()
Definition: SVM.cpp:30
Definition: libsvm.cpp:4
bool copyBaseVariables(const Classifier *classifier)
Definition: Classifier.cpp:101
bool loadBaseSettingsFromFile(std::fstream &file)
Definition: Classifier.cpp:321
Float getGamma() const
Definition: SVM.cpp:1061
UINT getNumDimensions() const
UINT getNumClasses() const
virtual bool train_(ClassificationData &trainingData)
Definition: SVM.cpp:140
bool setKFoldCrossValidationValue(const UINT kFoldValue)
Definition: SVM.cpp:1140
Vector< MinMax > getRanges() const
ClassificationData split(const UINT splitPercentage, const bool useStratifiedSampling=false)
bool setNu(const Float nu)
Definition: SVM.cpp:1125
bool setGamma(const Float gamma)
Definition: SVM.cpp:1111
UINT getDegree() const
Definition: SVM.cpp:1054
virtual bool deepCopyFrom(const Classifier *classifier)
Definition: SVM.cpp:113
bool scale(const Float minTarget, const Float maxTarget)
bool setKernelType(const KernelType kernelType)
Definition: SVM.cpp:1101
Float getC() const
Definition: SVM.cpp:1082
virtual bool clear()
Definition: Classifier.cpp:151
SVM & operator=(const SVM &rhs)
Definition: SVM.cpp:91
virtual bool predict_(VectorFloat &inputVector)
Definition: SVM.cpp:216
bool setSVMType(const SVMType svmType)
Definition: SVM.cpp:1093
This is the main base class that all GRT Classification algorithms should inherit from...
Definition: Classifier.h:41