GestureRecognitionToolkit  Version: 0.1.0
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source, c++ machine learning library for real-time gesture recognition.
RegressionTree.cpp
1 /*
2 GRT MIT License
3 Copyright (c) <2012> <Nicholas Gillian, Media Lab, MIT>
4 
5 Permission is hereby granted, free of charge, to any person obtaining a copy of this software
6 and associated documentation files (the "Software"), to deal in the Software without restriction,
7 including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
9 subject to the following conditions:
10 
11 The above copyright notice and this permission notice shall be included in all copies or substantial
12 portions of the Software.
13 
14 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
15 LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
16 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
17 WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
18 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 */
20 
#include "RegressionTree.h"

#include <cmath>
#include <limits>
22 
23 GRT_BEGIN_NAMESPACE
24 
25 //Register the RegressionTreeNode with the Node base class
26 RegisterNode< RegressionTreeNode > RegressionTreeNode::registerModule("RegressionTreeNode");
27 
28 //Register the RegressionTree module with the Regressifier base class
29 RegisterRegressifierModule< RegressionTree > RegressionTree::registerModule("RegressionTree");
30 
31 RegressionTree::RegressionTree(const UINT numSplittingSteps,const UINT minNumSamplesPerNode,const UINT maxDepth,const bool removeFeaturesAtEachSpilt,const UINT trainingMode,const bool useScaling,const Float minRMSErrorPerNode)
32 {
33  tree = NULL;
34  this->numSplittingSteps = numSplittingSteps;
35  this->minNumSamplesPerNode = minNumSamplesPerNode;
36  this->maxDepth = maxDepth;
37  this->removeFeaturesAtEachSpilt = removeFeaturesAtEachSpilt;
38  this->trainingMode = trainingMode;
39  this->useScaling = useScaling;
40  this->minRMSErrorPerNode = minRMSErrorPerNode;
41  Regressifier::classType = "RegressionTree";
42  regressifierType = Regressifier::classType;
43  Regressifier::debugLog.setProceedingText("[DEBUG RegressionTree]");
44  Regressifier::errorLog.setProceedingText("[ERROR RegressionTree]");
45  Regressifier::trainingLog.setProceedingText("[TRAINING RegressionTree]");
46  Regressifier::warningLog.setProceedingText("[WARNING RegressionTree]");
47 
48 }
49 
51  tree = NULL;
52  Regressifier::classType = "RegressionTree";
53  regressifierType = Regressifier::classType;
54  Regressifier::debugLog.setProceedingText("[DEBUG RegressionTree]");
55  Regressifier::errorLog.setProceedingText("[ERROR RegressionTree]");
56  Regressifier::trainingLog.setProceedingText("[TRAINING RegressionTree]");
57  Regressifier::warningLog.setProceedingText("[WARNING RegressionTree]");
58  *this = rhs;
59 }
60 
62 {
63  clear();
64 }
65 
67  if( this != &rhs ){
68  //Clear this tree
69  this->clear();
70 
71  if( rhs.getTrained() ){
72  //Deep copy the tree
73  this->tree = dynamic_cast< RegressionTreeNode* >( rhs.deepCopyTree() );
74  }
75 
76  this->numSplittingSteps = rhs.numSplittingSteps;
77  this->minNumSamplesPerNode = rhs.minNumSamplesPerNode;
78  this->maxDepth = rhs.maxDepth;
79  this->removeFeaturesAtEachSpilt = rhs.removeFeaturesAtEachSpilt;
80  this->trainingMode = rhs.trainingMode;
82 
83  //Copy the base variables
85  }
86  return *this;
87 }
88 
89 bool RegressionTree::deepCopyFrom(const Regressifier *regressifier){
90 
91  if( regressifier == NULL ) return false;
92 
93  if( this->getRegressifierType() == regressifier->getRegressifierType() ){
94 
95  RegressionTree *ptr = (RegressionTree*)regressifier;
96 
97  //Clear this tree
98  this->clear();
99 
100  if( ptr->getTrained() ){
101  //Deep copy the tree
102  this->tree = dynamic_cast< RegressionTreeNode* >( ptr->deepCopyTree() );
103  }
104 
105  this->numSplittingSteps = ptr->numSplittingSteps;
106  this->minNumSamplesPerNode = ptr->minNumSamplesPerNode;
107  this->maxDepth = ptr->maxDepth;
108  this->removeFeaturesAtEachSpilt = ptr->removeFeaturesAtEachSpilt;
109  this->trainingMode = ptr->trainingMode;
111 
112  //Copy the base variables
113  return copyBaseVariables( regressifier );
114  }
115  return false;
116 }
117 
119 
120  //Clear any previous model
121  clear();
122 
123  const unsigned int M = trainingData.getNumSamples();
124  const unsigned int N = trainingData.getNumInputDimensions();
125  const unsigned int T = trainingData.getNumTargetDimensions();
126 
127  if( M == 0 ){
128  Regressifier::errorLog << "train_(RegressionData &trainingData) - Training data has zero samples!" << std::endl;
129  return false;
130  }
131 
132  numInputDimensions = N;
133  numOutputDimensions = T;
134  inputVectorRanges = trainingData.getInputRanges();
135  targetVectorRanges = trainingData.getTargetRanges();
136 
137  //Scale the training data if needed
138  if( useScaling ){
139  //Scale the training data between 0 and 1
140  trainingData.scale(0, 1);
141  }
142 
143  //Setup the valid features - at this point all features can be used
144  Vector< UINT > features(N);
145  for(UINT i=0; i<N; i++){
146  features[i] = i;
147  }
148 
149  //Build the tree
150  UINT nodeID = 0;
151  tree = buildTree( trainingData, NULL, features, nodeID );
152 
153  if( tree == NULL ){
154  clear();
155  Regressifier::errorLog << "train_(RegressionData &trainingData) - Failed to build tree!" << std::endl;
156  return false;
157  }
158 
159  //Flag that the algorithm has been trained
160  trained = true;
161 
162  return true;
163 }
164 
166 
167  if( !trained ){
168  Regressifier::errorLog << "predict_(VectorFloat &inputVector) - Model Not Trained!" << std::endl;
169  return false;
170  }
171 
172  if( tree == NULL ){
173  Regressifier::errorLog << "predict_(VectorFloat &inputVector) - Tree pointer is null!" << std::endl;
174  return false;
175  }
176 
177  if( inputVector.size() != numInputDimensions ){
178  Regressifier::errorLog << "predict_(VectorFloat &inputVector) - The size of the input Vector (" << inputVector.size() << ") does not match the num features in the model (" << numInputDimensions << std::endl;
179  return false;
180  }
181 
182  if( useScaling ){
183  for(UINT n=0; n<numInputDimensions; n++){
184  inputVector[n] = scale(inputVector[n], inputVectorRanges[n].minValue, inputVectorRanges[n].maxValue, 0, 1);
185  }
186  }
187 
188  if( !tree->predict( inputVector, regressionData ) ){
189  Regressifier::errorLog << "predict_(VectorFloat &inputVector) - Failed to predict!" << std::endl;
190  return false;
191  }
192 
193  return true;
194 }
195 
197 
198  //Clear the Classifier variables
200 
201  if( tree != NULL ){
202  tree->clear();
203  delete tree;
204  tree = NULL;
205  }
206 
207  return true;
208 }
209 
211  if( tree != NULL )
212  return tree->print();
213  return false;
214 }
215 
/**
 @brief Serializes the model settings and (if trained) the tree to an open file stream.
 NOTE: minRMSErrorPerNode is intentionally not written, matching the
 GRT_REGRESSION_TREE_MODEL_FILE_V1.0 format read by loadModelFromFile.
 @param file an open output stream
 @return true on success, false otherwise
 */
bool RegressionTree::saveModelToFile( std::fstream &file ) const{

    if(!file.is_open())
    {
        Regressifier::errorLog <<"saveModelToFile(fstream &file) - The file is not open!" << std::endl;
        return false;
    }

    //Write the header info
    file << "GRT_REGRESSION_TREE_MODEL_FILE_V1.0\n";

    //Write the classifier settings to the file (the guard line was dropped by the extractor; restored)
    if( !saveBaseSettingsToFile(file) ){
        Regressifier::errorLog <<"saveModelToFile(fstream &file) - Failed to save classifier base settings to file!" << std::endl;
        return false;
    }

    file << "NumSplittingSteps: " << numSplittingSteps << std::endl;
    file << "MinNumSamplesPerNode: " << minNumSamplesPerNode << std::endl;
    file << "MaxDepth: " << maxDepth << std::endl;
    file << "RemoveFeaturesAtEachSpilt: " << removeFeaturesAtEachSpilt << std::endl;
    file << "TrainingMode: " << trainingMode << std::endl;
    file << "TreeBuilt: " << (tree != NULL ? 1 : 0) << std::endl;

    if( tree != NULL ){
        file << "Tree:\n";
        if( !tree->saveToFile( file ) ){
            Regressifier::errorLog << "saveModelToFile(fstream &file) - Failed to save tree to file!" << std::endl;
            return false;
        }
    }

    return true;
}
250 
251 bool RegressionTree::loadModelFromFile( std::fstream &file ){
252 
253  clear();
254 
255  if(!file.is_open())
256  {
257  Regressifier::errorLog << "loadModelFromFile(string filename) - Could not open file to load model" << std::endl;
258  return false;
259  }
260 
261  std::string word;
262  file >> word;
263 
264  //Find the file type header
265  if(word != "GRT_REGRESSION_TREE_MODEL_FILE_V1.0"){
266  Regressifier::errorLog << "loadModelFromFile(string filename) - Could not find Model File Header" << std::endl;
267  return false;
268  }
269 
270  //Load the base settings from the file
272  Regressifier::errorLog << "loadModelFromFile(string filename) - Failed to load base settings from file!" << std::endl;
273  return false;
274  }
275 
276  file >> word;
277  if(word != "NumSplittingSteps:"){
278  Regressifier::errorLog << "loadModelFromFile(string filename) - Could not find the NumSplittingSteps!" << std::endl;
279  return false;
280  }
281  file >> numSplittingSteps;
282 
283  file >> word;
284  if(word != "MinNumSamplesPerNode:"){
285  Regressifier::errorLog << "loadModelFromFile(string filename) - Could not find the MinNumSamplesPerNode!" << std::endl;
286  return false;
287  }
288  file >> minNumSamplesPerNode;
289 
290  file >> word;
291  if(word != "MaxDepth:"){
292  Regressifier::errorLog << "loadModelFromFile(string filename) - Could not find the MaxDepth!" << std::endl;
293  return false;
294  }
295  file >> maxDepth;
296 
297  file >> word;
298  if(word != "RemoveFeaturesAtEachSpilt:"){
299  Regressifier::errorLog << "loadModelFromFile(string filename) - Could not find the RemoveFeaturesAtEachSpilt!" << std::endl;
300  return false;
301  }
302  file >> removeFeaturesAtEachSpilt;
303 
304  file >> word;
305  if(word != "TrainingMode:"){
306  Regressifier::errorLog << "loadModelFromFile(string filename) - Could not find the TrainingMode!" << std::endl;
307  return false;
308  }
309  file >> trainingMode;
310 
311  file >> word;
312  if(word != "TreeBuilt:"){
313  Regressifier::errorLog << "loadModelFromFile(string filename) - Could not find the TreeBuilt!" << std::endl;
314  return false;
315  }
316  file >> trained;
317 
318  if( trained ){
319  file >> word;
320  if(word != "Tree:"){
321  Regressifier::errorLog << "loadModelFromFile(string filename) - Could not find the Tree!" << std::endl;
322  return false;
323  }
324 
325  //Create a new tree
326  tree = new RegressionTreeNode;
327 
328  if( tree == NULL ){
329  clear();
330  Regressifier::errorLog << "loadModelFromFile(fstream &file) - Failed to create new RegressionTreeNode!" << std::endl;
331  return false;
332  }
333 
334  tree->setParent( NULL );
335  if( !tree->loadFromFile( file ) ){
336  clear();
337  Regressifier::errorLog << "loadModelFromFile(fstream &file) - Failed to load tree from file!" << std::endl;
338  return false;
339  }
340  }
341 
342  return true;
343 }
344 
346 
347  if( tree == NULL ){
348  return NULL;
349  }
350 
351  return (RegressionTreeNode*)tree->deepCopyNode();
352 }
353 
355  return dynamic_cast< RegressionTreeNode* >( tree );
356 }
357 
359  return minRMSErrorPerNode;
360 }
361 
362 bool RegressionTree::setMinRMSErrorPerNode(const Float minRMSErrorPerNode){
363  this->minRMSErrorPerNode = minRMSErrorPerNode;
364  return true;
365 }
366 
367 RegressionTreeNode* RegressionTree::buildTree(const RegressionData &trainingData,RegressionTreeNode *parent,Vector< UINT > features,UINT nodeID){
368 
369  const UINT M = trainingData.getNumSamples();
370  const UINT N = trainingData.getNumInputDimensions();
371  const UINT T = trainingData.getNumTargetDimensions();
372  VectorFloat regressionData(T);
373 
374  //Update the nodeID
375 
376  //Get the depth
377  UINT depth = 0;
378 
379  if( parent != NULL )
380  depth = parent->getDepth() + 1;
381 
382  //If there are no training data then return NULL
383  if( trainingData.getNumSamples() == 0 )
384  return NULL;
385 
386  //Create the new node
388 
389  if( node == NULL )
390  return NULL;
391 
392  //Set the parent
393  node->initNode( parent, depth, nodeID );
394 
395  //If there are no features left then create a leaf node and return
396  if( features.size() == 0 || M < minNumSamplesPerNode || depth >= maxDepth ){
397 
398  //Flag that this is a leaf node
399  node->setIsLeafNode( true );
400 
401  //Compute the regression data that will be stored at this node
402  computeNodeRegressionData( trainingData, regressionData );
403 
404  //Set the node
405  node->set( trainingData.getNumSamples(), 0, 0, regressionData );
406 
407  Regressifier::trainingLog << "Reached leaf node. Depth: " << depth << " NumSamples: " << trainingData.getNumSamples() << std::endl;
408 
409  return node;
410  }
411 
412  //Compute the best spilt point
413  UINT featureIndex = 0;
414  Float threshold = 0;
415  Float minError = 0;
416  if( !computeBestSpilt( trainingData, features, featureIndex, threshold, minError ) ){
417  delete node;
418  return NULL;
419  }
420 
421  Regressifier::trainingLog << "Depth: " << depth << " FeatureIndex: " << featureIndex << " Threshold: " << threshold << " MinError: " << minError << std::endl;
422 
423  //If the minError is below the minRMSError then create a leaf node and return
424  if( minError <= minRMSErrorPerNode ){
425  //Compute the regression data that will be stored at this node
426  computeNodeRegressionData( trainingData, regressionData );
427 
428  //Set the node
429  node->set( trainingData.getNumSamples(), featureIndex, threshold, regressionData );
430 
431  Regressifier::trainingLog << "Reached leaf node. Depth: " << depth << " NumSamples: " << M << std::endl;
432 
433  return node;
434  }
435 
436  //Set the node
437  node->set( trainingData.getNumSamples(), featureIndex, threshold, regressionData );
438 
439  //Remove the selected feature so we will not use it again
440  if( removeFeaturesAtEachSpilt ){
441  for(UINT i=0; i<features.getSize(); i++){
442  if( features[i] == featureIndex ){
443  features.erase( features.begin()+i );
444  break;
445  }
446  }
447  }
448 
449  //Split the data
450  RegressionData lhs(N,T);
451  RegressionData rhs(N,T);
452 
453  for(UINT i=0; i<M; i++){
454  if( node->predict( trainingData[i].getInputVector() ) ){
455  rhs.addSample(trainingData[i].getInputVector(), trainingData[i].getTargetVector());
456  }else lhs.addSample(trainingData[i].getInputVector(), trainingData[i].getTargetVector());
457  }
458 
459  //Run the recursive tree building on the children
460  node->setLeftChild( buildTree( lhs, node, features, nodeID ) );
461  node->setRightChild( buildTree( rhs, node, features, nodeID ) );
462 
463  return node;
464 }
465 
466 bool RegressionTree::computeBestSpilt( const RegressionData &trainingData, const Vector< UINT > &features, UINT &featureIndex, Float &threshold, Float &minError ){
467 
468  switch( trainingMode ){
469  case BEST_ITERATIVE_SPILT:
470  return computeBestSpiltBestIterativeSpilt( trainingData, features, featureIndex, threshold, minError );
471  break;
472  case BEST_RANDOM_SPLIT:
473  //return computeBestSpiltBestRandomSpilt( trainingData, features, featureIndex, threshold, minError );
474  break;
475  default:
476  Regressifier::errorLog << "Uknown trainingMode!" << std::endl;
477  return false;
478  break;
479  }
480 
481  return false;
482 }
483 
484 bool RegressionTree::computeBestSpiltBestIterativeSpilt( const RegressionData &trainingData, const Vector< UINT > &features, UINT &featureIndex, Float &threshold, Float &minError ){
485 
486  const UINT M = trainingData.getNumSamples();
487  const UINT N = (UINT)features.size();
488 
489  if( N == 0 ) return false;
490 
492  UINT bestFeatureIndex = 0;
493  UINT groupID = 0;
494  Float bestThreshold = 0;
495  Float error = 0;
496  Float minRange = 0;
497  Float maxRange = 0;
498  Float step = 0;
499  Vector< UINT > groupIndex(M);
500  VectorFloat groupCounter(2,0);
501  VectorFloat groupMean(2,0);
502  VectorFloat groupMSE(2,0);
503  Vector< MinMax > ranges = trainingData.getInputRanges();
504 
505  //Loop over each feature and try and find the best split point
506  for(UINT n=0; n<N; n++){
507  minRange = ranges[n].minValue;
508  maxRange = ranges[n].maxValue;
509  step = (maxRange-minRange)/Float(numSplittingSteps);
510  threshold = minRange;
511  featureIndex = features[n];
512  while( threshold <= maxRange ){
513 
514  //Iterate over each sample and work out what group it falls into
515  for(UINT i=0; i<M; i++){
516  groupID = trainingData[i].getInputVector()[featureIndex] >= threshold ? 1 : 0;
517  groupIndex[i] = groupID;
518  groupMean[ groupID ] += trainingData[i].getInputVector()[featureIndex];
519  groupCounter[ groupID ]++;
520  }
521  groupMean[0] /= groupCounter[0] > 0 ? groupCounter[0] : 1;
522  groupMean[1] /= groupCounter[1] > 0 ? groupCounter[1] : 1;
523 
524  //Compute the MSE for each group
525  for(UINT i=0; i<M; i++){
526  groupMSE[ groupIndex[i] ] += grt_sqr( groupMean[ groupIndex[i] ] - trainingData[ i ].getInputVector()[features[n]] );
527  }
528  groupMSE[0] /= groupCounter[0] > 0 ? groupCounter[0] : 1;
529  groupMSE[1] /= groupCounter[1] > 0 ? groupCounter[1] : 1;
530 
531  error = sqrt( groupMSE[0] + groupMSE[1] );
532 
533  //Store the best threshold and feature index
534  if( error < minError ){
535  minError = error;
536  bestThreshold = threshold;
537  bestFeatureIndex = featureIndex;
538  }
539 
540  //Update the threshold
541  threshold += step;
542  }
543  }
544 
545  //Set the best feature index and threshold
546  featureIndex = bestFeatureIndex;
547  threshold = bestThreshold;
548 
549  return true;
550 }
551 
552  /*
553 bool RegressionTree::computeBestSpiltBestRandomSpilt( const RegressionData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &threshold, Float &minError ){
554 
555  const UINT M = trainingData.getNumSamples();
556  const UINT N = (UINT)features.size();
557  const UINT K = (UINT)classLabels.size();
558 
559  if( N == 0 ) return false;
560 
561  minError = numeric_limits<Float>::max();
562  UINT bestFeatureIndex = 0;
563  Float bestThreshold = 0;
564  Float error = 0;
565  Float giniIndexL = 0;
566  Float giniIndexR = 0;
567  Float weightL = 0;
568  Float weightR = 0;
569  Vector< UINT > groupIndex(M);
570  VectorFloat groupCounter(2,0);
571  Vector< MinMax > ranges = trainingData.getRanges();
572 
573  MatrixDouble classProbabilities(K,2);
574 
575  //Loop over each feature and try and find the best split point
576  for(UINT n=0; n<N; n++){
577  for(UINT m=0; m<numSplittingSteps; m++){
578  //Randomly choose the threshold
579  threshold = random.getRandomNumberUniform(ranges[n].minValue,ranges[n].maxValue);
580 
581  //Iterate over each sample and work out if it should be in the lhs (0) or rhs (1) group
582  groupCounter[0] = groupCounter[1] = 0;
583  classProbabilities.setAllValues(0);
584  for(UINT i=0; i<M; i++){
585  groupIndex[i] = trainingData[ i ][ features[n] ] >= threshold ? 1 : 0;
586  groupCounter[ groupIndex[i] ]++;
587  classProbabilities[ getClassLabelIndexValue(trainingData[i].getClassLabel()) ][ groupIndex[i] ]++;
588  }
589 
590  //Compute the class probabilities for the lhs group and rhs group
591  for(UINT k=0; k<K; k++){
592  classProbabilities[k][0] = groupCounter[0]>0 ? classProbabilities[k][0]/groupCounter[0] : 0;
593  classProbabilities[k][1] = groupCounter[1]>0 ? classProbabilities[k][1]/groupCounter[1] : 0;
594  }
595 
596  //Compute the Gini index for the lhs and rhs groups
597  giniIndexL = giniIndexR = 0;
598  for(UINT k=0; k<K; k++){
599  giniIndexL += classProbabilities[k][0] * (1.0-classProbabilities[k][0]);
600  giniIndexR += classProbabilities[k][1] * (1.0-classProbabilities[k][1]);
601  }
602  weightL = groupCounter[0]/M;
603  weightR = groupCounter[1]/M;
604  error = (giniIndexL*weightL) + (giniIndexR*weightR);
605 
606  //Store the best threshold and feature index
607  if( error < minError ){
608  minError = error;
609  bestThreshold = threshold;
610  bestFeatureIndex = n;
611  }
612  }
613  }
614 
615  //Set the best feature index and threshold
616  featureIndex = bestFeatureIndex;
617  threshold = bestThreshold;
618 
619  return true;
620 }
621 
622 */
623 
624  //Compute the regression data that will be stored at this node
625 bool RegressionTree::computeNodeRegressionData( const RegressionData &trainingData, VectorFloat &regressionData ){
626 
627  const UINT M = trainingData.getNumSamples();
628  const UINT N = trainingData.getNumInputDimensions();
629  const UINT T = trainingData.getNumTargetDimensions();
630 
631  if( M == 0 ){
632  Regressifier::errorLog << "computeNodeRegressionData(...) - Failed to compute regression data, there are zero training samples!" << std::endl;
633  return false;
634  }
635 
636  //Make sure the regression data is the correct size
637  regressionData.clear();
638  regressionData.resize( T, 0 );
639 
640  //The regression data at this node is simply an average over all the training data at this node
641  for(unsigned int j=0; j<N; j++){
642  for(unsigned int i=0; i<M; i++){
643  regressionData[j] += trainingData[i].getTargetVector()[j];
644  }
645  regressionData[j] /= M;
646  }
647 
648  return true;
649 }
650 
651 GRT_END_NAMESPACE
652 
virtual bool loadModelFromFile(std::fstream &file)
Float getMinRMSErrorPerNode() const
Float minRMSErrorPerNode
Float scale(const Float &x, const Float &minSource, const Float &maxSource, const Float &minTarget, const Float &maxTarget, const bool constrain=false)
Definition: MLBase.h:339
This class implements a basic Regression Tree.
virtual bool train_(RegressionData &trainingData)
Vector< MinMax > getInputRanges() const
virtual bool print() const
Definition: Node.cpp:108
const RegressionTreeNode * getTree() const
virtual bool resize(const unsigned int size)
Definition: Vector.h:133
bool getTrained() const
Definition: MLBase.cpp:254
UINT getDepth() const
Definition: Node.cpp:307
bool copyBaseVariables(const Regressifier *regressifier)
UINT getNumInputDimensions() const
bool set(const UINT nodeSize, const UINT featureIndex, const Float threshold, const VectorFloat &regressionData)
virtual bool clear()
unsigned int getSize() const
Definition: Vector.h:193
Vector< MinMax > getTargetRanges() const
bool saveBaseSettingsToFile(std::fstream &file) const
bool scale(const Float minTarget, const Float maxTarget)
UINT getNumTargetDimensions() const
virtual bool predict(const VectorFloat &x)
std::string getRegressifierType() const
virtual Node * deepCopyNode() const
Definition: Node.cpp:275
bool setMinRMSErrorPerNode(const Float minRMSErrorPerNode)
virtual bool saveModelToFile(std::fstream &file) const
virtual bool saveToFile(std::fstream &file) const
Definition: Node.cpp:139
virtual bool deepCopyFrom(const Regressifier *regressifier)
virtual bool loadFromFile(std::fstream &file)
Definition: Node.cpp:181
RegressionTree & operator=(const RegressionTree &rhs)
bool loadBaseSettingsFromFile(std::fstream &file)
virtual bool clear()
Definition: Node.cpp:69
RegressionTreeNode * deepCopyTree() const
virtual bool predict_(VectorFloat &inputVector)
RegressionTree(const UINT numSplittingSteps=100, const UINT minNumSamplesPerNode=5, const UINT maxDepth=10, const bool removeFeaturesAtEachSpilt=false, const UINT trainingMode=BEST_ITERATIVE_SPILT, const bool useScaling=false, const Float minRMSErrorPerNode=0.01)
virtual ~RegressionTree(void)
virtual bool print() const
virtual bool predict(const VectorFloat &x)
Definition: Node.cpp:59
UINT getNumSamples() const
virtual bool clear()