GestureRecognitionToolkit  Version: 0.2.0
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source, c++ machine learning library for real-time gesture recognition.
RegressionTree.cpp
1 /*
2 GRT MIT License
3 Copyright (c) <2012> <Nicholas Gillian, Media Lab, MIT>
4 
5 Permission is hereby granted, free of charge, to any person obtaining a copy of this software
6 and associated documentation files (the "Software"), to deal in the Software without restriction,
7 including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
9 subject to the following conditions:
10 
11 The above copyright notice and this permission notice shall be included in all copies or substantial
12 portions of the Software.
13 
14 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
15 LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
16 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
17 WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
18 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 */
20 
#define GRT_DLL_EXPORTS
#include "RegressionTree.h"

#include <limits>
23 
24 GRT_BEGIN_NAMESPACE
25 
26 //Register the RegressionTreeNode with the Node base class
27 RegisterNode< RegressionTreeNode > RegressionTreeNode::registerModule("RegressionTreeNode");
28 
29 //Register the RegressionTree module with the Regressifier base class
30 RegisterRegressifierModule< RegressionTree > RegressionTree::registerModule("RegressionTree");
31 
32 RegressionTree::RegressionTree(const UINT numSplittingSteps,const UINT minNumSamplesPerNode,const UINT maxDepth,const bool removeFeaturesAtEachSpilt,const UINT trainingMode,const bool useScaling,const Float minRMSErrorPerNode)
33 {
34  tree = NULL;
35  this->numSplittingSteps = numSplittingSteps;
36  this->minNumSamplesPerNode = minNumSamplesPerNode;
37  this->maxDepth = maxDepth;
38  this->removeFeaturesAtEachSpilt = removeFeaturesAtEachSpilt;
39  this->trainingMode = trainingMode;
40  this->useScaling = useScaling;
41  this->minRMSErrorPerNode = minRMSErrorPerNode;
42  Regressifier::classType = "RegressionTree";
43  regressifierType = Regressifier::classType;
44  Regressifier::debugLog.setProceedingText("[DEBUG RegressionTree]");
45  Regressifier::errorLog.setProceedingText("[ERROR RegressionTree]");
46  Regressifier::trainingLog.setProceedingText("[TRAINING RegressionTree]");
47  Regressifier::warningLog.setProceedingText("[WARNING RegressionTree]");
48 
49 }
50 
52  tree = NULL;
53  Regressifier::classType = "RegressionTree";
54  regressifierType = Regressifier::classType;
55  Regressifier::debugLog.setProceedingText("[DEBUG RegressionTree]");
56  Regressifier::errorLog.setProceedingText("[ERROR RegressionTree]");
57  Regressifier::trainingLog.setProceedingText("[TRAINING RegressionTree]");
58  Regressifier::warningLog.setProceedingText("[WARNING RegressionTree]");
59  *this = rhs;
60 }
61 
63 {
64  clear();
65 }
66 
68  if( this != &rhs ){
69  //Clear this tree
70  this->clear();
71 
72  if( rhs.getTrained() ){
73  //Deep copy the tree
74  this->tree = dynamic_cast< RegressionTreeNode* >( rhs.deepCopyTree() );
75  }
76 
77  this->numSplittingSteps = rhs.numSplittingSteps;
78  this->minNumSamplesPerNode = rhs.minNumSamplesPerNode;
79  this->maxDepth = rhs.maxDepth;
80  this->removeFeaturesAtEachSpilt = rhs.removeFeaturesAtEachSpilt;
81  this->trainingMode = rhs.trainingMode;
83 
84  //Copy the base variables
86  }
87  return *this;
88 }
89 
90 bool RegressionTree::deepCopyFrom(const Regressifier *regressifier){
91 
92  if( regressifier == NULL ) return false;
93 
94  if( this->getRegressifierType() == regressifier->getRegressifierType() ){
95 
96  RegressionTree *ptr = (RegressionTree*)regressifier;
97 
98  //Clear this tree
99  this->clear();
100 
101  if( ptr->getTrained() ){
102  //Deep copy the tree
103  this->tree = dynamic_cast< RegressionTreeNode* >( ptr->deepCopyTree() );
104  }
105 
106  this->numSplittingSteps = ptr->numSplittingSteps;
107  this->minNumSamplesPerNode = ptr->minNumSamplesPerNode;
108  this->maxDepth = ptr->maxDepth;
109  this->removeFeaturesAtEachSpilt = ptr->removeFeaturesAtEachSpilt;
110  this->trainingMode = ptr->trainingMode;
112 
113  //Copy the base variables
114  return copyBaseVariables( regressifier );
115  }
116  return false;
117 }
118 
120 
121  //Clear any previous model
122  clear();
123 
124  const unsigned int M = trainingData.getNumSamples();
125  const unsigned int N = trainingData.getNumInputDimensions();
126  const unsigned int T = trainingData.getNumTargetDimensions();
127 
128  if( M == 0 ){
129  Regressifier::errorLog << "train_(RegressionData &trainingData) - Training data has zero samples!" << std::endl;
130  return false;
131  }
132 
133  numInputDimensions = N;
134  numOutputDimensions = T;
135  inputVectorRanges = trainingData.getInputRanges();
136  targetVectorRanges = trainingData.getTargetRanges();
137 
138  //Scale the training data if needed
139  if( useScaling ){
140  //Scale the training data between 0 and 1
141  trainingData.scale(0, 1);
142  }
143 
144  //Setup the valid features - at this point all features can be used
145  Vector< UINT > features(N);
146  for(UINT i=0; i<N; i++){
147  features[i] = i;
148  }
149 
150  //Build the tree
151  UINT nodeID = 0;
152  tree = buildTree( trainingData, NULL, features, nodeID );
153 
154  if( tree == NULL ){
155  clear();
156  Regressifier::errorLog << "train_(RegressionData &trainingData) - Failed to build tree!" << std::endl;
157  return false;
158  }
159 
160  //Flag that the algorithm has been trained
161  trained = true;
162 
163  return true;
164 }
165 
167 
168  if( !trained ){
169  Regressifier::errorLog << "predict_(VectorFloat &inputVector) - Model Not Trained!" << std::endl;
170  return false;
171  }
172 
173  if( tree == NULL ){
174  Regressifier::errorLog << "predict_(VectorFloat &inputVector) - Tree pointer is null!" << std::endl;
175  return false;
176  }
177 
178  if( inputVector.size() != numInputDimensions ){
179  Regressifier::errorLog << "predict_(VectorFloat &inputVector) - The size of the input Vector (" << inputVector.size() << ") does not match the num features in the model (" << numInputDimensions << std::endl;
180  return false;
181  }
182 
183  if( useScaling ){
184  for(UINT n=0; n<numInputDimensions; n++){
185  inputVector[n] = scale(inputVector[n], inputVectorRanges[n].minValue, inputVectorRanges[n].maxValue, 0, 1);
186  }
187  }
188 
189  if( !tree->predict( inputVector, regressionData ) ){
190  Regressifier::errorLog << "predict_(VectorFloat &inputVector) - Failed to predict!" << std::endl;
191  return false;
192  }
193 
194  return true;
195 }
196 
198 
199  //Clear the Classifier variables
201 
202  if( tree != NULL ){
203  tree->clear();
204  delete tree;
205  tree = NULL;
206  }
207 
208  return true;
209 }
210 
212  if( tree != NULL )
213  return tree->print();
214  return false;
215 }
216 
217 bool RegressionTree::save( std::fstream &file ) const{
218 
219  if(!file.is_open())
220  {
221  Regressifier::errorLog <<"save(fstream &file) - The file is not open!" << std::endl;
222  return false;
223  }
224 
225  //Write the header info
226  file << "GRT_REGRESSION_TREE_MODEL_FILE_V1.0\n";
227 
228  //Write the classifier settings to the file
230  Regressifier::errorLog <<"save(fstream &file) - Failed to save classifier base settings to file!" << std::endl;
231  return false;
232  }
233 
234  file << "NumSplittingSteps: " << numSplittingSteps << std::endl;
235  file << "MinNumSamplesPerNode: " << minNumSamplesPerNode << std::endl;
236  file << "MaxDepth: " << maxDepth << std::endl;
237  file << "RemoveFeaturesAtEachSpilt: " << removeFeaturesAtEachSpilt << std::endl;
238  file << "TrainingMode: " << trainingMode << std::endl;
239  file << "TreeBuilt: " << (tree != NULL ? 1 : 0) << std::endl;
240 
241  if( tree != NULL ){
242  file << "Tree:\n";
243  if( !tree->save( file ) ){
244  Regressifier::errorLog << "save(fstream &file) - Failed to save tree to file!" << std::endl;
245  return false;
246  }
247  }
248 
249  return true;
250 }
251 
252 bool RegressionTree::load( std::fstream &file ){
253 
254  clear();
255 
256  if(!file.is_open())
257  {
258  Regressifier::errorLog << "load(string filename) - Could not open file to load model" << std::endl;
259  return false;
260  }
261 
262  std::string word;
263  file >> word;
264 
265  //Find the file type header
266  if(word != "GRT_REGRESSION_TREE_MODEL_FILE_V1.0"){
267  Regressifier::errorLog << "load(string filename) - Could not find Model File Header" << std::endl;
268  return false;
269  }
270 
271  //Load the base settings from the file
273  Regressifier::errorLog << "load(string filename) - Failed to load base settings from file!" << std::endl;
274  return false;
275  }
276 
277  file >> word;
278  if(word != "NumSplittingSteps:"){
279  Regressifier::errorLog << "load(string filename) - Could not find the NumSplittingSteps!" << std::endl;
280  return false;
281  }
282  file >> numSplittingSteps;
283 
284  file >> word;
285  if(word != "MinNumSamplesPerNode:"){
286  Regressifier::errorLog << "load(string filename) - Could not find the MinNumSamplesPerNode!" << std::endl;
287  return false;
288  }
289  file >> minNumSamplesPerNode;
290 
291  file >> word;
292  if(word != "MaxDepth:"){
293  Regressifier::errorLog << "load(string filename) - Could not find the MaxDepth!" << std::endl;
294  return false;
295  }
296  file >> maxDepth;
297 
298  file >> word;
299  if(word != "RemoveFeaturesAtEachSpilt:"){
300  Regressifier::errorLog << "load(string filename) - Could not find the RemoveFeaturesAtEachSpilt!" << std::endl;
301  return false;
302  }
303  file >> removeFeaturesAtEachSpilt;
304 
305  file >> word;
306  if(word != "TrainingMode:"){
307  Regressifier::errorLog << "load(string filename) - Could not find the TrainingMode!" << std::endl;
308  return false;
309  }
310  file >> trainingMode;
311 
312  file >> word;
313  if(word != "TreeBuilt:"){
314  Regressifier::errorLog << "load(string filename) - Could not find the TreeBuilt!" << std::endl;
315  return false;
316  }
317  file >> trained;
318 
319  if( trained ){
320  file >> word;
321  if(word != "Tree:"){
322  Regressifier::errorLog << "load(string filename) - Could not find the Tree!" << std::endl;
323  return false;
324  }
325 
326  //Create a new tree
327  tree = new RegressionTreeNode;
328 
329  if( tree == NULL ){
330  clear();
331  Regressifier::errorLog << "load(fstream &file) - Failed to create new RegressionTreeNode!" << std::endl;
332  return false;
333  }
334 
335  tree->setParent( NULL );
336  if( !tree->load( file ) ){
337  clear();
338  Regressifier::errorLog << "load(fstream &file) - Failed to load tree from file!" << std::endl;
339  return false;
340  }
341  }
342 
343  return true;
344 }
345 
347 
348  if( tree == NULL ){
349  return NULL;
350  }
351 
352  return (RegressionTreeNode*)tree->deepCopyNode();
353 }
354 
356  return dynamic_cast< RegressionTreeNode* >( tree );
357 }
358 
360  return minRMSErrorPerNode;
361 }
362 
363 bool RegressionTree::setMinRMSErrorPerNode(const Float minRMSErrorPerNode){
364  this->minRMSErrorPerNode = minRMSErrorPerNode;
365  return true;
366 }
367 
368 RegressionTreeNode* RegressionTree::buildTree(const RegressionData &trainingData,RegressionTreeNode *parent,Vector< UINT > features,UINT nodeID){
369 
370  const UINT M = trainingData.getNumSamples();
371  const UINT N = trainingData.getNumInputDimensions();
372  const UINT T = trainingData.getNumTargetDimensions();
373  VectorFloat regressionData(T);
374 
375  //Update the nodeID
376 
377  //Get the depth
378  UINT depth = 0;
379 
380  if( parent != NULL )
381  depth = parent->getDepth() + 1;
382 
383  //If there are no training data then return NULL
384  if( trainingData.getNumSamples() == 0 )
385  return NULL;
386 
387  //Create the new node
389 
390  if( node == NULL )
391  return NULL;
392 
393  //Set the parent
394  node->initNode( parent, depth, nodeID );
395 
396  //If there are no features left then create a leaf node and return
397  if( features.size() == 0 || M < minNumSamplesPerNode || depth >= maxDepth ){
398 
399  //Flag that this is a leaf node
400  node->setIsLeafNode( true );
401 
402  //Compute the regression data that will be stored at this node
403  computeNodeRegressionData( trainingData, regressionData );
404 
405  //Set the node
406  node->set( trainingData.getNumSamples(), 0, 0, regressionData );
407 
408  Regressifier::trainingLog << "Reached leaf node. Depth: " << depth << " NumSamples: " << trainingData.getNumSamples() << std::endl;
409 
410  return node;
411  }
412 
413  //Compute the best spilt point
414  UINT featureIndex = 0;
415  Float threshold = 0;
416  Float minError = 0;
417  if( !computeBestSpilt( trainingData, features, featureIndex, threshold, minError ) ){
418  delete node;
419  return NULL;
420  }
421 
422  Regressifier::trainingLog << "Depth: " << depth << " FeatureIndex: " << featureIndex << " Threshold: " << threshold << " MinError: " << minError << std::endl;
423 
424  //If the minError is below the minRMSError then create a leaf node and return
425  if( minError <= minRMSErrorPerNode ){
426  //Compute the regression data that will be stored at this node
427  computeNodeRegressionData( trainingData, regressionData );
428 
429  //Set the node
430  node->set( trainingData.getNumSamples(), featureIndex, threshold, regressionData );
431 
432  Regressifier::trainingLog << "Reached leaf node. Depth: " << depth << " NumSamples: " << M << std::endl;
433 
434  return node;
435  }
436 
437  //Set the node
438  node->set( trainingData.getNumSamples(), featureIndex, threshold, regressionData );
439 
440  //Remove the selected feature so we will not use it again
441  if( removeFeaturesAtEachSpilt ){
442  for(UINT i=0; i<features.getSize(); i++){
443  if( features[i] == featureIndex ){
444  features.erase( features.begin()+i );
445  break;
446  }
447  }
448  }
449 
450  //Split the data
451  RegressionData lhs(N,T);
452  RegressionData rhs(N,T);
453 
454  for(UINT i=0; i<M; i++){
455  if( node->predict( trainingData[i].getInputVector() ) ){
456  rhs.addSample(trainingData[i].getInputVector(), trainingData[i].getTargetVector());
457  }else lhs.addSample(trainingData[i].getInputVector(), trainingData[i].getTargetVector());
458  }
459 
460  //Run the recursive tree building on the children
461  node->setLeftChild( buildTree( lhs, node, features, nodeID ) );
462  node->setRightChild( buildTree( rhs, node, features, nodeID ) );
463 
464  return node;
465 }
466 
467 bool RegressionTree::computeBestSpilt( const RegressionData &trainingData, const Vector< UINT > &features, UINT &featureIndex, Float &threshold, Float &minError ){
468 
469  switch( trainingMode ){
470  case BEST_ITERATIVE_SPILT:
471  return computeBestSpiltBestIterativeSpilt( trainingData, features, featureIndex, threshold, minError );
472  break;
473  case BEST_RANDOM_SPLIT:
474  //return computeBestSpiltBestRandomSpilt( trainingData, features, featureIndex, threshold, minError );
475  break;
476  default:
477  Regressifier::errorLog << "Uknown trainingMode!" << std::endl;
478  return false;
479  break;
480  }
481 
482  return false;
483 }
484 
485 bool RegressionTree::computeBestSpiltBestIterativeSpilt( const RegressionData &trainingData, const Vector< UINT > &features, UINT &featureIndex, Float &threshold, Float &minError ){
486 
487  const UINT M = trainingData.getNumSamples();
488  const UINT N = (UINT)features.size();
489 
490  if( N == 0 ) return false;
491 
493  UINT bestFeatureIndex = 0;
494  UINT groupID = 0;
495  Float bestThreshold = 0;
496  Float error = 0;
497  Float minRange = 0;
498  Float maxRange = 0;
499  Float step = 0;
500  Vector< UINT > groupIndex(M);
501  VectorFloat groupCounter(2,0);
502  VectorFloat groupMean(2,0);
503  VectorFloat groupMSE(2,0);
504  Vector< MinMax > ranges = trainingData.getInputRanges();
505 
506  //Loop over each feature and try and find the best split point
507  for(UINT n=0; n<N; n++){
508  minRange = ranges[n].minValue;
509  maxRange = ranges[n].maxValue;
510  step = (maxRange-minRange)/Float(numSplittingSteps);
511  threshold = minRange;
512  featureIndex = features[n];
513  while( threshold <= maxRange ){
514 
515  //Iterate over each sample and work out what group it falls into
516  for(UINT i=0; i<M; i++){
517  groupID = trainingData[i].getInputVector()[featureIndex] >= threshold ? 1 : 0;
518  groupIndex[i] = groupID;
519  groupMean[ groupID ] += trainingData[i].getInputVector()[featureIndex];
520  groupCounter[ groupID ]++;
521  }
522  groupMean[0] /= groupCounter[0] > 0 ? groupCounter[0] : 1;
523  groupMean[1] /= groupCounter[1] > 0 ? groupCounter[1] : 1;
524 
525  //Compute the MSE for each group
526  for(UINT i=0; i<M; i++){
527  groupMSE[ groupIndex[i] ] += grt_sqr( groupMean[ groupIndex[i] ] - trainingData[ i ].getInputVector()[features[n]] );
528  }
529  groupMSE[0] /= groupCounter[0] > 0 ? groupCounter[0] : 1;
530  groupMSE[1] /= groupCounter[1] > 0 ? groupCounter[1] : 1;
531 
532  error = sqrt( groupMSE[0] + groupMSE[1] );
533 
534  //Store the best threshold and feature index
535  if( error < minError ){
536  minError = error;
537  bestThreshold = threshold;
538  bestFeatureIndex = featureIndex;
539  }
540 
541  //Update the threshold
542  threshold += step;
543  }
544  }
545 
546  //Set the best feature index and threshold
547  featureIndex = bestFeatureIndex;
548  threshold = bestThreshold;
549 
550  return true;
551 }
552 
553 /*
554 bool RegressionTree::computeBestSpiltBestRandomSpilt( const RegressionData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &threshold, Float &minError ){
555 
556 const UINT M = trainingData.getNumSamples();
557 const UINT N = (UINT)features.size();
558 const UINT K = (UINT)classLabels.size();
559 
560 if( N == 0 ) return false;
561 
562 minError = numeric_limits<Float>::max();
563 UINT bestFeatureIndex = 0;
564 Float bestThreshold = 0;
565 Float error = 0;
566 Float giniIndexL = 0;
567 Float giniIndexR = 0;
568 Float weightL = 0;
569 Float weightR = 0;
570 Vector< UINT > groupIndex(M);
571 VectorFloat groupCounter(2,0);
572 Vector< MinMax > ranges = trainingData.getRanges();
573 
574 MatrixDouble classProbabilities(K,2);
575 
576 //Loop over each feature and try and find the best split point
577 for(UINT n=0; n<N; n++){
578 for(UINT m=0; m<numSplittingSteps; m++){
579 //Randomly choose the threshold
580 threshold = random.getRandomNumberUniform(ranges[n].minValue,ranges[n].maxValue);
581 
582 //Iterate over each sample and work out if it should be in the lhs (0) or rhs (1) group
583 groupCounter[0] = groupCounter[1] = 0;
584 classProbabilities.setAllValues(0);
585 for(UINT i=0; i<M; i++){
586 groupIndex[i] = trainingData[ i ][ features[n] ] >= threshold ? 1 : 0;
587 groupCounter[ groupIndex[i] ]++;
588 classProbabilities[ getClassLabelIndexValue(trainingData[i].getClassLabel()) ][ groupIndex[i] ]++;
589 }
590 
591 //Compute the class probabilities for the lhs group and rhs group
592 for(UINT k=0; k<K; k++){
593 classProbabilities[k][0] = groupCounter[0]>0 ? classProbabilities[k][0]/groupCounter[0] : 0;
594 classProbabilities[k][1] = groupCounter[1]>0 ? classProbabilities[k][1]/groupCounter[1] : 0;
595 }
596 
597 //Compute the Gini index for the lhs and rhs groups
598 giniIndexL = giniIndexR = 0;
599 for(UINT k=0; k<K; k++){
600 giniIndexL += classProbabilities[k][0] * (1.0-classProbabilities[k][0]);
601 giniIndexR += classProbabilities[k][1] * (1.0-classProbabilities[k][1]);
602 }
603 weightL = groupCounter[0]/M;
604 weightR = groupCounter[1]/M;
605 error = (giniIndexL*weightL) + (giniIndexR*weightR);
606 
607 //Store the best threshold and feature index
608 if( error < minError ){
609 minError = error;
610 bestThreshold = threshold;
611 bestFeatureIndex = n;
612 }
613 }
614 }
615 
616 //Set the best feature index and threshold
617 featureIndex = bestFeatureIndex;
618 threshold = bestThreshold;
619 
620 return true;
621 }
622 
623 */
624 
625 //Compute the regression data that will be stored at this node
626 bool RegressionTree::computeNodeRegressionData( const RegressionData &trainingData, VectorFloat &regressionData ){
627 
628  const UINT M = trainingData.getNumSamples();
629  const UINT N = trainingData.getNumInputDimensions();
630  const UINT T = trainingData.getNumTargetDimensions();
631 
632  if( M == 0 ){
633  Regressifier::errorLog << "computeNodeRegressionData(...) - Failed to compute regression data, there are zero training samples!" << std::endl;
634  return false;
635  }
636 
637  //Make sure the regression data is the correct size
638  regressionData.clear();
639  regressionData.resize( T, 0 );
640 
641  //The regression data at this node is simply an average over all the training data at this node
642  for(unsigned int j=0; j<N; j++){
643  for(unsigned int i=0; i<M; i++){
644  regressionData[j] += trainingData[i].getTargetVector()[j];
645  }
646  regressionData[j] /= M;
647  }
648 
649  return true;
650 }
651 
652 GRT_END_NAMESPACE
Float getMinRMSErrorPerNode() const
Float minRMSErrorPerNode
Float scale(const Float &x, const Float &minSource, const Float &maxSource, const Float &minTarget, const Float &maxTarget, const bool constrain=false)
Definition: MLBase.h:353
This class implements a basic Regression Tree.
virtual bool train_(RegressionData &trainingData)
Vector< MinMax > getInputRanges() const
virtual bool print() const
Definition: Node.cpp:109
const RegressionTreeNode * getTree() const
virtual bool resize(const unsigned int size)
Definition: Vector.h:133
bool getTrained() const
Definition: MLBase.cpp:259
UINT getDepth() const
Definition: Node.cpp:308
virtual bool save(std::fstream &file) const
UINT getSize() const
Definition: Vector.h:191
bool copyBaseVariables(const Regressifier *regressifier)
UINT getNumInputDimensions() const
bool set(const UINT nodeSize, const UINT featureIndex, const Float threshold, const VectorFloat &regressionData)
virtual bool clear()
virtual bool load(std::fstream &file)
Vector< MinMax > getTargetRanges() const
bool saveBaseSettingsToFile(std::fstream &file) const
bool scale(const Float minTarget, const Float maxTarget)
virtual bool save(std::fstream &file) const
Definition: Node.cpp:140
UINT getNumTargetDimensions() const
virtual bool predict(const VectorFloat &x)
std::string getRegressifierType() const
virtual Node * deepCopyNode() const
Definition: Node.cpp:276
bool setMinRMSErrorPerNode(const Float minRMSErrorPerNode)
virtual bool deepCopyFrom(const Regressifier *regressifier)
RegressionTree & operator=(const RegressionTree &rhs)
bool loadBaseSettingsFromFile(std::fstream &file)
virtual bool clear()
Definition: Node.cpp:70
RegressionTreeNode * deepCopyTree() const
virtual bool predict_(VectorFloat &inputVector)
RegressionTree(const UINT numSplittingSteps=100, const UINT minNumSamplesPerNode=5, const UINT maxDepth=10, const bool removeFeaturesAtEachSpilt=false, const UINT trainingMode=BEST_ITERATIVE_SPILT, const bool useScaling=false, const Float minRMSErrorPerNode=0.01)
virtual ~RegressionTree(void)
virtual bool load(std::fstream &file)
Definition: Node.cpp:182
virtual bool print() const
virtual bool predict(const VectorFloat &x)
Definition: Node.cpp:60
UINT getNumSamples() const
virtual bool clear()