GestureRecognitionToolkit  Version: 0.2.5
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source, C++ machine learning library for real-time gesture recognition.
DecisionTreeTripleFeatureNode.cpp
1 
2 #define GRT_DLL_EXPORTS
4 
5 GRT_BEGIN_NAMESPACE
6 
7 //Register the DecisionTreeTripleFeatureNode module with the Node base class
//NOTE(review): this static registration presumably allows the Node factory to
//construct instances from the string name "DecisionTreeTripleFeatureNode"
//(e.g. when a saved tree is loaded) — confirm against RegisterNode.
8 RegisterNode< DecisionTreeTripleFeatureNode > DecisionTreeTripleFeatureNode::registerModule("DecisionTreeTripleFeatureNode");
9 
//NOTE(review): extraction artifact — the signature lines at original lines 10
//and 14 are missing from this chunk (presumably the constructor and the
//destructor, given their position at the top of the file); both bodies simply
//call clear() to reset the node.
11  clear();
12 }
13 
15  clear();
16 }
17 
//NOTE(review): the signature line (original line 18) is missing from this
//chunk — presumably predict_(VectorFloat &x), per the override listed in the
//trailing index residue.
19 
//Route the sample to the right child (true) when feature A's offset from the
//central feature B is at least feature C's offset from B.
//NOTE(review): the x[ featureIndexB ] terms cancel algebraically, so this test
//reduces to x[ featureIndexA ] >= x[ featureIndexC ]; confirm featureIndexB is
//really meant to have no effect on the decision.
20  if( (x[ featureIndexA ] - x[ featureIndexB ]) >= (x[ featureIndexC ] - x[ featureIndexB ]) ) return true;
21 
22  return false;
23 }
24 
//NOTE(review): extraction artifact — the signature (original line 25,
//presumably clear()) and the base-class call itself (original line 28) are
//missing from this chunk; only the comment announcing the base-class clear
//survived.
26 
27  //Call the base class clear function
29 
//Reset the triplet of feature indices used by this node's split test
30  featureIndexA = 0;
31  featureIndexB = 0;
32  featureIndexC = 0;
33 
34  return true;
35 }
36 
//NOTE(review): the signature line (original line 37) is missing — judging by
//the body this is a print-style method: it renders the model into a local
//buffer via getModel() and echoes it to std::cout, returning whether the
//formatting step succeeded.
38 
39  std::ostringstream stream;
40 
//Only print when the model was formatted successfully
41  if( getModel( stream ) ){
42  std::cout << stream.str();
43  return true;
44  }
45 
46  return false;
47 }
48 
49 bool DecisionTreeTripleFeatureNode::getModel( std::ostream &stream ) const{
50 
51  std::string tab = "";
52  for(UINT i=0; i<depth; i++) tab += "\t";
53 
54  stream << tab << "depth: " << depth;
55  stream << " nodeSize: " << nodeSize;
56  stream << " featureIndexA: " << featureIndexA;
57  stream << " featureIndexB: " << featureIndexB;
58  stream << " featureIndexC: " << featureIndexC;
59  stream << " isLeafNode: " << isLeafNode << std::endl;
60 
61  stream << tab << "ClassProbabilities: ";
62  for(UINT i=0; i<classProbabilities.size(); i++){
63  stream << classProbabilities[i] << "\t";
64  }
65  stream << std::endl;
66 
67  if( leftChild != NULL ){
68  stream << tab << "LeftChild: " << std::endl;
69  leftChild->getModel( stream );
70  }
71 
72  if( rightChild != NULL ){
73  stream << tab << "RightChild: " << std::endl;
74  rightChild->getModel( stream );
75  }
76 
77  return true;
78 }
79 
//NOTE(review): extraction artifact — the signature (presumably the deepCopy()
//override listed in the trailing index residue) and the line that allocates
//`node` (original lines 80-82) are missing from this chunk.
81 
83 
//Bail out if the allocation of the copy failed
84  if( node == NULL ){
85  return NULL;
86  }
87 
88  //Copy this node into the node
89  node->depth = depth;
90  node->isLeafNode = isLeafNode;
91  node->nodeID = nodeID;
92  node->predictedNodeID = predictedNodeID;
93  node->nodeSize = nodeSize;
94  node->featureIndexA = featureIndexA;
95  node->featureIndexB = featureIndexB;
96  node->featureIndexC = featureIndexC;
97  node->classProbabilities = classProbabilities;
98 
99  //Recursively deep copy the left child
100  if( leftChild ){
101  node->leftChild = leftChild->deepCopy();
102  node->leftChild->setParent( node );
103  }
104 
105  //Recursively deep copy the right child
106  if( rightChild ){
107  node->rightChild = rightChild->deepCopy();
108  node->rightChild->setParent( node );
109  }
110 
111  return dynamic_cast< Node* >( node );
112 }
113 
//NOTE(review): three accessor bodies whose signature lines (original lines
//114, 118 and 122) were lost in extraction — presumably getFeatureIndexA(),
//getFeatureIndexB() and getFeatureIndexC(), each returning the matching
//member unchanged.
115  return featureIndexA;
116 }
117 
119  return featureIndexB;
120 }
121 
123  return featureIndexC;
124 }
125 
126 bool DecisionTreeTripleFeatureNode::set(const UINT nodeSize,const UINT featureIndexA,const UINT featureIndexB,const UINT featureIndexC,const VectorFloat &classProbabilities){
127  this->nodeSize = nodeSize;
128  this->featureIndexA = featureIndexA;
129  this->featureIndexB = featureIndexB;
130  this->featureIndexC = featureIndexC;
131  this->classProbabilities = classProbabilities;
132  return true;
133 }
134 
135 bool DecisionTreeTripleFeatureNode::computeBestSplitBestIterativeSplit( const UINT &numSplittingSteps, const ClassificationData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &minError ){
136  return computeSplit( numSplittingSteps, trainingData, features, classLabels, featureIndex, minError);
137 }
138 
139 bool DecisionTreeTripleFeatureNode::computeBestSplitBestRandomSplit( const UINT &numSplittingSteps, const ClassificationData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &minError ){
140  return computeSplit( numSplittingSteps, trainingData, features, classLabels, featureIndex, minError);
141 }
142 
143 bool DecisionTreeTripleFeatureNode::computeSplit( const UINT &numSplittingSteps, const ClassificationData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &minError ){
144 
145  const UINT M = trainingData.getNumSamples();
146  const UINT N = features.getSize();
147  const UINT K = classLabels.getSize();
148 
149  if( N == 0 ) return false;
150 
152  Random random;
153  UINT bestFeatureIndexA = 0;
154  UINT bestFeatureIndexB = 0;
155  UINT bestFeatureIndexC = 0;
156  Float error = 0;
157  Float giniIndexL = 0;
158  Float giniIndexR = 0;
159  Float weightL = 0;
160  Float weightR = 0;
161  Vector< UINT > groupIndex(M);
162  VectorFloat groupCounter(2,0);
163  Vector< MinMax > ranges = trainingData.getRanges();
164  MatrixFloat classProbabilities(K,2);
165  MatrixFloat data(M,1); //This will store our temporary data for each dimension
166 
167  //Randomly select which features we want to use
168  UINT numRandomFeatures = numSplittingSteps > N ? N : numSplittingSteps;
169  Vector< UINT > randomFeatures = random.getRandomSubset( 0, N, numRandomFeatures );
170 
171  //Loop over each random feature and try and find the best split point
172  for(UINT n=0; n<numRandomFeatures; n++){
173 
174  //Randomly select 3 features to use
175  featureIndexB = features[ randomFeatures[n] ]; //B is the central feature
176  featureIndexA = features[ randomFeatures[ random.getRandomNumberInt(0,numRandomFeatures) ] ];
177  featureIndexC = features[ randomFeatures[ random.getRandomNumberInt(0,numRandomFeatures) ] ];
178 
179  //Iterate over each sample and work out if it should be in the lhs (0) or rhs (1) group based on the current threshold
180  groupCounter[0] = groupCounter[1] = 0;
181  classProbabilities.setAllValues(0);
182  for(UINT i=0; i<M; i++){
183  groupIndex[i] = predict( trainingData[i].getSample() ) ? 1 : 0;
184  groupCounter[ groupIndex[i] ]++;
185  classProbabilities[ getClassLabelIndexValue(trainingData[i].getClassLabel(),classLabels) ][ groupIndex[i] ]++;
186  }
187 
188  //Compute the class probabilities for the lhs group and rhs group
189  for(UINT k=0; k<K; k++){
190  classProbabilities[k][0] = groupCounter[0]>0 ? classProbabilities[k][0]/groupCounter[0] : 0;
191  classProbabilities[k][1] = groupCounter[1]>0 ? classProbabilities[k][1]/groupCounter[1] : 0;
192  }
193 
194  //Compute the Gini index for the lhs and rhs groups
195  giniIndexL = giniIndexR = 0;
196  for(UINT k=0; k<K; k++){
197  giniIndexL += classProbabilities[k][0] * (1.0-classProbabilities[k][0]);
198  giniIndexR += classProbabilities[k][1] * (1.0-classProbabilities[k][1]);
199  }
200  weightL = groupCounter[0]/M;
201  weightR = groupCounter[1]/M;
202  error = (giniIndexL*weightL) + (giniIndexR*weightR);
203 
204  //Store the best threshold and feature index
205  if( error < minError ){
206  minError = error;
207  bestFeatureIndexA = featureIndexA;
208  bestFeatureIndexB = featureIndexB;
209  bestFeatureIndexC = featureIndexC;
210  }
211  }
212 
213  trainingLog << "Best features indexs: [" << bestFeatureIndexA << "," << bestFeatureIndexB << "," << bestFeatureIndexC << "] Min Error: " << minError << std::endl;
214 
215  //Set the best feature index that will be returned to the DecisionTree that called this function
216  featureIndex = bestFeatureIndexB;
217 
218  //Store the node size, feature indexs and class probabilities for this node
219  set(M,bestFeatureIndexA,bestFeatureIndexB,bestFeatureIndexC,trainingData.getClassProbabilities(classLabels));
220 
221  return true;
222 }
223 
//NOTE(review): extraction artifact — the signature (original line 224,
//presumably the saveParametersToFile(std::fstream&) override) and the guard
//line that calls the base-class save (original line 233) are missing from
//this chunk; only the error-handling branch of that guard survived.
225 
//Fail fast if the caller handed us a stream that is not open
226  if( !file.is_open() )
227  {
228  errorLog << "saveParametersToFile(fstream &file) - File is not open!" << std::endl;
229  return false;
230  }
231 
232  //Save the DecisionTreeNode parameters
234  errorLog << "saveParametersToFile(fstream &file) - Failed to save DecisionTreeNode parameters to file!" << std::endl;
235  return false;
236  }
237 
238  //Save the custom DecisionTreeThresholdNode parameters
239  file << "FeatureIndexA: " << featureIndexA << std::endl;
240  file << "FeatureIndexB: " << featureIndexB << std::endl;
241  file << "FeatureIndexC: " << featureIndexC << std::endl;
242 
243  return true;
244 }
245 
//NOTE(review): extraction artifact — the signature (original line 246,
//presumably the loadParametersFromFile(std::fstream&) override) and the guard
//line that calls the base-class load (original line 255) are missing from
//this chunk; only the error-handling branch of that guard survived.
247 
//Fail fast if the caller handed us a stream that is not open
248  if(!file.is_open())
249  {
250  errorLog << "loadParametersFromFile(fstream &file) - File is not open!" << std::endl;
251  return false;
252  }
253 
254  //Load the DecisionTreeNode parameters
256  errorLog << "loadParametersFromFile(fstream &file) - Failed to load DecisionTreeNode parameters from file!" << std::endl;
257  return false;
258  }
259 
260  std::string word;
261  //Load the custom DecisionTreeThresholdNode Parameters
//Each feature index is stored as a "Header: value" pair; verify the header
//token before reading the value so a malformed file fails loudly
262  file >> word;
263  if( word != "FeatureIndexA:" ){
264  errorLog << "loadParametersFromFile(fstream &file) - Failed to find FeatureIndexA header!" << std::endl;
265  return false;
266  }
267  file >> featureIndexA;
268 
269  file >> word;
270  if( word != "FeatureIndexB:" ){
271  errorLog << "loadParametersFromFile(fstream &file) - Failed to find FeatureIndexB header!" << std::endl;
272  return false;
273  }
274  file >> featureIndexB;
275 
276  file >> word;
277  if( word != "FeatureIndexC:" ){
278  errorLog << "loadParametersFromFile(fstream &file) - Failed to find FeatureIndexC header!" << std::endl;
279  return false;
280  }
281  file >> featureIndexC;
282 
283  return true;
284 }
285 
286 GRT_END_NAMESPACE
287 
288 
virtual bool predict(VectorFloat inputVector)
Definition: MLBase.cpp:135
bool set(const UINT nodeSize, const UINT featureIndexA, const UINT featureIndexB, const UINT featureIndexC, const VectorFloat &classProbabilities)
virtual bool clear() override
virtual bool getModel(std::ostream &stream) const override
Definition: Node.cpp:116
Definition: Node.h:37
This file contains the Random class, a useful wrapper for generating cross platform random functions...
Definition: Random.h:46
UINT getSize() const
Definition: Vector.h:201
virtual bool loadParametersFromFile(std::fstream &file) override
virtual bool saveParametersToFile(std::fstream &file) const override
virtual bool saveParametersToFile(std::fstream &file) const override
virtual Node * deepCopy() const override
virtual bool loadParametersFromFile(std::fstream &file) override
bool setAllValues(const T &value)
Definition: Matrix.h:366
Vector< unsigned int > getRandomSubset(const unsigned int startRange, const unsigned int endRange, const unsigned int subsetSize)
Definition: Random.cpp:185
UINT getNumSamples() const
virtual bool getModel(std::ostream &stream) const override
This class implements a DecisionTreeTripleFeatureNode, which is a specific type of node used for a De...
Vector< MinMax > getRanges() const
int getRandomNumberInt(int minRange, int maxRange)
Definition: Random.cpp:59
virtual bool predict_(VectorFloat &x) override
virtual Node * deepCopy() const
Definition: Node.cpp:272