/*
GestureRecognitionToolkit  Version: 0.1.0
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source, C++ machine learning library for real-time gesture recognition.
DecisionTreeTripleFeatureNode.cpp
*/
4 GRT_BEGIN_NAMESPACE
5 
6 //Register the DecisionTreeTripleFeatureNode module with the Node base class
7 RegisterNode< DecisionTreeTripleFeatureNode > DecisionTreeTripleFeatureNode::registerModule("DecisionTreeTripleFeatureNode");
8 
10  nodeType = "DecisionTreeTripleFeatureNode";
11  parent = NULL;
12  leftChild = NULL;
13  rightChild = NULL;
14  clear();
15 }
16 
18  clear();
19 }
20 
22 
23  if( (x[ featureIndexA ] - x[ featureIndexB ]) >= (x[ featureIndexC ] - x[ featureIndexB ]) ) return true;
24 
25  return false;
26 }
27 
29 
30  //Call the base class clear function
32 
33  featureIndexA = 0;
34  featureIndexB = 0;
35  featureIndexC = 0;
36 
37  return true;
38 }
39 
41 
42  std::ostringstream stream;
43 
44  if( getModel( stream ) ){
45  std::cout << stream.str();
46  return true;
47  }
48 
49  return false;
50 }
51 
52 bool DecisionTreeTripleFeatureNode::getModel( std::ostream &stream ) const{
53 
54  std::string tab = "";
55  for(UINT i=0; i<depth; i++) tab += "\t";
56 
57  stream << tab << "depth: " << depth;
58  stream << " nodeSize: " << nodeSize;
59  stream << " featureIndexA: " << featureIndexA;
60  stream << " featureIndexB: " << featureIndexB;
61  stream << " featureIndexC: " << featureIndexC;
62  stream << " isLeafNode: " << isLeafNode << std::endl;
63 
64  stream << tab << "ClassProbabilities: ";
65  for(UINT i=0; i<classProbabilities.size(); i++){
66  stream << classProbabilities[i] << "\t";
67  }
68  stream << std::endl;
69 
70  if( leftChild != NULL ){
71  stream << tab << "LeftChild: " << std::endl;
72  leftChild->getModel( stream );
73  }
74 
75  if( rightChild != NULL ){
76  stream << tab << "RightChild: " << std::endl;
77  rightChild->getModel( stream );
78  }
79 
80  return true;
81 }
82 
84 
86 
87  if( node == NULL ){
88  return NULL;
89  }
90 
91  //Copy this node into the node
92  node->depth = depth;
93  node->isLeafNode = isLeafNode;
94  node->nodeID = nodeID;
95  node->predictedNodeID = predictedNodeID;
96  node->nodeSize = nodeSize;
97  node->featureIndexA = featureIndexA;
98  node->featureIndexB = featureIndexB;
99  node->featureIndexC = featureIndexC;
100  node->classProbabilities = classProbabilities;
101 
102  //Recursively deep copy the left child
103  if( leftChild ){
104  node->leftChild = leftChild->deepCopyNode();
105  node->leftChild->setParent( node );
106  }
107 
108  //Recursively deep copy the right child
109  if( rightChild ){
110  node->rightChild = rightChild->deepCopyNode();
111  node->rightChild->setParent( node );
112  }
113 
114  return dynamic_cast< DecisionTreeTripleFeatureNode* >( node );
115 }
116 
118  return dynamic_cast< DecisionTreeTripleFeatureNode* >( deepCopyNode() );
119 }
120 
122  return featureIndexA;
123 }
124 
126  return featureIndexB;
127 }
128 
130  return featureIndexC;
131 }
132 
133 bool DecisionTreeTripleFeatureNode::set(const UINT nodeSize,const UINT featureIndexA,const UINT featureIndexB,const UINT featureIndexC,const VectorFloat &classProbabilities){
134  this->nodeSize = nodeSize;
135  this->featureIndexA = featureIndexA;
136  this->featureIndexB = featureIndexB;
137  this->featureIndexC = featureIndexC;
138  this->classProbabilities = classProbabilities;
139  return true;
140 }
141 
142 bool DecisionTreeTripleFeatureNode::computeBestSpiltBestIterativeSpilt( const UINT &numSplittingSteps, const ClassificationData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &minError ){
143 
144  return computeBestSpilt( numSplittingSteps, trainingData, features, classLabels, featureIndex, minError);
145 }
146 
147 bool DecisionTreeTripleFeatureNode::computeBestSpiltBestRandomSpilt( const UINT &numSplittingSteps, const ClassificationData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &minError ){
148 
149  return computeBestSpilt( numSplittingSteps, trainingData, features, classLabels, featureIndex, minError);
150 }
151 
152 bool DecisionTreeTripleFeatureNode::computeBestSpilt( const UINT &numSplittingSteps, const ClassificationData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &minError ){
153 
154  const UINT M = trainingData.getNumSamples();
155  const UINT N = features.getSize();
156  const UINT K = classLabels.getSize();
157 
158  if( N == 0 ) return false;
159 
161  Random random;
162  UINT bestFeatureIndexA = 0;
163  UINT bestFeatureIndexB = 0;
164  UINT bestFeatureIndexC = 0;
165  Float error = 0;
166  Float giniIndexL = 0;
167  Float giniIndexR = 0;
168  Float weightL = 0;
169  Float weightR = 0;
170  Vector< UINT > groupIndex(M);
171  VectorFloat groupCounter(2,0);
172  Vector< MinMax > ranges = trainingData.getRanges();
173  MatrixFloat classProbabilities(K,2);
174  MatrixFloat data(M,1); //This will store our temporary data for each dimension
175 
176  //Randomly select which features we want to use
177  UINT numRandomFeatures = numSplittingSteps > N ? N : numSplittingSteps;
178  Vector< UINT > randomFeatures = random.getRandomSubset( 0, N, numRandomFeatures );
179 
180  //Loop over each random feature and try and find the best split point
181  for(UINT n=0; n<numRandomFeatures; n++){
182 
183  //Randomly select 3 features to use
184  featureIndexB = features[ randomFeatures[n] ]; //B is the central feature
185  featureIndexA = features[ randomFeatures[ random.getRandomNumberInt(0,numRandomFeatures) ] ];
186  featureIndexC = features[ randomFeatures[ random.getRandomNumberInt(0,numRandomFeatures) ] ];
187 
188  //Iterate over each sample and work out if it should be in the lhs (0) or rhs (1) group based on the current threshold
189  groupCounter[0] = groupCounter[1] = 0;
190  classProbabilities.setAllValues(0);
191  for(UINT i=0; i<M; i++){
192  groupIndex[i] = predict( trainingData[i].getSample() ) ? 1 : 0;
193  groupCounter[ groupIndex[i] ]++;
194  classProbabilities[ getClassLabelIndexValue(trainingData[i].getClassLabel(),classLabels) ][ groupIndex[i] ]++;
195  }
196 
197  //Compute the class probabilities for the lhs group and rhs group
198  for(UINT k=0; k<K; k++){
199  classProbabilities[k][0] = groupCounter[0]>0 ? classProbabilities[k][0]/groupCounter[0] : 0;
200  classProbabilities[k][1] = groupCounter[1]>0 ? classProbabilities[k][1]/groupCounter[1] : 0;
201  }
202 
203  //Compute the Gini index for the lhs and rhs groups
204  giniIndexL = giniIndexR = 0;
205  for(UINT k=0; k<K; k++){
206  giniIndexL += classProbabilities[k][0] * (1.0-classProbabilities[k][0]);
207  giniIndexR += classProbabilities[k][1] * (1.0-classProbabilities[k][1]);
208  }
209  weightL = groupCounter[0]/M;
210  weightR = groupCounter[1]/M;
211  error = (giniIndexL*weightL) + (giniIndexR*weightR);
212 
213  //Store the best threshold and feature index
214  if( error < minError ){
215  minError = error;
216  bestFeatureIndexA = featureIndexA;
217  bestFeatureIndexB = featureIndexB;
218  bestFeatureIndexC = featureIndexC;
219  }
220  }
221 
222  trainingLog << "Best features indexs: [" << bestFeatureIndexA << "," << bestFeatureIndexB << "," << bestFeatureIndexC << "] Min Error: " << minError << std::endl;
223 
224  //Set the best feature index that will be returned to the DecisionTree that called this function
225  featureIndex = bestFeatureIndexB;
226 
227  //Store the node size, feature indexs and class probabilities for this node
228  set(M,bestFeatureIndexA,bestFeatureIndexB,bestFeatureIndexC,trainingData.getClassProbabilities(classLabels));
229 
230  return true;
231 }
232 
234 
235  if( !file.is_open() )
236  {
237  errorLog << "saveParametersToFile(fstream &file) - File is not open!" << std::endl;
238  return false;
239  }
240 
241  //Save the DecisionTreeNode parameters
243  errorLog << "saveParametersToFile(fstream &file) - Failed to save DecisionTreeNode parameters to file!" << std::endl;
244  return false;
245  }
246 
247  //Save the custom DecisionTreeThresholdNode parameters
248  file << "FeatureIndexA: " << featureIndexA << std::endl;
249  file << "FeatureIndexB: " << featureIndexB << std::endl;
250  file << "FeatureIndexC: " << featureIndexC << std::endl;
251 
252  return true;
253 }
254 
256 
257  if(!file.is_open())
258  {
259  errorLog << "loadParametersFromFile(fstream &file) - File is not open!" << std::endl;
260  return false;
261  }
262 
263  //Load the DecisionTreeNode parameters
265  errorLog << "loadParametersFromFile(fstream &file) - Failed to load DecisionTreeNode parameters from file!" << std::endl;
266  return false;
267  }
268 
269  std::string word;
270  //Load the custom DecisionTreeThresholdNode Parameters
271  file >> word;
272  if( word != "FeatureIndexA:" ){
273  errorLog << "loadParametersFromFile(fstream &file) - Failed to find FeatureIndexA header!" << std::endl;
274  return false;
275  }
276  file >> featureIndexA;
277 
278  file >> word;
279  if( word != "FeatureIndexB:" ){
280  errorLog << "loadParametersFromFile(fstream &file) - Failed to find FeatureIndexB header!" << std::endl;
281  return false;
282  }
283  file >> featureIndexB;
284 
285  file >> word;
286  if( word != "FeatureIndexC:" ){
287  errorLog << "loadParametersFromFile(fstream &file) - Failed to find FeatureIndexC header!" << std::endl;
288  return false;
289  }
290  file >> featureIndexC;
291 
292  return true;
293 }
294 
295 GRT_END_NAMESPACE
296 
297 
/*
Doxygen cross-reference index (extraction residue appended by the documentation
generator — not part of the original source file):

bool set(const UINT nodeSize, const UINT featureIndexA, const UINT featureIndexB, const UINT featureIndexC, const VectorFloat &classProbabilities)
This file implements a DecisionTreeTripleFeatureNode, which is a specific type of node used for a DecisionTree.
virtual bool clear()                                        — Node.h:37
Random                                                      — Random.h:40
virtual bool getModel(std::ostream &stream) const           — Node.cpp:119
unsigned int getSize() const                                — Vector.h:193
virtual bool saveParametersToFile(std::fstream &file) const
Vector< unsigned int > getRandomSubset(const unsigned int startRange, const unsigned int endRange, const unsigned int subsetSize) — Random.h:268
UINT getNumSamples() const
virtual Node * deepCopyNode() const                         — Node.cpp:275
virtual bool predict(const VectorFloat &x)
virtual bool getModel(std::ostream &stream) const
virtual bool saveParametersToFile(std::fstream &file) const
DecisionTreeTripleFeatureNode * deepCopy() const
Vector< MinMax > getRanges() const
int getRandomNumberInt(int minRange, int maxRange)          — Random.h:88
virtual bool loadParametersFromFile(std::fstream &file)
virtual bool loadParametersFromFile(std::fstream &file)
*/