GestureRecognitionToolkit Version: 0.2.0
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source, C++ machine learning library for real-time gesture recognition.
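The listing below (DecisionTreeTripleFeatureNode.cpp) implements a DecisionTreeNode that splits samples by comparing differences between three features. For orientation, here is a short, hedged sketch of how such a node is typically plugged into a GRT DecisionTree. It is illustrative only: the training-data file name is a placeholder, the include path may differ depending on how GRT is installed, and it assumes the standard DecisionTree and ClassificationData API from this version of the toolkit.

#include <GRT/GRT.h>
#include <cstdlib>
#include <iostream>
using namespace GRT;

int main(){

    //Load some labelled training data (the file name here is just a placeholder)
    ClassificationData trainingData;
    if( !trainingData.load( "TrainingData.grt" ) ) return EXIT_FAILURE;

    //Ask the DecisionTree to build its model from triple-feature nodes
    DecisionTree dTree;
    dTree.setDecisionTreeNode( DecisionTreeTripleFeatureNode() );

    if( !dTree.train( trainingData ) ) return EXIT_FAILURE;

    //Classify a new sample (a zero vector is used here purely for illustration)
    VectorFloat sample( trainingData.getNumDimensions(), 0.0 );
    if( dTree.predict( sample ) ){
        std::cout << "Predicted class label: " << dTree.getPredictedClassLabel() << std::endl;
    }

    return EXIT_SUCCESS;
}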
DecisionTreeTripleFeatureNode.cpp
#define GRT_DLL_EXPORTS
#include "DecisionTreeTripleFeatureNode.h"

GRT_BEGIN_NAMESPACE

//Register the DecisionTreeTripleFeatureNode module with the Node base class
RegisterNode< DecisionTreeTripleFeatureNode > DecisionTreeTripleFeatureNode::registerModule("DecisionTreeTripleFeatureNode");
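//The static RegisterNode object above adds this class to the Node factory, which is what allows
//a node to be rebuilt from its string name, for example when a saved DecisionTree model is loaded
//from file. As an illustrative sketch only (assuming the factory method declared in Node.h), the
//lookup would look like:
//
//    Node *node = Node::createInstanceFromString( "DecisionTreeTripleFeatureNode" );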

DecisionTreeTripleFeatureNode::DecisionTreeTripleFeatureNode(){
    nodeType = "DecisionTreeTripleFeatureNode";
    parent = NULL;
    leftChild = NULL;
    rightChild = NULL;
    clear();
}

DecisionTreeTripleFeatureNode::~DecisionTreeTripleFeatureNode(){
    clear();
}

bool DecisionTreeTripleFeatureNode::predict(const VectorFloat &x){

    if( (x[ featureIndexA ] - x[ featureIndexB ]) >= (x[ featureIndexC ] - x[ featureIndexB ]) ) return true;

    return false;
}

bool DecisionTreeTripleFeatureNode::clear(){

    //Call the base class clear function
    DecisionTreeNode::clear();

    featureIndexA = 0;
    featureIndexB = 0;
    featureIndexC = 0;

    return true;
}

bool DecisionTreeTripleFeatureNode::print() const{

    std::ostringstream stream;

    if( getModel( stream ) ){
        std::cout << stream.str();
        return true;
    }

    return false;
}

bool DecisionTreeTripleFeatureNode::getModel( std::ostream &stream ) const{

    std::string tab = "";
    for(UINT i=0; i<depth; i++) tab += "\t";

    stream << tab << "depth: " << depth;
    stream << " nodeSize: " << nodeSize;
    stream << " featureIndexA: " << featureIndexA;
    stream << " featureIndexB: " << featureIndexB;
    stream << " featureIndexC: " << featureIndexC;
    stream << " isLeafNode: " << isLeafNode << std::endl;

    stream << tab << "ClassProbabilities: ";
    for(UINT i=0; i<classProbabilities.size(); i++){
        stream << classProbabilities[i] << "\t";
    }
    stream << std::endl;

    if( leftChild != NULL ){
        stream << tab << "LeftChild: " << std::endl;
        leftChild->getModel( stream );
    }

    if( rightChild != NULL ){
        stream << tab << "RightChild: " << std::endl;
        rightChild->getModel( stream );
    }

    return true;
}

Node* DecisionTreeTripleFeatureNode::deepCopyNode() const{

    DecisionTreeTripleFeatureNode *node = new DecisionTreeTripleFeatureNode;

    if( node == NULL ){
        return NULL;
    }

    //Copy this node into the new node
    node->depth = depth;
    node->isLeafNode = isLeafNode;
    node->nodeID = nodeID;
    node->predictedNodeID = predictedNodeID;
    node->nodeSize = nodeSize;
    node->featureIndexA = featureIndexA;
    node->featureIndexB = featureIndexB;
    node->featureIndexC = featureIndexC;
    node->classProbabilities = classProbabilities;

    //Recursively deep copy the left child
    if( leftChild ){
        node->leftChild = leftChild->deepCopyNode();
        node->leftChild->setParent( node );
    }

    //Recursively deep copy the right child
    if( rightChild ){
        node->rightChild = rightChild->deepCopyNode();
        node->rightChild->setParent( node );
    }

    return dynamic_cast< DecisionTreeTripleFeatureNode* >( node );
}

DecisionTreeTripleFeatureNode* DecisionTreeTripleFeatureNode::deepCopy() const{
    return dynamic_cast< DecisionTreeTripleFeatureNode* >( deepCopyNode() );
}

UINT DecisionTreeTripleFeatureNode::getFeatureIndexA() const{
    return featureIndexA;
}

UINT DecisionTreeTripleFeatureNode::getFeatureIndexB() const{
    return featureIndexB;
}

UINT DecisionTreeTripleFeatureNode::getFeatureIndexC() const{
    return featureIndexC;
}

bool DecisionTreeTripleFeatureNode::set(const UINT nodeSize,const UINT featureIndexA,const UINT featureIndexB,const UINT featureIndexC,const VectorFloat &classProbabilities){
    this->nodeSize = nodeSize;
    this->featureIndexA = featureIndexA;
    this->featureIndexB = featureIndexB;
    this->featureIndexC = featureIndexC;
    this->classProbabilities = classProbabilities;
    return true;
}

bool DecisionTreeTripleFeatureNode::computeBestSpiltBestIterativeSpilt( const UINT &numSplittingSteps, const ClassificationData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &minError ){

    return computeBestSpilt( numSplittingSteps, trainingData, features, classLabels, featureIndex, minError );
}

bool DecisionTreeTripleFeatureNode::computeBestSpiltBestRandomSpilt( const UINT &numSplittingSteps, const ClassificationData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &minError ){

    return computeBestSpilt( numSplittingSteps, trainingData, features, classLabels, featureIndex, minError );
}

bool DecisionTreeTripleFeatureNode::computeBestSpilt( const UINT &numSplittingSteps, const ClassificationData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &minError ){

    const UINT M = trainingData.getNumSamples();
    const UINT N = features.getSize();
    const UINT K = classLabels.getSize();

    if( N == 0 ) return false;

    minError = grt_numeric_limits< Float >::max();
    Random random;
    UINT bestFeatureIndexA = 0;
    UINT bestFeatureIndexB = 0;
    UINT bestFeatureIndexC = 0;
    Float error = 0;
    Float giniIndexL = 0;
    Float giniIndexR = 0;
    Float weightL = 0;
    Float weightR = 0;
    Vector< UINT > groupIndex(M);
    VectorFloat groupCounter(2,0);
    Vector< MinMax > ranges = trainingData.getRanges();
    MatrixFloat classProbabilities(K,2);
    MatrixFloat data(M,1); //This will store our temporary data for each dimension

    //Randomly select which features we want to use
    UINT numRandomFeatures = numSplittingSteps > N ? N : numSplittingSteps;
    Vector< UINT > randomFeatures = random.getRandomSubset( 0, N, numRandomFeatures );

    //Loop over each random feature and try to find the best split point
    for(UINT n=0; n<numRandomFeatures; n++){

        //Randomly select 3 features to use
        featureIndexB = features[ randomFeatures[n] ]; //B is the central feature
        featureIndexA = features[ randomFeatures[ random.getRandomNumberInt(0,numRandomFeatures) ] ];
        featureIndexC = features[ randomFeatures[ random.getRandomNumberInt(0,numRandomFeatures) ] ];

        //Iterate over each sample and work out if it should be in the lhs (0) or rhs (1) group based on the current threshold
        groupCounter[0] = groupCounter[1] = 0;
        classProbabilities.setAllValues(0);
        for(UINT i=0; i<M; i++){
            groupIndex[i] = predict( trainingData[i].getSample() ) ? 1 : 0;
            groupCounter[ groupIndex[i] ]++;
            classProbabilities[ getClassLabelIndexValue(trainingData[i].getClassLabel(),classLabels) ][ groupIndex[i] ]++;
        }

        //Compute the class probabilities for the lhs group and rhs group
        for(UINT k=0; k<K; k++){
            classProbabilities[k][0] = groupCounter[0]>0 ? classProbabilities[k][0]/groupCounter[0] : 0;
            classProbabilities[k][1] = groupCounter[1]>0 ? classProbabilities[k][1]/groupCounter[1] : 0;
        }

        //Compute the Gini index for the lhs and rhs groups
        giniIndexL = giniIndexR = 0;
        for(UINT k=0; k<K; k++){
            giniIndexL += classProbabilities[k][0] * (1.0-classProbabilities[k][0]);
            giniIndexR += classProbabilities[k][1] * (1.0-classProbabilities[k][1]);
        }
        weightL = groupCounter[0]/M;
        weightR = groupCounter[1]/M;
        error = (giniIndexL*weightL) + (giniIndexR*weightR);

        //Store the best threshold and feature index
        if( error < minError ){
            minError = error;
            bestFeatureIndexA = featureIndexA;
            bestFeatureIndexB = featureIndexB;
            bestFeatureIndexC = featureIndexC;
        }
    }

    trainingLog << "Best feature indexes: [" << bestFeatureIndexA << "," << bestFeatureIndexB << "," << bestFeatureIndexC << "] Min Error: " << minError << std::endl;

    //Set the best feature index that will be returned to the DecisionTree that called this function
    featureIndex = bestFeatureIndexB;

    //Store the node size, feature indexes and class probabilities for this node
    set(M,bestFeatureIndexA,bestFeatureIndexB,bestFeatureIndexC,trainingData.getClassProbabilities(classLabels));

    return true;
}

bool DecisionTreeTripleFeatureNode::saveParametersToFile( std::fstream &file ) const{

    if( !file.is_open() )
    {
        errorLog << "saveParametersToFile(fstream &file) - File is not open!" << std::endl;
        return false;
    }

    //Save the DecisionTreeNode parameters
    if( !DecisionTreeNode::saveParametersToFile( file ) ){
        errorLog << "saveParametersToFile(fstream &file) - Failed to save DecisionTreeNode parameters to file!" << std::endl;
        return false;
    }

    //Save the custom DecisionTreeTripleFeatureNode parameters
    file << "FeatureIndexA: " << featureIndexA << std::endl;
    file << "FeatureIndexB: " << featureIndexB << std::endl;
    file << "FeatureIndexC: " << featureIndexC << std::endl;

    return true;
}

bool DecisionTreeTripleFeatureNode::loadParametersFromFile( std::fstream &file ){

    if( !file.is_open() )
    {
        errorLog << "loadParametersFromFile(fstream &file) - File is not open!" << std::endl;
        return false;
    }

    //Load the DecisionTreeNode parameters
    if( !DecisionTreeNode::loadParametersFromFile( file ) ){
        errorLog << "loadParametersFromFile(fstream &file) - Failed to load DecisionTreeNode parameters from file!" << std::endl;
        return false;
    }

    std::string word;
    //Load the custom DecisionTreeTripleFeatureNode parameters
    file >> word;
    if( word != "FeatureIndexA:" ){
        errorLog << "loadParametersFromFile(fstream &file) - Failed to find FeatureIndexA header!" << std::endl;
        return false;
    }
    file >> featureIndexA;

    file >> word;
    if( word != "FeatureIndexB:" ){
        errorLog << "loadParametersFromFile(fstream &file) - Failed to find FeatureIndexB header!" << std::endl;
        return false;
    }
    file >> featureIndexB;

    file >> word;
    if( word != "FeatureIndexC:" ){
        errorLog << "loadParametersFromFile(fstream &file) - Failed to find FeatureIndexC header!" << std::endl;
        return false;
    }
    file >> featureIndexC;

    return true;
}

GRT_END_NAMESPACE