GestureRecognitionToolkit  Version: 0.1.0
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source, C++ machine learning library for real-time gesture recognition.
EvolutionaryAlgorithm.h
/*
 GRT MIT License
 Copyright (c) <2013> <Nicholas Gillian, Media Lab, MIT>

 Permission is hereby granted, free of charge, to any person obtaining a copy of this software
 and associated documentation files (the "Software"), to deal in the Software without restriction,
 including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
 and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
 subject to the following conditions:

 The above copyright notice and this permission notice shall be included in all copies or substantial
 portions of the Software.

 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
 LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

 */

#ifndef GRT_EVOLUTIONARY_ALGORITHM_HEADER
#define GRT_EVOLUTIONARY_ALGORITHM_HEADER

#include "Individual.h"

GRT_BEGIN_NAMESPACE
template <typename INDIVIDUAL>
class EvolutionaryAlgorithm : public GRTBase{

public:

    EvolutionaryAlgorithm(const UINT populationSize = 0,const UINT geneSize = 0){
        maxIteration = 1000;
        minNumIterationsNoChange = 1;
        storeRate = 1;
        bestIndividualIndex = 0;
        bestIndividualFitness = 0;
        mutationRate = 0.01;
        minChange = 1.0e-5;
        baiseCoeff = 2.0;
        initialized = false;
        useElitism = true;
        storeHistory = true;
        baiseWeights = true;

        errorLog.setProceedingText("[EVO ERROR]");
        trainingLog.setProceedingText("[EVO TRAINING]");
        warningLog.setProceedingText("[EVO WARNING]");

        initPopulation( populationSize, geneSize );
    }

    virtual ~EvolutionaryAlgorithm(){
    }

    INDIVIDUAL& operator[](const UINT &index){
        return population[ index ];
    }

    virtual bool initPopulation(const UINT populationSize,const UINT geneSize){

        initialized = false;
        this->populationSize = 0;
        this->geneSize = 0;
        bestIndividualIndex = 0;
        bestIndividualFitness = 0;
        population.clear();
        populationWeights.clear();
        accumSumLookup.clear();
        populationHistory.clear();
        fitnessHistory.clear();

        if( populationSize == 0 || geneSize == 0 ) return false;

        //Init the memory
        this->populationSize = populationSize;
        this->geneSize = geneSize;
        population.resize( populationSize );
        populationWeights.resize( populationSize );
        accumSumLookup.resize( populationSize );

        //Init each individual
        UINT i = 0;
        UINT index = 0;
        typename Vector< INDIVIDUAL >::iterator populationIter = population.begin();
        Vector< IndexedDouble >::iterator weightsIter = populationWeights.begin();
        VectorFloat::iterator geneIter;

        while( populationIter != population.end() ){
            populationIter->fitness = 0;
            populationIter->gene.resize( geneSize );

            //Randomize the gene
            for(i=0; i<geneSize; i++){
                populationIter->gene[ i ] = generateRandomGeneValue();
            }

            weightsIter->value = populationIter->fitness;
            weightsIter->index = index++;

            populationIter++;
            weightsIter++;
        }

        //Save the initial population as the parents
        parents = population;

        initialized = true;

        return true;
    }

    virtual bool estimatePopulationFitness( const MatrixFloat &trainingData, Float &bestFitness, UINT &bestIndex ){

        UINT index = 0;
        bestFitness = 0;
        bestIndex = 0;

        if( !initialized ) return false;

        typename Vector< INDIVIDUAL >::iterator populationIter = population.begin();
        Vector< IndexedDouble >::iterator weightsIter = populationWeights.begin();

        while( populationIter != population.end() ){
            //Compute the fitness of the current individual
            weightsIter->value = evaluateFitness( *populationIter, trainingData );
            weightsIter->index = index++;

            //Check to see if this is the best fitness so far
            if( weightsIter->value > bestFitness ){
                bestFitness = weightsIter->value;
                bestIndex = weightsIter->index;
            }

            //Update the iterators
            populationIter++;
            weightsIter++;
        }

        return true;
    }

    virtual bool evolvePopulation(){

        if( !initialized ) return false;

        UINT i=0;
        UINT index = 0;
        UINT mom = 0;
        UINT dad = 0;
        UINT crossOverPoint = 0;
        typename Vector< INDIVIDUAL >::iterator populationIter = population.begin();
        Vector< IndexedDouble >::iterator weightsIter = populationWeights.begin();

        //Get the current weights values
        weightsIter = populationWeights.begin();
        while( populationIter != population.end() ){
            weightsIter->value = baiseWeights ? pow( populationIter->fitness, baiseCoeff ) : populationIter->fitness;
            weightsIter->index = index++;

            populationIter++;
            weightsIter++;
        }

        //Sort the weighted values by value in ascending order (so the least likely value is first, the second least likely is second, etc.)
        sort(populationWeights.begin(),populationWeights.end(),IndexedDouble::sortIndexedDoubleByValueAscending);

        //Create the accumulated sum lookup table
        accumSumLookup[0] = populationWeights[0].value;
        for(unsigned int i=1; i<populationSize; i++){
            accumSumLookup[i] = accumSumLookup[i-1] + populationWeights[i].value;
        }
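
        //The accumulated-sum table supports fitness-proportionate ("roulette wheel") selection below:
        //a parent is drawn by sampling a value up to the total accumulated weight and selecting the
        //entry whose accumulated sum covers it, so individuals with a higher (optionally biased)
        //fitness are proportionally more likely to be chosen as mom or dad.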

        if( accumSumLookup[populationSize-1] == 0 ){
            warningLog << "evolvePopulation() - The accum sum is zero!" << std::endl;
        }

        //Reset the population iterator
        populationIter = population.begin();

        if( useElitism ){
            //The first child is simply a copy of the best parent
            populationIter->gene = parents[ bestIndividualIndex ].gene;
            populationIter++;
        }

        //This is the main evolve loop, at each iteration we do the following until we reach the end of the population
        //- Randomly select a mom and dad from the parents population (parents with a better fitness have a better chance of being selected)
        //- Randomly select a cross over point (this is an index that sets what point along the gene we will merge the data from mom and dad)
        //- Create two individuals (if possible) by combining the gene data from mom and dad
        //- The first child will use the following genes [mom |crossOverPoint| dad], whereas the second child will use [dad |crossOverPoint| mom]
        //- After the cross over, each gene will be randomly mutated
        index = 0;
        while( populationIter != population.end() ){

            //Randomly select the parents, individuals with higher weights will have a better chance of being selected
            mom = rand.getRandomNumberWeighted( populationWeights, accumSumLookup );
            dad = rand.getRandomNumberWeighted( populationWeights, accumSumLookup );

            //Select the cross over point
            crossOverPoint = rand.getRandomNumberInt(0, geneSize);

            //Generate the new individual using cross over (this is the first child)
            for(i=0; i<geneSize; i++){
                if( i < crossOverPoint ) populationIter->gene[i] = parents[ mom ].gene[i];
                else populationIter->gene[i] = parents[ dad ].gene[i];
            }

            //Perform random mutation
            for(i=0; i<geneSize; i++){
                if( rand.getRandomNumberUniform(0.0,1.0) <= mutationRate ){
                    populationIter->gene[ i ] = generateRandomGeneValue();
                }
            }

            //Update the iterator
            populationIter++;

            //Generate the second child (as long as we have not got to the end of the population)
            if( populationIter != population.end() ){

                for(i=0; i<geneSize; i++){
                    if( i < crossOverPoint ) populationIter->gene[i] = parents[ dad ].gene[i];
                    else populationIter->gene[i] = parents[ mom ].gene[i];
                }

                //Perform random mutation
                for(i=0; i<geneSize; i++){
                    if( rand.getRandomNumberUniform(0.0,1.0) <= mutationRate ){
                        populationIter->gene[ i ] = generateRandomGeneValue();
                    }
                }

                //Update the iterator
                populationIter++;
            }

        }

        //Store the parents for the next iteration
        parents = population;

        return true;
    }

    virtual Float evaluateFitness( INDIVIDUAL &individual, const MatrixFloat &trainingData ){

        individual.fitness = 0;

        if( !initialized ) return 0;

        if( trainingData.getNumCols() != geneSize ) return 0;

        UINT M = trainingData.getNumRows();
        Float error = 0;
        Float minError = grt_numeric_limits< Float >::max();

        for(UINT i=0; i<M; i++){
            error = 0;
            //Compute the squared distance
            for(UINT j=0; j<geneSize; j++){
                error += ( trainingData[i][j] - individual.gene[j] ) * ( trainingData[i][j] - individual.gene[j] );
            }
            if( error < minError ) minError = error;
        }
        //Make sure the minError is not zero
        minError += 0.00001;
        minError /= Float(geneSize);

        //Good individuals should have a high fitness
        individual.fitness = 1.0/(minError*minError);

        return individual.fitness;
    }
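
    //Note: the fitness is the inverse square of the average per-gene squared distance to the closest
    //training row, so smaller distances map to much larger fitness values. For example, with
    //geneSize = 2 and a nearest row at squared distance 0.01, the fitness is
    //1.0 / ( ((0.01 + 0.00001) / 2)^2 ) ≈ 3.99e4.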

    virtual bool train(const MatrixFloat &trainingData){

        if( !initialized ) return false;

        UINT currentIteration = 0;
        UINT numIterationsNoChange = 0;
        bool keepTraining = true;
        Float lastBestFitness = 0;

        if( storeHistory ){
            populationHistory.reserve( maxIteration/storeRate );
            fitnessHistory.reserve( maxIteration/storeRate );
        }

        //Init the population
        initPopulation( populationSize, geneSize );

        //Compute the fitness of the initial population
        estimatePopulationFitness( trainingData, bestIndividualFitness, bestIndividualIndex );
        lastBestFitness = bestIndividualFitness;

        if( storeHistory ){
            populationHistory.push_back( population );
            fitnessHistory.push_back( IndexedDouble(bestIndividualIndex, bestIndividualFitness) );
        }

        //Start the main loop
        while( keepTraining ){

            //Perform the selection
            if( !evolvePopulation() ){
                errorLog << "Failed to evolve population" << std::endl;
                return false;
            }

            //Compute the population fitness
            if( !estimatePopulationFitness( trainingData, bestIndividualFitness, bestIndividualIndex ) ){
                return false;
            }

            Float delta = fabs( bestIndividualFitness-lastBestFitness );
            lastBestFitness = bestIndividualFitness;

            trainingLog << "Iteration: " << currentIteration << "\tBestFitness: " << bestIndividualFitness << "\tBestIndex: " << bestIndividualIndex << "\tDelta: " << delta << "\tNumIterationsNoChange: " << numIterationsNoChange << std::endl;

            if( currentIteration >= maxIteration ){
                keepTraining = false;
                trainingLog << "Max Iteration Reached!" << std::endl;
            }

            if( delta <= minChange ){
                if( ++numIterationsNoChange >= minNumIterationsNoChange ){
                    keepTraining = false;
                    trainingLog << "Min Change Reached!" << std::endl;
                }
            }else{
                numIterationsNoChange = 0;
            }

            if( customConvergenceCheck() ){
                keepTraining = false;
                trainingLog << "Custom Convergence Triggered!" << std::endl;
            }

            //Update the iteration counter
            currentIteration++;

            //Save the current population
            if( currentIteration % storeRate == 0 && storeHistory ){
                populationHistory.push_back( population );
                fitnessHistory.push_back( IndexedDouble(bestIndividualIndex, bestIndividualFitness) );
            }
        }

        return true;
    }
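
    //Training stops when any of the following hold: the maximum number of iterations is reached, the
    //best fitness changes by no more than minChange for minNumIterationsNoChange consecutive
    //iterations, or customConvergenceCheck() returns true.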

    UINT getPopulationSize() const{
        return populationSize;
    }

    bool getInitialized() const{
        return initialized;
    }

    Vector< INDIVIDUAL > getPopulation() const{
        return population;
    }

    bool setPopulationSize(const UINT populationSize){
        this->populationSize = populationSize;
        return true;
    }

    bool setMinNumIterationsNoChange(const UINT minNumIterationsNoChange){
        this->minNumIterationsNoChange = minNumIterationsNoChange;
        return true;
    }

    bool setMaxIterations(const UINT maxIteration){
        this->maxIteration = maxIteration;
        return true;
    }

    bool setStoreRate(const UINT storeRate){
        this->storeRate = storeRate;
        return true;
    }

    bool setStoreHistory(const bool storeHistory){
        this->storeHistory = storeHistory;
        return true;
    }

    bool setBaiseWeights(const bool baiseWeights){
        this->baiseWeights = baiseWeights;
        return true;
    }

    bool setBaiseCoeff(const Float baiseCoeff){
        this->baiseCoeff = baiseCoeff;
        return true;
    }

    bool setMutationRate(const Float mutationRate){
        this->mutationRate = mutationRate;
        return true;
    }

    bool setMinChange(const Float minChange){
        this->minChange = minChange;
        return true;
    }

    virtual bool setPopulation( const Vector< INDIVIDUAL > &newPopulation ){

        if( newPopulation.size() == 0 ) return false;

        population = newPopulation;
        populationSize = (UINT)population.size();
        populationWeights.resize( populationSize );
        accumSumLookup.resize( populationSize );

        UINT index = 0;
        typename Vector< INDIVIDUAL >::iterator populationIter = population.begin();
        Vector< IndexedDouble >::iterator weightsIter = populationWeights.begin();
        VectorFloat::iterator geneIter;

        while( populationIter != population.end() ){
            weightsIter->value = populationIter->fitness;
            weightsIter->index = index++;

            populationIter++;
            weightsIter++;
        }

        return true;
    }

    virtual inline Float generateRandomGeneValue(){
        return rand.getRandomNumberUniform(0.0,1.0);
    }

    virtual bool customConvergenceCheck(){
        return false;
    }
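
    //Subclasses can override generateRandomGeneValue(), evaluateFitness(), and customConvergenceCheck()
    //to change the gene value range, the fitness measure, and the stopping criteria for a specific problem.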

    virtual bool printBest() const{
        if( !initialized ) return false;

        std::cout << "BestIndividual: ";
        for(UINT i=0; i<geneSize; i++){
            std::cout << population[ bestIndividualIndex ].gene[i] << "\t";
        }
        std::cout << std::endl;
        return true;
    }

public:

    bool initialized;
    bool useElitism;
    bool storeHistory;
    bool baiseWeights;
    UINT populationSize;
    UINT geneSize;
    UINT minNumIterationsNoChange;
    UINT maxIteration;
    UINT storeRate;
    UINT bestIndividualIndex;
    Float bestIndividualFitness;
    Float mutationRate;
    Float minChange;
    Float baiseCoeff;
    Random rand;
    Vector< INDIVIDUAL > population;
    Vector< INDIVIDUAL > parents;
    Vector< IndexedDouble > populationWeights;
    Vector< Vector< INDIVIDUAL > > populationHistory;
    Vector< IndexedDouble > fitnessHistory;
    VectorFloat accumSumLookup;
};

GRT_END_NAMESPACE

#endif //GRT_EVOLUTIONARY_ALGORITHM_HEADER
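
A minimal usage sketch (not part of this file) is given below. It assumes the GRT Individual class declared in Individual.h as the template type and the umbrella <GRT/GRT.h> include; if that umbrella header is not used, include the specific GRT headers instead. The sketch builds a small random training matrix, trains the population against it, and prints the best individual found.

#include <GRT/GRT.h>   //Assumed umbrella header for the GRT library
#include <iostream>
using namespace GRT;

int main(){

    const UINT populationSize = 100;
    const UINT geneSize = 3;

    //Each row of the training data is a target point the evolved genes should move towards
    Random random;
    MatrixFloat trainingData( 10, geneSize );
    for(UINT i=0; i<trainingData.getNumRows(); i++){
        for(UINT j=0; j<trainingData.getNumCols(); j++){
            trainingData[i][j] = random.getRandomNumberUniform(0.0,1.0);
        }
    }

    //Create the evolutionary algorithm using the GRT Individual class as the template type
    EvolutionaryAlgorithm< Individual > evo( populationSize, geneSize );
    evo.setMaxIterations( 500 );
    evo.setMutationRate( 0.01 );

    //Train the population against the training data
    if( !evo.train( trainingData ) ){
        std::cout << "Failed to train evolutionary algorithm!" << std::endl;
        return 1;
    }

    //Print the genes of the best individual found
    evo.printBest();

    return 0;
}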