GestureRecognitionToolkit  Version: 0.2.5
The Gesture Recognition Toolkit (GRT) is a cross-platform, open-source C++ machine learning library for real-time gesture recognition.
EvolutionaryAlgorithm.h
Go to the documentation of this file.
1 
6 /*
7  GRT MIT License
8  Copyright (c) <2013> <Nicholas Gillian, Media Lab, MIT>
9 
10  Permission is hereby granted, free of charge, to any person obtaining a copy of this software
11  and associated documentation files (the "Software"), to deal in the Software without restriction,
12  including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
13  and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
14  subject to the following conditions:
15 
16  The above copyright notice and this permission notice shall be included in all copies or substantial
17  portions of the Software.
18 
19  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
20  LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
21  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
22  WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
23  SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
24  */
25 
26 #ifndef GRT_EVOLUTIONARY_ALGORITHM_HEADER
27 #define GRT_EVOLUTIONARY_ALGORITHM_HEADER
28 
29 #include "Individual.h"
30 
31 GRT_BEGIN_NAMESPACE
32 
38 template <typename INDIVIDUAL>
40 
41 public:
49  EvolutionaryAlgorithm(const UINT populationSize = 0,const UINT geneSize = 0) : MLBase("EVO")
50  {
51  maxIteration = 1000;
52  minNumIterationsNoChange = 1;
53  storeRate = 1;
54  bestIndividualIndex = 0;
55  bestIndividualFitness = 0;
56  mutationRate = 0.01;
57  minChange = 1.0e-5;
58  baiseCoeff = 2.0;
59  initialized = false;
60  useElitism = true;
61  storeHistory = true;
62  baiseWeights = true;
63 
64  initPopulation( populationSize, geneSize );
65  }
66 
71 
72  }
73 
81  INDIVIDUAL& operator[](const UINT &index){
82  return population[ index ];
83  }
84 
93  virtual bool initPopulation(const UINT populationSize,const UINT geneSize){
94 
95  initialized = false;
96  this->populationSize = 0;
97  this->geneSize = 0;
98  bestIndividualIndex = 0;
99  bestIndividualFitness = 0;
100  population.clear();
101  populationWeights.clear();
102  accumSumLookup.clear();
103  populationHistory.clear();
104  fitnessHistory.clear();
105 
106  if( populationSize == 0 || geneSize == 0 ) return false;
107 
108  //Init the memory
109  this->populationSize = populationSize;
110  this->geneSize = geneSize;
111  population.resize( populationSize );
112  populationWeights.resize( populationSize );
113  accumSumLookup.resize( populationSize );
114 
115  //Init each individual
116  UINT i = 0;
117  UINT index = 0;
118  typename Vector< INDIVIDUAL >::iterator populationIter = population.begin();
119  Vector< IndexedDouble >::iterator weightsIter = populationWeights.begin();
120  VectorFloat::iterator geneIter;
121 
122  while( populationIter != population.end() ){
123  populationIter->fitness = 0;
124  populationIter->gene.resize( geneSize );
125 
126  //Randomize the gene
127  for(i=0; i<geneSize; i++){
128  populationIter->gene[ i ] = generateRandomGeneValue();
129  }
130 
131  weightsIter->value = populationIter->fitness;
132  weightsIter->index = index++;
133 
134  populationIter++;
135  weightsIter++;
136  }
137 
138  //Save the initial population as the parents
139  parents = population;
140 
141  initialized = true;
142 
143  return true;
144  }
145 
155  virtual bool estimatePopulationFitness( const MatrixFloat &trainingData, Float &bestFitness, UINT &bestIndex ){
156 
157  UINT index = 0;
158  bestFitness = 0;
159  bestIndex = 0;
160 
161  if( !initialized ) return false;
162 
163  typename Vector< INDIVIDUAL >::iterator populationIter = population.begin();
164  Vector< IndexedDouble >::iterator weightsIter = populationWeights.begin();
165 
166  while( populationIter != population.end() ){
167  //Compute the fitness of the current individual
168  weightsIter->value = evaluateFitness( *populationIter, trainingData );
169  weightsIter->index = index++;
170 
171  //Check to see if this is the best fitness so far
172  if( weightsIter->value > bestFitness ){
173  bestFitness = weightsIter->value;
174  bestIndex = weightsIter->index;
175  }
176 
177  //Update the iterators
178  populationIter++;
179  weightsIter++;
180  }
181 
182  return true;
183  }
184 
191  virtual bool evolvePopulation(){
192 
193  if( !initialized ) return false;
194 
195  UINT i=0;
196  UINT index = 0;
197  UINT mom = 0;
198  UINT dad = 0;
199  UINT crossOverPoint = 0;
200  typename Vector< INDIVIDUAL >::iterator populationIter = population.begin();
201  Vector< IndexedDouble >::iterator weightsIter = populationWeights.begin();
202 
203  //Get the current weights values
204  weightsIter = populationWeights.begin();
205  while( populationIter != population.end() ){
206  weightsIter->value = baiseWeights ? pow( populationIter->fitness, baiseCoeff ) : populationIter->fitness;
207  weightsIter->index = index++;
208 
209  populationIter++;
210  weightsIter++;
211  }
212 
213  //Sort the weighted values by value in ascending order (so the least likely value is first, the second least likely is second, etc...
214  sort(populationWeights.begin(),populationWeights.end(),IndexedDouble::sortIndexedDoubleByValueAscending);
215 
216  //Create the accumulated sum lookup table
217  accumSumLookup[0] = populationWeights[0].value;
218  for(unsigned int i=1; i<populationSize; i++){
219  accumSumLookup[i] = accumSumLookup[i-1] + populationWeights[i].value;
220  }
221 
222  if( accumSumLookup[populationSize-1] == 0 ){
223  warningLog << "evolvePopulation() - The accum sum is zero!" << std::endl;
224  }
225 
226  //Reset the population iterator
227  populationIter = population.begin();
228 
229  if( useElitism ){
230  //The first child is simply a copy of the best parent
231  populationIter->gene = parents[ bestIndividualIndex ].gene;
232  populationIter++;
233  }
234 
235  //This is the main evolve loop, at each iteration we do the following until we reach the end of the population
236  //- Randomly select a mom and dad from the parents population (parents with a better fitness have a better chance of being selected)
237  //- Randomly select a cross over point (this is an index that sets what point along the gene we will merge the data from mom and dad)
238  //- Create two individuals (if possible) by combining the gene data from mom and dad
239  //- The first child will use the following genes [mom |crossOverPoint| dad], whereas the second child will use [dad |crossOverPoint| mom]
240  //- After the cross over, each gene will be randomly mutated
241  index = 0;
242  while( populationIter != population.end() ){
243 
244  //Randomly select the parents, individuals with higher weights will have a better chance of being selected
245  mom = rand.getRandomNumberWeighted( populationWeights, accumSumLookup );
246  dad = rand.getRandomNumberWeighted( populationWeights, accumSumLookup );
247 
248  //Select the cross over point
249  crossOverPoint = rand.getRandomNumberInt(0, geneSize);
250 
251  //Generate the new individual using cross over (this is the first child)
252  for(i=0; i<geneSize; i++){
253  if( i < crossOverPoint ) populationIter->gene[i] = parents[ mom ].gene[i];
254  else populationIter->gene[i] = parents[ dad ].gene[i];
255  }
256 
257  //Perform random mutation
258  for(i=0; i<geneSize; i++){
259  if( rand.getRandomNumberUniform(0.0,1.0) <= mutationRate ){
260  populationIter->gene[ i ] = generateRandomGeneValue();
261  }
262  }
263 
264  //Update the iterator
265  populationIter++;
266 
267  //Generate the second child (as long as we have not got to the end of the population)
268  if( populationIter != population.end() ){
269 
270  for(i=0; i<geneSize; i++){
271  if( i < crossOverPoint ) populationIter->gene[i] = parents[ dad ].gene[i];
272  else populationIter->gene[i] = parents[ mom ].gene[i];
273  }
274 
275  //Perform random mutation
276  for(i=0; i<geneSize; i++){
277  if( rand.getRandomNumberUniform(0.0,1.0) <= mutationRate ){
278  populationIter->gene[ i ] = generateRandomGeneValue();
279  }
280  }
281 
282  //Update the iterator
283  populationIter++;
284  }
285 
286  }
287 
288  //Store the parents for the next iteration
289  parents = population;
290 
291  return true;
292  }
293 
302  virtual Float evaluateFitness( INDIVIDUAL &individual, const MatrixFloat &trainingData ){
303 
304  individual.fitness = 0;
305 
306  if( !initialized ) return 0;
307 
308  if( trainingData.getNumCols() != geneSize ) return 0;
309 
310  UINT M = trainingData.getNumRows();
311  Float error = 0;
312  Float minError = grt_numeric_limits< Float >::max();
313 
314  for(UINT i=0; i<M; i++){
315  error = 0;
316  //Compute the squared distance
317  for(UINT j=0; j<geneSize; j++){
318  error += ( trainingData[i][j] - individual.gene[j] ) * ( trainingData[i][j] - individual.gene[j] );
319  }
320  if( error < minError ) minError = error;
321  }
322  //Make sure the minError is not zero
323  minError += 0.00001;
324  minError /= Float(geneSize);
325 
326  //Good individuals should have a high fitness
327  individual.fitness = 1.0/(minError*minError);
328 
329  return individual.fitness;
330  }
331 
332  virtual bool train(const MatrixFloat &trainingData){
333 
334  if( !initialized ) return false;
335 
336  UINT currentIteration = 0;
337  UINT numIterationsNoChange = 0;
338  bool keepTraining = true;
339  Float lastBestFitness = 0;
340 
341  if( storeHistory ){
342  populationHistory.reserve( maxIteration/storeRate );
343  fitnessHistory.reserve( maxIteration/storeRate );
344  }
345 
346  //Init the population
347  initPopulation( populationSize, geneSize );
348 
349  //Compute the fitness of the initial population
350  estimatePopulationFitness( trainingData, bestIndividualFitness, bestIndividualIndex );
351  lastBestFitness = bestIndividualFitness;
352 
353  if( storeHistory ){
354  populationHistory.push_back( population );
355  fitnessHistory.push_back( IndexedDouble(bestIndividualIndex, bestIndividualFitness) );
356  }
357 
358  //Start the main loop
359  while( keepTraining ){
360 
361  //Perform the selection
362  if( !evolvePopulation() ){
363  errorLog << "Failed to evolve population" << std::endl;
364  return false;
365  }
366 
367  //Compute population fitness
368  if( !estimatePopulationFitness( trainingData, bestIndividualFitness, bestIndividualIndex ) ){
369  return false;
370  }
371 
372  Float delta = fabs( bestIndividualFitness-lastBestFitness );
373  lastBestFitness = bestIndividualFitness;
374 
375  trainingLog << "Iteration: " << currentIteration << "\tBestFitness: " << bestIndividualFitness << "\tBestIndex: " << bestIndividualIndex << "\tDelta: " << delta << "\tNumIterationsNoChange: " << numIterationsNoChange << std::endl;
376 
377  if( currentIteration >= maxIteration ){
378  keepTraining = false;
379  trainingLog << "Max Iteration Reached!" << std::endl;
380  }
381 
382  if( delta <= minChange ){
383  if( ++numIterationsNoChange >= minNumIterationsNoChange ){
384  keepTraining = false;
385  trainingLog << "Min Changed Reached!" << std::endl;
386  }
387  }else{
388  numIterationsNoChange = 0;
389  }
390 
391  if( customConvergenceCheck() ){
392  keepTraining = false;
393  trainingLog << "Custom Convergance Triggered!" << std::endl;
394  }
395 
396  //Update the iteration
397  currentIteration++;
398 
399  //Save the current population
400  if( currentIteration % storeRate == 0 && storeHistory ){
401  populationHistory.push_back( population );
402  fitnessHistory.push_back( IndexedDouble(bestIndividualIndex, bestIndividualFitness) );
403  }
404  }
405 
406  return true;
407  }
408 
409  UINT getPopulationSize() const{
410  return populationSize;
411  }
412 
413  bool getInitialized() const{
414  return initialized;
415  }
416 
417  Vector< INDIVIDUAL > getPopulation() const{
418  return population;
419  }
420 
421  bool setPopulationSize(const UINT populationSize){
422  this->populationSize = populationSize;
423  return true;
424  }
425 
426  bool setMinNumIterationsNoChange(const UINT minNumIterationsNoChange){
427  this->minNumIterationsNoChange = minNumIterationsNoChange;
428  return true;
429  }
430 
431  bool setMaxIterations(const UINT maxIteration){
432  this->maxIteration = maxIteration;
433  return true;
434  }
435 
436  bool setStoreRate(const UINT storeRate){
437  this->storeRate = storeRate;
438  return true;
439  }
440 
441  bool setStoreHistory(const bool storeHistory){
442  this->storeHistory = storeHistory;
443  return true;
444  }
445 
446  bool setBaiseWeights(const bool baiseWeights){
447  this->baiseWeights = baiseWeights;
448  return true;
449  }
450 
451  bool setBaiseCoeff(const Float baiseCoeff){
452  this->baiseCoeff = baiseCoeff;
453  return true;
454  }
455 
456  bool setMutationRate(const Float mutationRate){
457  this->mutationRate = mutationRate;
458  return true;
459  }
460 
461  bool setMinChange(const Float minChange){
462  this->minChange = minChange;
463  return true;
464  }
465 
466  virtual bool setPopulation( const Vector< INDIVIDUAL > &newPopulation ){
467 
468  if( newPopulation.size() == 0 ) return false;
469 
470  population = newPopulation;
471  populationSize = (UINT)population.size();
472  populationWeights.resize( populationSize );
473  accumSumLookup.resize( populationSize );
474 
475  UINT index = 0;
476  typename Vector< INDIVIDUAL >::iterator populationIter = population.begin();
477  Vector< IndexedDouble >::iterator weightsIter = populationWeights.begin();
478  VectorFloat::iterator geneIter;
479 
480  while( populationIter != population.end() ){
481  weightsIter->value = populationIter->fitness;
482  weightsIter->index = index++;
483 
484  populationIter++;
485  weightsIter++;
486  }
487 
488  return true;
489  }
490 
491  virtual inline Float generateRandomGeneValue(){
492  return rand.getRandomNumberUniform(0.0,1.0);
493  }
494 
495  virtual bool customConvergenceCheck(){
496  return false;
497  }
498 
499  virtual bool printBest() const{
500  if( !initialized ) return false;
501 
502  std::cout << "BestIndividual: ";
503  for(UINT i=0; i<geneSize; i++){
504  std::cout << population[ bestIndividualIndex ].gene[i] << "\t";
505  }
506  std::cout << std::endl;
507  return true;
508  }
509 
510 public:
511 
512  bool initialized; ///< true once initPopulation() has succeeded
513  bool useElitism; ///< if true, evolvePopulation() copies the best parent unchanged into the new generation
514  bool storeHistory; ///< if true, train() snapshots the population/fitness every storeRate iterations
515  bool baiseWeights; ///< if true, selection weights are fitness raised to baiseCoeff ('baise' = historical spelling of 'bias')
516  UINT populationSize; ///< number of individuals in the population
517  UINT geneSize; ///< number of genes per individual
518  UINT minNumIterationsNoChange; ///< consecutive low-change iterations required before train() stops
519  UINT maxIteration; ///< maximum number of training iterations
520  UINT storeRate; ///< history snapshot interval, in iterations
521  UINT bestIndividualIndex; ///< index of the fittest individual found by the last fitness estimate
522  Float bestIndividualFitness; ///< fitness of the fittest individual found by the last fitness estimate
523  Float mutationRate; ///< per-gene mutation probability used during evolution
524  Float minChange; ///< fitness delta at or below which an iteration counts as 'no change'
525  Float baiseCoeff; ///< exponent applied to fitness when baiseWeights is enabled
526  Random rand; ///< random number generator used for selection, crossover and mutation
527  Vector< INDIVIDUAL > population; ///< the current generation of individuals
528  Vector< INDIVIDUAL > parents; ///< the previous generation, used as parents by evolvePopulation()
529  Vector< IndexedDouble > populationWeights; ///< selection weights (fitness-derived) paired with individual indices
530  Vector< Vector< INDIVIDUAL > > populationHistory; ///< stored population snapshots (when storeHistory is enabled)
531  Vector< IndexedDouble > fitnessHistory; ///< stored (bestIndex, bestFitness) pairs (when storeHistory is enabled)
532  VectorFloat accumSumLookup; ///< accumulated sum of sorted weights, used for weighted random parent selection
533 };
534 
535 GRT_END_NAMESPACE
536 
537 #endif //GRT_EVOLUTIONARY_ALGORITHM_HEADER
EvolutionaryAlgorithm(const UINT populationSize=0, const UINT geneSize=0)
virtual bool estimatePopulationFitness(const MatrixFloat &trainingData, Float &bestFitness, UINT &bestIndex)
This file contains the Random class, a useful wrapper for generating cross platform random functions...
Definition: Random.h:46
virtual bool resize(const unsigned int size)
Definition: Vector.h:133
virtual Float evaluateFitness(INDIVIDUAL &individual, const MatrixFloat &trainingData)
This class implements a template based EvolutionaryAlgorithm.
int getRandomNumberWeighted(const Vector< int > &values, const VectorFloat &weights)
Definition: Random.cpp:68
unsigned int getNumRows() const
Definition: Matrix.h:574
unsigned int getNumCols() const
Definition: Matrix.h:581
Float getRandomNumberUniform(Float minRange=0.0, Float maxRange=1.0)
Definition: Random.cpp:129
INDIVIDUAL & operator[](const UINT &index)
int getRandomNumberInt(int minRange, int maxRange)
Definition: Random.cpp:59
Definition: Vector.h:41
virtual bool initPopulation(const UINT populationSize, const UINT geneSize)
This is the main base class that all GRT machine learning algorithms should inherit from...
Definition: MLBase.h:72