// NOTE(review): this chunk is an extraction of a GRT EvolutionaryAlgorithm
// template header. The leading integers on each line are the ORIGINAL source
// line numbers left in by the extraction tool, and many original lines are
// missing between the visible ones — comments below describe only what is
// visible.
26 #ifndef GRT_EVOLUTIONARY_ALGORITHM_HEADER 27 #define GRT_EVOLUTIONARY_ALGORITHM_HEADER 29 #include "Individual.h" 38 template <
typename INDIVIDUAL>
// Constructor fragment: default-initializes convergence bookkeeping —
// at least one no-change iteration is required before stopping, and the
// best-individual tracking starts at index 0 with zero fitness.
52 minNumIterationsNoChange = 1;
54 bestIndividualIndex = 0;
55 bestIndividualFitness = 0;
// operator[] body: unchecked access into the population vector — no bounds
// check is visible here, so the caller must pass a valid index.
82 return population[ index ];
// initPopulation( populationSize, geneSize ):
// resets all state, then builds a population of individuals with zero
// fitness and randomly-initialized genes. Returns false for degenerate
// sizes. NOTE(review): iterator-initialization lines and closing braces are
// missing from this extraction.
96 this->populationSize = 0;
98 bestIndividualIndex = 0;
99 bestIndividualFitness = 0;
// Drop any state left over from a previous run.
101 populationWeights.clear();
102 accumSumLookup.clear();
103 populationHistory.clear();
104 fitnessHistory.clear();
// Reject degenerate sizes before allocating anything.
106 if( populationSize == 0 || geneSize == 0 )
return false;
109 this->populationSize = populationSize;
110 this->geneSize = geneSize;
111 population.
resize( populationSize );
112 populationWeights.
resize( populationSize );
113 accumSumLookup.
resize( populationSize );
120 VectorFloat::iterator geneIter;
// Give every individual zero fitness and a fresh random gene vector.
122 while( populationIter != population.end() ){
123 populationIter->fitness = 0;
124 populationIter->gene.
resize( geneSize );
127 for(i=0; i<geneSize; i++){
128 populationIter->gene[ i ] = generateRandomGeneValue();
// Seed the weight entries from the (currently zero) fitness values.
131 weightsIter->value = populationIter->fitness;
132 weightsIter->index = index++;
// The parent pool starts out as a copy of the initial population.
139 parents = population;
// estimatePopulationFitness( trainingData, bestFitness, bestIndex ):
// walks the population recording each individual's fitness into
// populationWeights and tracking the best fitness/index seen. Requires a
// prior successful initPopulation(). NOTE(review): the per-individual
// fitness-evaluation call is on lines missing from this extraction.
161 if( !initialized )
return false;
166 while( populationIter != population.end() ){
169 weightsIter->index = index++;
// Strictly-greater comparison: on ties the earliest best index is kept.
172 if( weightsIter->value > bestFitness ){
173 bestFitness = weightsIter->value;
174 bestIndex = weightsIter->index;
// evolvePopulation(): one generation step — builds (optionally biased)
// selection weights, a cumulative-sum lookup for roulette-wheel selection,
// keeps the best parent unchanged (elitism), then fills the rest of the
// population with single-point-crossover children plus random mutation.
// NOTE(review): the parent-selection and mutation-probability lines are
// missing from this extraction; comments cover only visible statements.
193 if( !initialized )
return false;
199 UINT crossOverPoint = 0;
// Selection weights: baiseWeights optionally sharpens them by raising
// fitness to baiseCoeff ("baise" is the original API spelling).
204 weightsIter = populationWeights.begin();
205 while( populationIter != population.end() ){
206 weightsIter->value = baiseWeights ? pow( populationIter->fitness, baiseCoeff ) : populationIter->fitness;
207 weightsIter->index = index++;
// Sort ascending so a cumulative-sum lookup can be built next.
214 sort(populationWeights.begin(),populationWeights.end(),IndexedDouble::sortIndexedDoubleByValueAscending);
217 accumSumLookup[0] = populationWeights[0].value;
218 for(
unsigned int i=1; i<populationSize; i++){
219 accumSumLookup[i] = accumSumLookup[i-1] + populationWeights[i].value;
// All-zero total weight makes weighted selection meaningless — warn.
222 if( accumSumLookup[populationSize-1] == 0 ){
223 warningLog <<
"evolvePopulation() - The accum sum is zero!" << std::endl;
// Elitism: the first slot keeps the best parent's gene verbatim.
227 populationIter = population.begin();
231 populationIter->gene = parents[ bestIndividualIndex ].gene;
// First child: mom's genes before the crossover point, dad's after.
242 while( populationIter != population.end() ){
252 for(i=0; i<geneSize; i++){
253 if( i < crossOverPoint ) populationIter->gene[i] = parents[ mom ].gene[i];
254 else populationIter->gene[i] = parents[ dad ].gene[i];
// Mutation pass over the first child's genes.
258 for(i=0; i<geneSize; i++){
260 populationIter->gene[ i ] = generateRandomGeneValue();
// Second child (if a slot remains): same crossover point, parents swapped.
268 if( populationIter != population.end() ){
270 for(i=0; i<geneSize; i++){
271 if( i < crossOverPoint ) populationIter->gene[i] = parents[ dad ].gene[i];
272 else populationIter->gene[i] = parents[ mom ].gene[i];
// Mutation pass over the second child's genes.
276 for(i=0; i<geneSize; i++){
278 populationIter->gene[ i ] = generateRandomGeneValue();
// The evolved population becomes the parent pool for the next generation.
289 parents = population;
// evaluateFitness( individual, trainingData ):
// fitness = 1 / (minError^2), where minError is the smallest squared
// distance between the individual's gene and any training row, averaged
// over the gene length. Returns 0 (fitness left at 0) if not initialized
// or if the data's column count does not match geneSize.
304 individual.fitness = 0;
306 if( !initialized )
return 0;
308 if( trainingData.
getNumCols() != geneSize )
return 0;
// Squared Euclidean distance from the gene to each training row; keep
// the minimum over all rows.
314 for(UINT i=0; i<M; i++){
317 for(UINT j=0; j<geneSize; j++){
318 error += ( trainingData[i][j] - individual.gene[j] ) * ( trainingData[i][j] - individual.gene[j] );
320 if( error < minError ) minError = error;
// Average the best squared distance over the gene length.
324 minError /= Float(geneSize);
// NOTE(review): a perfect match (minError == 0) divides by zero and
// yields inf fitness — confirm callers tolerate that.
327 individual.fitness = 1.0/(minError*minError);
329 return individual.fitness;
// train( trainingData ): main evolutionary loop. Runs until the iteration
// budget (maxIteration) is exhausted, until the best-fitness delta stays
// <= minChange for minNumIterationsNoChange consecutive iterations, or
// until customConvergenceCheck() fires. When storeHistory is set, the
// population and best fitness are snapshotted every storeRate iterations.
// NOTE(review): the evolve/estimate calls inside the loop are on lines
// missing from this extraction.
332 virtual bool train(
const MatrixFloat &trainingData){
334 if( !initialized )
return false;
336 UINT currentIteration = 0;
337 UINT numIterationsNoChange = 0;
338 bool keepTraining =
true;
339 Float lastBestFitness = 0;
// Pre-size the history buffers to avoid reallocation during training.
342 populationHistory.reserve( maxIteration/storeRate );
343 fitnessHistory.reserve( maxIteration/storeRate );
351 lastBestFitness = bestIndividualFitness;
// Record the initial state as the first history entry.
354 populationHistory.push_back( population );
355 fitnessHistory.push_back(
IndexedDouble(bestIndividualIndex, bestIndividualFitness) );
359 while( keepTraining ){
363 errorLog <<
"Failed to evolve population" << std::endl;
// Convergence is judged on the absolute change in best fitness.
372 Float delta = fabs( bestIndividualFitness-lastBestFitness );
373 lastBestFitness = bestIndividualFitness;
375 trainingLog <<
"Iteration: " << currentIteration <<
"\tBestFitness: " << bestIndividualFitness <<
"\tBestIndex: " << bestIndividualIndex <<
"\tDelta: " << delta <<
"\tNumIterationsNoChange: " << numIterationsNoChange << std::endl;
// Stop condition 1: iteration budget exhausted.
377 if( currentIteration >= maxIteration ){
378 keepTraining =
false;
379 trainingLog <<
"Max Iteration Reached!" << std::endl;
// Stop condition 2: fitness plateau held long enough.
382 if( delta <= minChange ){
383 if( ++numIterationsNoChange >= minNumIterationsNoChange ){
384 keepTraining =
false;
385 trainingLog <<
"Min Changed Reached!" << std::endl;
// Any improvement above minChange resets the plateau counter.
388 numIterationsNoChange = 0;
// Stop condition 3: subclass-defined convergence hook.
391 if( customConvergenceCheck() ){
392 keepTraining =
false;
393 trainingLog <<
"Custom Convergance Triggered!" << std::endl;
// Periodic history snapshot (only when storeHistory is enabled).
400 if( currentIteration % storeRate == 0 && storeHistory ){
401 populationHistory.push_back( population );
402 fitnessHistory.push_back(
IndexedDouble(bestIndividualIndex, bestIndividualFitness) );
// Returns the current population size.
409 UINT getPopulationSize()
const{
410 return populationSize;
// Returns the initialized flag (the return statement is on a line not
// visible in this extraction).
413 bool getInitialized()
const{
// Simple parameter setters. NOTE(review): each setter's `return true;` and
// closing-brace lines are missing from this extraction — only the
// assignment lines are visible. "Baise" is the original API spelling
// (presumably "bias") and must be kept for interface compatibility.
// Population size used by the next initPopulation() call.
421 bool setPopulationSize(
const UINT populationSize){
422 this->populationSize = populationSize;
// How many consecutive below-minChange iterations stop train().
426 bool setMinNumIterationsNoChange(
const UINT minNumIterationsNoChange){
427 this->minNumIterationsNoChange = minNumIterationsNoChange;
// Hard iteration budget for train().
431 bool setMaxIterations(
const UINT maxIteration){
432 this->maxIteration = maxIteration;
// History snapshot period (iterations) used by train().
436 bool setStoreRate(
const UINT storeRate){
437 this->storeRate = storeRate;
// Enables/disables population+fitness history recording in train().
441 bool setStoreHistory(
const bool storeHistory){
442 this->storeHistory = storeHistory;
// Enables fitness^baiseCoeff weighting in evolvePopulation().
446 bool setBaiseWeights(
const bool baiseWeights){
447 this->baiseWeights = baiseWeights;
// Exponent applied to fitness when baiseWeights is enabled.
451 bool setBaiseCoeff(
const Float baiseCoeff){
452 this->baiseCoeff = baiseCoeff;
// Per-gene mutation rate (its use is on lines not visible here).
456 bool setMutationRate(
const Float mutationRate){
457 this->mutationRate = mutationRate;
// Minimum best-fitness delta counted as progress by train().
461 bool setMinChange(
const Float minChange){
462 this->minChange = minChange;
// setPopulation( newPopulation ): replaces the population wholesale,
// resizes the weight and accumulated-sum buffers to match, and rebuilds
// the per-individual weight entries from each individual's fitness.
// Rejects an empty population.
468 if( newPopulation.size() == 0 )
return false;
470 population = newPopulation;
471 populationSize = (UINT)population.size();
472 populationWeights.
resize( populationSize );
473 accumSumLookup.
resize( populationSize );
478 VectorFloat::iterator geneIter;
// Mirror each individual's fitness into the weight table.
480 while( populationIter != population.end() ){
481 weightsIter->value = populationIter->fitness;
482 weightsIter->index = index++;
// Hook for subclasses: produce one random gene value, used by
// initPopulation() and the mutation step of evolvePopulation(). The body
// is on lines not visible in this extraction.
491 virtual inline Float generateRandomGeneValue(){
// Hook for subclasses: extra convergence test polled each iteration by
// train(); returning true stops training. Body not visible in this
// extraction.
495 virtual bool customConvergenceCheck(){
// printBest(): writes the gene of the current best individual to stdout,
// tab-separated, prefixed with "BestIndividual: ". Returns false if the
// algorithm is not initialized.
499 virtual bool printBest()
const{
500 if( !initialized )
return false;
502 std::cout <<
"BestIndividual: ";
503 for(UINT i=0; i<geneSize; i++){
504 std::cout << population[ bestIndividualIndex ].gene[i] <<
"\t";
506 std::cout << std::endl;
// Convergence/bookkeeping members (the remaining members are on lines not
// visible in this extraction).
// Consecutive plateau iterations required before train() stops.
518 UINT minNumIterationsNoChange;
// Index into population/parents of the best individual found so far.
521 UINT bestIndividualIndex;
// Fitness of that best individual.
522 Float bestIndividualFitness;
537 #endif //GRT_EVOLUTIONARY_ALGORITHM_HEADER EvolutionaryAlgorithm(const UINT populationSize=0, const UINT geneSize=0)
virtual bool estimatePopulationFitness(const MatrixFloat &trainingData, Float &bestFitness, UINT &bestIndex)
virtual ~EvolutionaryAlgorithm()
This file contains the Random class, a useful wrapper for generating cross platform random functions...
virtual bool resize(const unsigned int size)
virtual Float evaluateFitness(INDIVIDUAL &individual, const MatrixFloat &trainingData)
virtual bool evolvePopulation()
This class implements a template-based EvolutionaryAlgorithm.
int getRandomNumberWeighted(const Vector< int > &values, const VectorFloat &weights)
unsigned int getNumRows() const
unsigned int getNumCols() const
Float getRandomNumberUniform(Float minRange=0.0, Float maxRange=1.0)
INDIVIDUAL & operator[](const UINT &index)
int getRandomNumberInt(int minRange, int maxRange)
virtual bool initPopulation(const UINT populationSize, const UINT geneSize)
This is the main base class that all GRT machine learning algorithms should inherit from...