21 #define GRT_DLL_EXPORTS
// Target range that input/target data is scaled into before entering the network,
// and that outputs are scaled back out of (see the scale(...) calls in feedforward).
26 const Float MLP_NEURON_MIN_TARGET = -1.0;
27 const Float MLP_NEURON_MAX_TARGET = 1.0;
// Constructor fragment: default MLP configuration.
// All three layers default to linear activation functions.
33 inputLayerActivationFunction = Neuron::LINEAR;
34 hiddenLayerActivationFunction = Neuron::LINEAR;
35 outputLayerActivationFunction = Neuron::LINEAR;
// Number of restarts with freshly randomized weights during training.
37 numRandomTrainingIterations = 10;
// Percentage of the training data held out for validation
// (the trainer calls split(100 - validationSetSize)).
38 validationSetSize = 20;
39 trainingMode = ONLINE_GRADIENT_DESCENT;
43 nullRejectionCoeff = 0.9;
44 nullRejectionThreshold = 0;
45 useValidationSet =
true;
46 randomiseTrainingOrder =
false;
// Regression mode by default; train_(ClassificationData) switches this on.
50 classificationModeActive =
false;
51 useNullRejection =
true;
// Constructor boilerplate: record the regressifier type string and tag each
// log stream with an MLP-specific prefix. The same setup appears in both
// constructors visible in this fragment.
54 regressifierType = classType;
55 debugLog.setProceedingText(
"[DEBUG MLP]");
56 errorLog.setProceedingText(
"[ERROR MLP]");
57 trainingLog.setProceedingText(
"[TRAINING MLP]");
58 warningLog.setProceedingText(
"[WARNING MLP]");
// Second constructor: identical type/log-prefix setup.
63 regressifierType = classType;
64 debugLog.setProceedingText(
"[DEBUG MLP]");
65 errorLog.setProceedingText(
"[ERROR MLP]");
66 trainingLog.setProceedingText(
"[TRAINING MLP]");
67 warningLog.setProceedingText(
"[WARNING MLP]");
// Assignment/copy fragment: member-wise copy of the full MLP state from rhs.
// Network topology and activation configuration.
79 this->numInputNeurons = rhs.numInputNeurons;
80 this->numHiddenNeurons = rhs.numHiddenNeurons;
81 this->numOutputNeurons = rhs.numOutputNeurons;
82 this->inputLayerActivationFunction = rhs.inputLayerActivationFunction;
83 this->hiddenLayerActivationFunction = rhs.hiddenLayerActivationFunction;
84 this->outputLayerActivationFunction = rhs.outputLayerActivationFunction;
// Training configuration and last-training results.
85 this->numRandomTrainingIterations = rhs.numRandomTrainingIterations;
86 this->trainingMode = rhs.trainingMode;
87 this->momentum = rhs.momentum;
88 this->trainingError = rhs.trainingError;
89 this->gamma = rhs.gamma;
90 this->initialized = rhs.initialized;
// The neuron layers, the stored scaling ranges, and the error log.
91 this->inputLayer = rhs.inputLayer;
92 this->hiddenLayer = rhs.hiddenLayer;
93 this->outputLayer = rhs.outputLayer;
94 this->inputVectorRanges = rhs.inputVectorRanges;
95 this->targetVectorRanges = rhs.targetVectorRanges;
96 this->trainingErrorLog = rhs.trainingErrorLog;
// Classification-mode state (used when the MLP acts as a classifier).
98 this->classificationModeActive = rhs.classificationModeActive;
99 this->useNullRejection = rhs.useNullRejection;
100 this->predictedClassLabel = rhs.predictedClassLabel;
101 this->nullRejectionCoeff = rhs.nullRejectionCoeff;
102 this->nullRejectionThreshold = rhs.nullRejectionThreshold;
103 this->maxLikelihood = rhs.maxLikelihood;
104 this->classLikelihoods = rhs.classLikelihoods;
// deepCopyFrom fragment: guard against a null source pointer.
114 if( regressifier == NULL ){
115 errorLog <<
"deepCopyFrom(const Regressifier *regressifier) - regressifier is NULL!" << std::endl;
// The source must actually be an MLP (type check precedes the cast below).
120 errorLog <<
"deepCopyFrom(const Regressifier *regressifier) - regressifier is not the correct type!" << std::endl;
// Reuse the member-wise copy: downcast and assign.
124 *
this = *
dynamic_cast<const MLP*
>(regressifier);
// train_(ClassificationData) fragment: the network must be init()'d first.
133 errorLog <<
"train_(ClassificationData trainingData) - The MLP has not been initialized!" << std::endl;
// Input dimensionality must match the input-layer size.
138 errorLog <<
"train_(ClassificationData trainingData) - The number of input dimensions in the training data (" << trainingData.
getNumDimensions() <<
") does not match that of the MLP (" << numInputNeurons <<
")" << std::endl;
// One output neuron per class is required.
142 errorLog <<
"train_(ClassificationData trainingData) - The number of classes in the training data (" << trainingData.
getNumClasses() <<
") does not match that of the MLP (" << numOutputNeurons <<
")" << std::endl;
// Flag classification mode and reuse the regression trainer; regressionData is
// presumably a regression view of the labelled data built in elided lines — confirm.
150 classificationModeActive =
true;
152 return trainModel(regressionData);
// Regression entry point: classification mode off, train directly.
158 classificationModeActive =
false;
160 return trainModel(trainingData);
// predict_ fragment: guard clauses, then classification post-processing.
167 errorLog <<
"predict_(VectorFloat &inputVector) - Model not trained!" << std::endl;
171 if( inputVector.size() != numInputNeurons ){
// NOTE(review): "sie" typo in the runtime message below — fixing it would
// change a runtime string, so it is only flagged here.
172 errorLog <<
"predict_(VectorFloat &inputVector) - The sie of the input Vector (" << int(inputVector.size()) <<
") does not match that of the number of input dimensions (" << numInputNeurons <<
") " << std::endl;
// Classification post-processing: turn the raw network outputs into likelihoods.
179 if( classificationModeActive ){
182 const UINT K = (UINT)regressionData.size();
183 classLikelihoods = regressionData;
// Shift by minValue (computed in elided lines) — presumably to make all
// outputs non-negative before normalization; confirm against the full source.
187 for(UINT i=0; i<K; i++){
188 classLikelihoods[i] += minValue;
// Normalize so the likelihoods sum to 1 (sum computed in elided lines).
194 for(UINT i=0; i<K; i++){
195 classLikelihoods[i] /= sum;
// Argmax over the likelihoods; class labels are 1-based (0 means "rejected").
200 Float bestValue = classLikelihoods[0];
202 for(UINT i=1; i<K; i++){
203 if( classLikelihoods[i] > bestValue ){
204 bestValue = classLikelihoods[i];
210 maxLikelihood = bestValue;
211 predictedClassLabel = bestIndex+1;
// Null rejection: low-confidence predictions map to the null label 0.
// NOTE(review): the comparison uses nullRejectionCoeff rather than the
// computed nullRejectionThreshold — verify this is intentional.
213 if( useNullRejection ){
214 if( maxLikelihood < nullRejectionCoeff ){
215 predictedClassLabel = 0;
// 3-argument init: delegates to the full overload, keeping whatever activation
// functions are currently configured on this instance.
223 bool MLP::init(
const UINT numInputNeurons,
const UINT numHiddenNeurons,
const UINT numOutputNeurons){
224 return init(numInputNeurons, numHiddenNeurons, numOutputNeurons, inputLayerActivationFunction, hiddenLayerActivationFunction, outputLayerActivationFunction );
// Full init fragment: remainder of the 6-argument overload's parameter list.
228 const UINT numHiddenNeurons,
229 const UINT numOutputNeurons,
230 const UINT inputLayerActivationFunction,
231 const UINT hiddenLayerActivationFunction,
232 const UINT outputLayerActivationFunction){
// Seed the RNG from the wall clock so each init draws different random weights.
238 random.
setSeed( (UINT)time(NULL) );
// A zero-sized layer is invalid; report each offending layer separately.
240 if( numInputNeurons == 0 || numHiddenNeurons == 0 || numOutputNeurons == 0 ){
241 if( numInputNeurons == 0 ){ errorLog <<
"init(...) - The number of input neurons is zero!" << std::endl; }
242 if( numHiddenNeurons == 0 ){ errorLog <<
"init(...) - The number of hidden neurons is zero!" << std::endl; }
243 if( numOutputNeurons == 0 ){ errorLog <<
"init(...) - The number of output neurons is zero!" << std::endl; }
249 errorLog <<
"init(...) - One Of The Activation Functions Failed The Validation Check" << std::endl;
// Record the topology; the regressifier base dimensions mirror the layer sizes.
254 this->numInputNeurons = numInputNeurons;
255 this->numHiddenNeurons = numHiddenNeurons;
256 this->numOutputNeurons = numOutputNeurons;
259 this->numInputDimensions = numInputNeurons;
260 this->numOutputDimensions = numOutputNeurons;
263 this->inputLayerActivationFunction = inputLayerActivationFunction;
264 this->hiddenLayerActivationFunction = hiddenLayerActivationFunction;
265 this->outputLayerActivationFunction = outputLayerActivationFunction;
// Allocate the three neuron layers.
268 inputLayer.
resize(numInputNeurons);
269 hiddenLayer.
resize(numHiddenNeurons);
270 outputLayer.
resize(numOutputNeurons);
// Input neurons are pass-throughs: a single input, unit weight, zero bias.
273 for(UINT i=0; i<numInputNeurons; i++){
274 inputLayer[i].init(1,inputLayerActivationFunction);
275 inputLayer[i].weights[0] = 1.0;
276 inputLayer[i].bias = 0.0;
277 inputLayer[i].gamma = gamma;
// Hidden neurons are fully connected to the input layer.
280 for(UINT i=0; i<numHiddenNeurons; i++){
282 hiddenLayer[i].init(numInputNeurons,hiddenLayerActivationFunction);
283 hiddenLayer[i].gamma = gamma;
// Output neurons are fully connected to the hidden layer.
286 for(UINT i=0; i<numOutputNeurons; i++){
288 outputLayer[i].init(numHiddenNeurons,outputLayerActivationFunction);
289 outputLayer[i].gamma = gamma;
// Reset fragment: zero the (remaining) topology counters.
303 numHiddenNeurons = 0;
304 numOutputNeurons = 0;
323 errorLog <<
"train(RegressionData trainingData) - The MLP has not be initialized!" << std::endl;
328 errorLog <<
"train(RegressionData trainingData) - The training data is empty!" << std::endl;
334 if( useValidationSet ){
335 validationData = trainingData.
split( 100 - validationSetSize );
341 if( N != numInputNeurons ){
342 errorLog <<
"train(LabelledRegressionData trainingData) - The number of input dimensions in the training data (" << N <<
") does not match that of the MLP (" << numInputNeurons <<
")" << std::endl;
345 if( T != numOutputNeurons ){
346 errorLog <<
"train(LabelledRegressionData trainingData) - The number of target dimensions in the training data (" << T <<
") does not match that of the MLP (" << numOutputNeurons <<
")" << std::endl;
351 numInputDimensions = numInputNeurons;
352 numOutputDimensions = numOutputNeurons;
363 trainingData.
scale(inputVectorRanges,targetVectorRanges,MLP_NEURON_MIN_TARGET,MLP_NEURON_MAX_TARGET);
365 if( useValidationSet ){
366 validationData.
scale(inputVectorRanges,targetVectorRanges,MLP_NEURON_MIN_TARGET,MLP_NEURON_MAX_TARGET);
371 bool tempScalingState = useScaling;
375 trainingErrorLog.clear();
376 inputNeuronsOuput.
resize(numInputNeurons);
377 hiddenNeuronsOutput.
resize(numHiddenNeurons);
378 outputNeuronsOutput.
resize(numOutputNeurons);
379 deltaO.
resize(numOutputNeurons);
380 deltaH.
resize(numHiddenNeurons);
383 switch( trainingMode ){
384 case ONLINE_GRADIENT_DESCENT:
385 if( classificationModeActive ){
386 trained = trainOnlineGradientDescentClassification( trainingData, validationData );
388 trained = trainOnlineGradientDescentRegression( trainingData, validationData );
392 useScaling = tempScalingState;
393 errorLog <<
"train(RegressionData trainingData) - Uknown training mode!" << std::endl;
399 useScaling = tempScalingState;
408 const UINT numTestingExamples = useValidationSet ? validationData.
getNumSamples() : M;
412 totalSquaredTrainingError = 0;
413 rootMeanSquaredTrainingError = 0;
415 bool keepTraining =
true;
420 Float alpha = learningRate;
421 Float beta = momentum;
425 Float trainingSetAccuracy = 0;
426 Float trainingSetTotalSquaredError = 0;
430 Float bestAccuracy = 0;
432 Float backPropError = 0;
437 TrainingResult result;
438 trainingResults.reserve(M);
441 for(UINT i=0; i<M; i++) indexList[i] = i;
443 for(UINT iter=0; iter<numRandomTrainingIterations; iter++){
447 tempTrainingErrorLog.clear();
450 init(numInputNeurons,numHiddenNeurons,numOutputNeurons,inputLayerActivationFunction,hiddenLayerActivationFunction,outputLayerActivationFunction);
452 if( randomiseTrainingOrder ){
453 for(UINT i=0; i<M; i++){
458 while( keepTraining ){
462 totalSquaredTrainingError = 0;
464 for(UINT i=0; i<M; i++){
466 const VectorFloat &trainingExample = trainingData[ indexList[i] ].getInputVector();
467 const VectorFloat &targetVector = trainingData[ indexList[i] ].getTargetVector();
470 backPropError =
back_prop(trainingExample,targetVector,alpha,beta);
474 if( isNAN(backPropError) ){
475 keepTraining =
false;
476 errorLog <<
"train(RegressionData trainingData) - NaN found!" << std::endl;
481 if( classificationModeActive ){
485 bestValue = targetVector[0];
487 for(UINT i=1; i<targetVector.size(); i++){
488 if( targetVector[i] > bestValue ){
489 bestValue = targetVector[i];
493 classLabel = bestIndex + 1;
498 for(UINT i=1; i<numOutputNeurons; i++){
499 if( y[i] > bestValue ){
504 predictedClassLabel = bestIndex+1;
506 if( classLabel == predictedClassLabel ){
511 totalSquaredTrainingError += backPropError;
516 keepTraining =
false;
517 errorLog <<
"train(RegressionData trainingData) - NaN found!" << std::endl;
522 if( useValidationSet ){
523 trainingSetAccuracy = accuracy;
524 trainingSetTotalSquaredError = totalSquaredTrainingError;
526 totalSquaredTrainingError = 0;
// Validation-set evaluation fragment (classification training): compute
// accuracy and accumulate squared error over the held-out samples.
530 for(UINT i=0; i<numValidationSamples; i++){
531 const VectorFloat &inputVector = validationData[i].getInputVector();
532 const VectorFloat &targetVector = validationData[i].getTargetVector();
536 if( classificationModeActive ){
// True class = argmax of the target vector (labels are 1-based).
// NOTE(review): this loop is bounded by numInputNeurons but indexes
// targetVector, whose length should equal the number of classes
// (numOutputNeurons). The sibling loop at (orig) line 487 correctly uses
// targetVector.size() — this looks like a latent out-of-bounds/logic bug.
// Also note the inner loops reuse `i`, shadowing the sample index.
538 bestValue = targetVector[0];
540 for(UINT i=1; i<numInputNeurons; i++){
541 if( targetVector[i] > bestValue ){
542 bestValue = targetVector[i];
546 classLabel = bestIndex + 1;
// Predicted class = argmax of the network output y.
551 for(UINT i=1; i<numOutputNeurons; i++){
552 if( y[i] > bestValue ){
557 predictedClassLabel = bestIndex+1;
559 if( classLabel == predictedClassLabel ){
// Regression-style squared error accumulated alongside the accuracy count.
565 for(UINT j=0; j<T; j++){
566 totalSquaredTrainingError += SQR( targetVector[j]-y[j] );
// Convert the running correct-count into the epoch accuracy figure.
// NOTE(review): dividing and multiplying by the same sample count is a no-op;
// the "100.0 - accuracy" expressions below suggest the intent was
// (accuracy / count) * 100.0 to produce a percentage — verify.
571 accuracy = (accuracy/Float(numValidationSamples))*Float(numValidationSamples);
572 rootMeanSquaredTrainingError = sqrt( totalSquaredTrainingError / Float(numValidationSamples) );
575 accuracy = (accuracy/Float(M))*Float(M);
576 rootMeanSquaredTrainingError = sqrt( totalSquaredTrainingError / Float(M) );
// Log the per-epoch (training error, validation error) pair.
581 temp[0] = 100.0 - trainingSetAccuracy;
582 temp[1] = 100.0 - accuracy;
583 tempTrainingErrorLog.push_back( temp );
585 error = 100.0 - accuracy;
588 result.setClassificationResult(iter,accuracy,
this);
589 trainingResults.push_back( result );
591 delta = fabs( error - lastError );
593 trainingLog <<
"Random Training Iteration: " << iter+1 <<
" Epoch: " << epoch <<
" Error: " << error <<
" Delta: " << delta << std::endl;
596 if( ++epoch >= maxNumEpochs ){
597 keepTraining =
false;
599 if( delta <= minChange && epoch >= minNumEpochs ){
600 keepTraining =
false;
607 trainingResultsObserverManager.notifyObservers( result );
611 if( lastError < bestError ){
613 bestError = lastError;
614 bestTSError = totalSquaredTrainingError;
615 bestRMSError = rootMeanSquaredTrainingError;
616 bestAccuracy = accuracy;
618 trainingErrorLog = tempTrainingErrorLog;
623 trainingLog <<
"Best Accuracy: " << bestAccuracy <<
" in Random Training Iteration: " << bestIter+1 << std::endl;
627 errorLog <<
"train(LabelledRegressionData trainingData) - NAN Found!" << std::endl;
633 trainingError = bestAccuracy;
// Null-rejection threshold estimation fragment: after training, collect the
// winning output value of every correctly classified example, then set
// threshold = mean - coeff * stddev of those values.
636 if( useNullRejection ){
638 Float averageValue = 0;
639 VectorFloat classificationPredictions, inputVector, targetVector;
641 for(UINT i=0; i<numTestingExamples; i++){
642 inputVector = useValidationSet ? validationData[i].getInputVector() : trainingData[i].getInputVector();
643 targetVector = useValidationSet ? validationData[i].getTargetVector() : trainingData[i].getTargetVector();
// True class = argmax of the target vector (inner loops shadow the outer `i`).
649 bestValue = targetVector[0];
651 for(UINT i=1; i<targetVector.size(); i++){
652 if( targetVector[i] > bestValue ){
653 bestValue = targetVector[i];
657 classLabel = bestIndex + 1;
// Predicted class = argmax of the network output y.
662 for(UINT i=1; i<y.size(); i++){
663 if( y[i] > bestValue ){
668 predictedClassLabel = bestIndex+1;
// Only correct predictions contribute to the rejection statistics.
671 if( classLabel == predictedClassLabel ){
672 classificationPredictions.push_back( bestValue );
673 averageValue += bestValue;
// Sample standard deviation (N-1) of the winning values.
// NOTE(review): no guard is visible for classificationPredictions being empty
// (or of size 1) — division by zero if no example was classified correctly.
677 averageValue /= Float(classificationPredictions.size());
679 for(UINT i=0; i<classificationPredictions.size(); i++){
680 stdDev += SQR(classificationPredictions[i]-averageValue);
682 stdDev = sqrt( stdDev / Float(classificationPredictions.size()-1) );
684 nullRejectionThreshold = averageValue-(stdDev*nullRejectionCoeff);
695 const UINT numValidationSamples = useValidationSet ? validationData.
getNumSamples() : M;
698 bool keepTraining =
true;
700 Float alpha = learningRate;
701 Float beta = momentum;
704 totalSquaredTrainingError = 0;
705 rootMeanSquaredTrainingError = 0;
709 Float trainingSetTotalSquaredError = 0;
716 TrainingResult result;
717 trainingResults.reserve(M);
720 for(UINT i=0; i<M; i++) indexList[i] = i;
722 for(UINT iter=0; iter<numRandomTrainingIterations; iter++){
726 tempTrainingErrorLog.clear();
729 init(numInputNeurons,numHiddenNeurons,numOutputNeurons,inputLayerActivationFunction,hiddenLayerActivationFunction,outputLayerActivationFunction);
731 if( randomiseTrainingOrder ){
732 for(UINT i=0; i<M; i++){
737 while( keepTraining ){
740 totalSquaredTrainingError = 0;
742 for(UINT i=0; i<M; i++){
744 const VectorFloat &trainingExample = trainingData[ indexList[i] ].getInputVector();
745 const VectorFloat &targetVector = trainingData[ indexList[i] ].getTargetVector();
748 Float backPropError =
back_prop(trainingExample,targetVector,alpha,beta);
752 if( isNAN(backPropError) ){
753 keepTraining =
false;
754 errorLog <<
"train(RegressionData trainingData) - NaN found!" << std::endl;
758 totalSquaredTrainingError += backPropError;
762 keepTraining =
false;
763 errorLog <<
"train(RegressionData trainingData) - NaN found!" << std::endl;
768 if( useValidationSet ){
769 trainingSetTotalSquaredError = totalSquaredTrainingError;
770 totalSquaredTrainingError = 0;
773 for(UINT i=0; i<numValidationSamples; i++){
774 const VectorFloat &trainingExample = validationData[i].getInputVector();
775 const VectorFloat &targetVector = validationData[i].getTargetVector();
780 for(UINT j=0; j<T; j++){
781 totalSquaredTrainingError += SQR( targetVector[j]-y[j] );
786 rootMeanSquaredTrainingError = sqrt( totalSquaredTrainingError / Float(numValidationSamples) );
789 rootMeanSquaredTrainingError = sqrt( totalSquaredTrainingError / Float(M) );
794 temp[0] = trainingSetTotalSquaredError;
795 temp[1] = rootMeanSquaredTrainingError;
796 tempTrainingErrorLog.push_back( temp );
798 error = rootMeanSquaredTrainingError;
801 result.setRegressionResult(iter,totalSquaredTrainingError,rootMeanSquaredTrainingError,
this);
802 trainingResults.push_back( result );
804 delta = fabs( error - lastError );
806 trainingLog <<
"Random Training Iteration: " << iter+1 <<
" Epoch: " << epoch <<
" Error: " << error <<
" Delta: " << delta << std::endl;
809 if( ++epoch >= maxNumEpochs ){
810 keepTraining =
false;
812 if( delta <= minChange && epoch >= minNumEpochs ){
813 keepTraining =
false;
820 trainingResultsObserverManager.notifyObservers( result );
825 if( lastError < bestError ){
827 bestError = lastError;
828 bestTSError = totalSquaredTrainingError;
829 bestRMSError = rootMeanSquaredTrainingError;
831 trainingErrorLog = tempTrainingErrorLog;
836 trainingLog <<
"Best RMSError: " << bestRMSError <<
" in Random Training Iteration: " << bestIter+1 << std::endl;
840 errorLog <<
"train(RegressionData trainingData) - NAN Found!" << std::endl;
846 trainingError = bestRMSError;
// back_prop fragment (one online gradient-descent step for a single example).
// beta is the momentum coefficient; omBeta weights the new gradient term.
856 Float omBeta = 1.0 - beta;
// Forward pass to populate the per-layer output buffers.
859 feedforward(trainingExample,inputNeuronsOuput,hiddenNeuronsOutput,outputNeuronsOutput);
// Output-layer deltas: activation derivative times output error.
862 for(UINT i=0; i<numOutputNeurons; i++){
863 deltaO[i] = outputLayer[i].getDerivative( outputNeuronsOutput[i] ) * (targetVector[i]-outputNeuronsOutput[i]);
// Hidden-layer deltas: back-propagate the output deltas through the output weights.
867 for(UINT i=0; i<numHiddenNeurons; i++){
869 for(UINT j=0; j<numOutputNeurons; j++){
870 sum += outputLayer[j].weights[i] * deltaO[j];
872 deltaH[i] = hiddenLayer[i].getDerivative( hiddenNeuronsOutput[i] ) * sum;
// Input->hidden weight updates (alpha = learning rate). previousUpdate stores
// this step for the momentum term applied in elided lines.
876 for(UINT i=0; i<numHiddenNeurons; i++){
877 for(UINT j=0; j<numInputNeurons; j++){
880 update = alpha * inputNeuronsOuput[j] * deltaH[i];
883 hiddenLayer[i].weights[j] += update;
886 hiddenLayer[i].previousUpdate[j] = update;
// Hidden->output weight updates.
891 for(UINT i=0; i<numOutputNeurons; i++){
892 for(UINT j=0; j<numHiddenNeurons; j++){
895 update = alpha * hiddenNeuronsOutput[j] * deltaO[i];
898 outputLayer[i].weights[j] += update;
901 outputLayer[i].previousUpdate[j] = update;
// Bias updates for the two trainable layers.
906 for(UINT i=0; i<numHiddenNeurons; i++){
909 update = alpha * deltaH[i];
912 hiddenLayer[i].bias += update;
915 hiddenLayer[i].previousBiasUpdate = update;
919 for(UINT i=0; i<numOutputNeurons; i++){
922 update = alpha * deltaO[i];
925 outputLayer[i].bias += update;
928 outputLayer[i].previousBiasUpdate = update;
// Return value: sum of squared output errors for this example.
933 for(UINT i=0; i<numOutputNeurons; i++){
934 error += SQR( targetVector[i] - outputNeuronsOutput[i] );
// feedforward fragment (VectorFloat-returning variant, with input/output scaling).
// Lazily size the per-layer output buffers so repeated calls do not reallocate.
942 if( inputNeuronsOuput.size() != numInputNeurons ) inputNeuronsOuput.
resize(numInputNeurons,0);
943 if( hiddenNeuronsOutput.size() != numHiddenNeurons ) hiddenNeuronsOutput.
resize(numHiddenNeurons,0);
944 if( outputNeuronsOutput.size() != numOutputNeurons ) outputNeuronsOutput.
resize(numOutputNeurons,0);
// Scale each input into the neuron target range using the training-time ranges
// (any guarding condition on this scaling lives in elided lines).
948 for(UINT i=0; i<numInputNeurons; i++){
949 trainingExample[i] =
scale(trainingExample[i],inputVectorRanges[i].minValue,inputVectorRanges[i].maxValue,MLP_NEURON_MIN_TARGET,MLP_NEURON_MAX_TARGET);
// Input layer: each pass-through neuron fires on its single scaled input.
955 for(UINT i=0; i<numInputNeurons; i++){
956 input[0] = trainingExample[i];
957 inputNeuronsOuput[i] = inputLayer[i].fire( input );
// Hidden layer: each neuron fires on the full input-layer output vector.
961 for(UINT i=0; i<numHiddenNeurons; i++){
962 hiddenNeuronsOutput[i] = hiddenLayer[i].fire( inputNeuronsOuput );
// Output layer.
966 for(UINT i=0; i<numOutputNeurons; i++){
967 outputNeuronsOutput[i] = outputLayer[i].fire( hiddenNeuronsOutput );
// Map outputs from the neuron range back to the original target ranges.
972 for(UINT i=0; i<numOutputNeurons; i++){
973 outputNeuronsOutput[i] =
scale(outputNeuronsOutput[i],MLP_NEURON_MIN_TARGET,MLP_NEURON_MAX_TARGET,targetVectorRanges[i].minValue,targetVectorRanges[i].maxValue);
977 return outputNeuronsOutput;
// feedforward fragment (out-parameter variant used by back_prop): the same
// three-layer pass, but with no input/output scaling in the visible lines.
983 if( inputNeuronsOuput.size() != numInputNeurons ) inputNeuronsOuput.
resize(numInputNeurons,0);
984 if( hiddenNeuronsOutput.size() != numHiddenNeurons ) hiddenNeuronsOutput.
resize(numHiddenNeurons,0);
985 if( outputNeuronsOutput.size() != numOutputNeurons ) outputNeuronsOutput.
resize(numOutputNeurons,0);
// Input layer: one neuron per input dimension.
989 for(UINT i=0; i<numInputNeurons; i++){
991 inputNeuronsOuput[i] = inputLayer[i].fire( input );
// Hidden layer.
995 for(UINT i=0; i<numHiddenNeurons; i++){
996 hiddenNeuronsOutput[i] = hiddenLayer[i].fire( inputNeuronsOuput );
// Output layer.
1000 for(UINT i=0; i<numOutputNeurons; i++){
1001 outputNeuronsOutput[i] = outputLayer[i].fire( hiddenNeuronsOutput );
1007 std::cout<<
"***************** MLP *****************\n";
1008 std::cout<<
"NumInputNeurons: "<<numInputNeurons<< std::endl;
1009 std::cout<<
"NumHiddenNeurons: "<<numHiddenNeurons<< std::endl;
1010 std::cout<<
"NumOutputNeurons: "<<numOutputNeurons<< std::endl;
1012 std::cout<<
"InputWeights:\n";
1013 for(UINT i=0; i<numInputNeurons; i++){
1014 std::cout<<
"Neuron: "<<i<<
" Bias: " << inputLayer[i].bias <<
" Weights: ";
1015 for(UINT j=0; j<inputLayer[i].weights.
getSize(); j++){
1016 std::cout << inputLayer[i].weights[j] <<
"\t";
1017 } std::cout << std::endl;
1020 std::cout<<
"HiddenWeights:\n";
1021 for(UINT i=0; i<numHiddenNeurons; i++){
1022 std::cout<<
"Neuron: "<<i<<
" Bias: " << hiddenLayer[i].bias <<
" Weights: ";
1023 for(UINT j=0; j<hiddenLayer[i].weights.
getSize(); j++){
1024 std::cout << hiddenLayer[i].weights[j] <<
"\t";
1025 } std::cout << std::endl;
1028 std::cout<<
"OutputWeights:\n";
1029 for(UINT i=0; i<numOutputNeurons; i++){
1030 std::cout<<
"Neuron: "<<i<<
" Bias: " << outputLayer[i].bias <<
" Weights: ";
1031 for(UINT j=0; j<outputLayer[i].weights.
getSize(); j++){
1032 std::cout << outputLayer[i].weights[j] <<
"\t";
1033 } std::cout << std::endl;
// checkForNAN fragment: scan every bias and weight in all three layers,
// returning true at the first NaN found.
1041 for(UINT i=0; i<numInputNeurons; i++){
1042 if( isNAN(inputLayer[i].bias) )
return true;
1043 N = inputLayer[i].weights.
getSize();
1044 for(UINT j=0; j<N; j++){
1045 if( isNAN(inputLayer[i].weights[j]) )
return true;
1049 for(UINT i=0; i<numHiddenNeurons; i++){
1050 if( isNAN(hiddenLayer[i].bias) )
return true;
1051 N = hiddenLayer[i].weights.
getSize();
1052 for(UINT j=0; j<N; j++){
1053 if( isNAN(hiddenLayer[i].weights[j]) )
return true;
1057 for(UINT i=0; i<numOutputNeurons; i++){
1058 if( isNAN(outputLayer[i].bias) )
return true;
1059 N = outputLayer[i].weights.
getSize();
1060 for(UINT j=0; j<N; j++){
1061 if( isNAN(outputLayer[i].weights[j]) )
return true;
// isNAN: relies on IEEE-754 NaN being the only value that compares unequal
// to itself (v != v). std::isnan would express the same intent directly.
1068 bool inline MLP::isNAN(
const Float v)
const{
1069 if( v != v )
return true;
1075 if( !file.is_open() ){
1076 errorLog <<
"save(fstream &file) - File is not open!" << std::endl;
1080 file <<
"GRT_MLP_FILE_V2.0\n";
1084 errorLog <<
"save(fstream &file) - Failed to save Regressifier base settings to file!" << std::endl;
1088 file <<
"NumInputNeurons: "<<numInputNeurons<< std::endl;
1089 file <<
"NumHiddenNeurons: "<<numHiddenNeurons<< std::endl;
1090 file <<
"NumOutputNeurons: "<<numOutputNeurons<< std::endl;
1094 file <<
"NumRandomTrainingIterations: " << numRandomTrainingIterations << std::endl;
1095 file <<
"Momentum: " << momentum << std::endl;
1096 file <<
"Gamma: " << gamma << std::endl;
1097 file <<
"ClassificationMode: " << classificationModeActive << std::endl;
1098 file <<
"UseNullRejection: " << useNullRejection << std::endl;
1099 file <<
"RejectionThreshold: " << nullRejectionThreshold << std::endl;
1102 file <<
"InputLayer: \n";
1103 for(UINT i=0; i<numInputNeurons; i++){
1104 file <<
"InputNeuron: " << i+1 << std::endl;
1105 file <<
"NumInputs: " << inputLayer[i].numInputs << std::endl;
1106 file <<
"Bias: " << inputLayer[i].bias << std::endl;
1107 file <<
"Gamma: " << inputLayer[i].gamma << std::endl;
1108 file <<
"Weights: " << std::endl;
1109 for(UINT j=0; j<inputLayer[i].numInputs; j++){
1110 file << inputLayer[i].weights[j] <<
"\t";
1116 file <<
"HiddenLayer: \n";
1117 for(UINT i=0; i<numHiddenNeurons; i++){
1118 file <<
"HiddenNeuron: " << i+1 << std::endl;
1119 file <<
"NumInputs: " << hiddenLayer[i].numInputs << std::endl;
1120 file <<
"Bias: " << hiddenLayer[i].bias << std::endl;
1121 file <<
"Gamma: " << hiddenLayer[i].gamma << std::endl;
1122 file <<
"Weights: " << std::endl;
1123 for(UINT j=0; j<hiddenLayer[i].numInputs; j++){
1124 file << hiddenLayer[i].weights[j] <<
"\t";
1130 file <<
"OutputLayer: \n";
1131 for(UINT i=0; i<numOutputNeurons; i++){
1132 file <<
"OutputNeuron: " << i+1 << std::endl;
1133 file <<
"NumInputs: " << outputLayer[i].numInputs << std::endl;
1134 file <<
"Bias: " << outputLayer[i].bias << std::endl;
1135 file <<
"Gamma: " << outputLayer[i].gamma << std::endl;
1136 file <<
"Weights: " << std::endl;
1137 for(UINT j=0; j<outputLayer[i].numInputs; j++){
1138 file << outputLayer[i].weights[j] <<
"\t";
1149 std::string activationFunction;
1154 if( !file.is_open() ){
1155 errorLog <<
"load(fstream &file) - File is not open!" << std::endl;
1164 if( word ==
"GRT_MLP_FILE_V1.0" ){
1165 return loadLegacyModelFromFile( file );
1169 if( word !=
"GRT_MLP_FILE_V2.0" ){
1171 errorLog <<
"load(fstream &file) - Failed to find file header!" << std::endl;
1178 errorLog <<
"load(fstream &file) - Failed to load regressifier base settings from file!" << std::endl;
1183 if(word !=
"NumInputNeurons:"){
1185 errorLog <<
"load(fstream &file) - Failed to find NumInputNeurons!" << std::endl;
1188 file >> numInputNeurons;
1189 numInputDimensions = numInputNeurons;
1192 if(word !=
"NumHiddenNeurons:"){
1194 errorLog <<
"load(fstream &file) - Failed to find NumHiddenNeurons!" << std::endl;
1197 file >> numHiddenNeurons;
1200 if(word !=
"NumOutputNeurons:"){
1202 errorLog <<
"load(fstream &file) - Failed to find NumOutputNeurons!" << std::endl;
1205 file >> numOutputNeurons;
1208 if(word !=
"InputLayerActivationFunction:"){
1210 errorLog <<
"load(fstream &file) - Failed to find InputLayerActivationFunction!" << std::endl;
1213 file >> activationFunction;
1217 if(word !=
"HiddenLayerActivationFunction:"){
1219 errorLog <<
"load(fstream &file) - Failed to find HiddenLayerActivationFunction!" << std::endl;
1222 file >> activationFunction;
1226 if(word !=
"OutputLayerActivationFunction:"){
1228 errorLog <<
"load(fstream &file) - Failed to find OutputLayerActivationFunction!" << std::endl;
1231 file >> activationFunction;
1235 if(word !=
"NumRandomTrainingIterations:"){
1237 errorLog <<
"load(fstream &file) - Failed to find NumRandomTrainingIterations!" << std::endl;
1240 file >> numRandomTrainingIterations;
1243 if(word !=
"Momentum:"){
1245 errorLog <<
"load(fstream &file) - Failed to find Momentum!" << std::endl;
1251 if(word !=
"Gamma:"){
1253 errorLog <<
"load(fstream &file) - Failed to find Gamma!" << std::endl;
1259 if(word !=
"ClassificationMode:"){
1261 errorLog <<
"load(fstream &file) - Failed to find ClassificationMode!" << std::endl;
1264 file >> classificationModeActive;
1267 if(word !=
"UseNullRejection:"){
1269 errorLog <<
"load(fstream &file) - Failed to find UseNullRejection!" << std::endl;
1272 file >> useNullRejection;
1275 if(word !=
"RejectionThreshold:"){
1277 errorLog <<
"load(fstream &file) - Failed to find RejectionThreshold!" << std::endl;
1280 file >> nullRejectionThreshold;
1282 if( trained ) initialized =
true;
1283 else init(numInputNeurons,numHiddenNeurons,numOutputNeurons);
1288 inputLayer.
resize( numInputNeurons );
1289 hiddenLayer.
resize( numHiddenNeurons );
1290 outputLayer.
resize( numOutputNeurons );
1294 if(word !=
"InputLayer:"){
1296 errorLog <<
"load(fstream &file) - Failed to find InputLayer!" << std::endl;
1300 for(UINT i=0; i<numInputNeurons; i++){
1301 UINT tempNeuronID = 0;
1304 if(word !=
"InputNeuron:"){
1306 errorLog <<
"load(fstream &file) - Failed to find InputNeuron!" << std::endl;
1309 file >> tempNeuronID;
1311 if( tempNeuronID != i+1 ){
1313 errorLog <<
"load(fstream &file) - InputNeuron ID does not match!" << std::endl;
1318 if(word !=
"NumInputs:"){
1320 errorLog <<
"load(fstream &file) - Failed to find NumInputs!" << std::endl;
1323 file >> inputLayer[i].numInputs;
1326 inputLayer[i].weights.
resize( inputLayer[i].numInputs );
1329 if(word !=
"Bias:"){
1331 errorLog <<
"load(fstream &file) - Failed to find Bias!" << std::endl;
1334 file >> inputLayer[i].bias;
1337 if(word !=
"Gamma:"){
1339 errorLog <<
"load(fstream &file) - Failed to find Gamma!" << std::endl;
1342 file >> inputLayer[i].gamma;
1345 if(word !=
"Weights:"){
1347 errorLog <<
"load(fstream &file) - Failed to find Weights!" << std::endl;
1351 for(UINT j=0; j<inputLayer[i].numInputs; j++){
1352 file >> inputLayer[i].weights[j];
1358 if(word !=
"HiddenLayer:"){
1360 errorLog <<
"load(fstream &file) - Failed to find HiddenLayer!" << std::endl;
1364 for(UINT i=0; i<numHiddenNeurons; i++){
1365 UINT tempNeuronID = 0;
1368 if(word !=
"HiddenNeuron:"){
1370 errorLog <<
"load(fstream &file) - Failed to find HiddenNeuron!" << std::endl;
1373 file >> tempNeuronID;
1375 if( tempNeuronID != i+1 ){
1377 errorLog <<
"load(fstream &file) - Failed to find HiddenNeuron ID does not match!" << std::endl;
1382 if(word !=
"NumInputs:"){
1384 errorLog <<
"load(fstream &file) - Failed to find NumInputs!" << std::endl;
1387 file >> hiddenLayer[i].numInputs;
1390 hiddenLayer[i].weights.
resize( hiddenLayer[i].numInputs );
1393 if(word !=
"Bias:"){
1395 errorLog <<
"load(fstream &file) - Failed to find Bias!" << std::endl;
1398 file >> hiddenLayer[i].bias;
1401 if(word !=
"Gamma:"){
1403 errorLog <<
"load(fstream &file) - Failed to find Gamma!" << std::endl;
1406 file >> hiddenLayer[i].gamma;
1409 if(word !=
"Weights:"){
1411 errorLog <<
"load(fstream &file) - Failed to find Weights!" << std::endl;
1415 for(
unsigned int j=0; j<hiddenLayer[i].numInputs; j++){
1416 file >> hiddenLayer[i].weights[j];
1422 if(word !=
"OutputLayer:"){
1424 errorLog <<
"load(fstream &file) - Failed to find OutputLayer!" << std::endl;
1428 for(UINT i=0; i<numOutputNeurons; i++){
1429 UINT tempNeuronID = 0;
1432 if(word !=
"OutputNeuron:"){
1434 errorLog <<
"load(fstream &file) - Failed to find OutputNeuron!" << std::endl;
1437 file >> tempNeuronID;
1439 if( tempNeuronID != i+1 ){
1441 errorLog <<
"load(fstream &file) - Failed to find OuputNeuron ID does not match!!" << std::endl;
1446 if(word !=
"NumInputs:"){
1448 errorLog <<
"load(fstream &file) - Failed to find NumInputs!" << std::endl;
1451 file >> outputLayer[i].numInputs;
1454 outputLayer[i].weights.
resize( outputLayer[i].numInputs );
1457 if(word !=
"Bias:"){
1459 errorLog <<
"load(fstream &file) - Failed to find Bias!" << std::endl;
1462 file >> outputLayer[i].bias;
1465 if(word !=
"Gamma:"){
1467 errorLog <<
"load(fstream &file) - Failed to find Gamma!" << std::endl;
1470 file >> outputLayer[i].gamma;
1473 if(word !=
"Weights:"){
1475 errorLog <<
"load(fstream &file) - Failed to find Weights!" << std::endl;
1479 for(UINT j=0; j<outputLayer[i].numInputs; j++){
1480 file >> outputLayer[i].weights[j];
1490 if( classificationModeActive )
1491 return numOutputNeurons;
1496 return numInputNeurons;
1500 return numHiddenNeurons;
1504 return numOutputNeurons;
1508 return inputLayerActivationFunction;
1512 return hiddenLayerActivationFunction;
1516 return outputLayerActivationFunction;
1520 return numRandomTrainingIterations;
1524 return learningRate;
1536 return trainingError;
1540 return classificationModeActive;
1544 return !classificationModeActive;
1560 return trainingErrorLog;
1564 return useNullRejection;
1568 return nullRejectionCoeff;
1572 return nullRejectionThreshold;
1576 if( trained )
return maxLikelihood;
1581 if( trained && classificationModeActive )
return classLikelihoods;
1587 if( trained && classificationModeActive )
return regressionData;
1592 if( trained && classificationModeActive )
return predictedClassLabel;
1597 std::string activationName;
1599 switch(activationFunction){
1600 case(Neuron::LINEAR):
1601 activationName =
"LINEAR";
1603 case(Neuron::SIGMOID):
1604 activationName =
"SIGMOID";
1606 case(Neuron::BIPOLAR_SIGMOID):
1607 activationName =
"BIPOLAR_SIGMOID";
1610 activationName =
"UNKNOWN";
1614 return activationName;
1618 UINT activationFunction = 0;
1620 if(activationName ==
"LINEAR" ){
1621 activationFunction = 0;
1622 return activationFunction;
1624 if(activationName ==
"SIGMOID" ){
1625 activationFunction = 1;
1626 return activationFunction;
1628 if(activationName ==
"BIPOLAR_SIGMOID" ){
1629 activationFunction = 2;
1630 return activationFunction;
1632 return activationFunction;
1636 if( actvationFunction >= Neuron::LINEAR && actvationFunction < Neuron::NUMBER_OF_ACTIVATION_FUNCTIONS )
return true;
1643 warningLog <<
"setInputLayerActivationFunction(const UINT activationFunction) - The activation function is not valid. It should be one of the Neuron ActivationFunctions enums." << std::endl;
1646 this->inputLayerActivationFunction = activationFunction;
1649 return init(numInputNeurons,numHiddenNeurons,numOutputNeurons);
1659 warningLog <<
"setHiddenLayerActivationFunction(const UINT activationFunction) - The activation function is not valid. It should be one of the Neuron ActivationFunctions enums." << std::endl;
1662 this->hiddenLayerActivationFunction = activationFunction;
1665 return init(numInputNeurons,numHiddenNeurons,numOutputNeurons);
1675 warningLog <<
"setOutputLayerActivationFunction(const UINT activationFunction) - The activation function is not valid. It should be one of the Neuron ActivationFunctions enums." << std::endl;
1678 this->outputLayerActivationFunction = activationFunction;
1681 return init(numInputNeurons,numHiddenNeurons,numOutputNeurons);
1692 if( momentum >= 0 && momentum <= 1.0 ){
1693 this->momentum = momentum;
1702 warningLog <<
"setGamma(const Float gamma) - Gamma must be greater than zero!" << std::endl;
1705 this->gamma = gamma;
1708 return init(numInputNeurons,numHiddenNeurons,numOutputNeurons);
1715 if( numRandomTrainingIterations > 0 ){
1716 this->numRandomTrainingIterations = numRandomTrainingIterations;
1723 this->useNullRejection = useNullRejection;
1728 if( nullRejectionCoeff > 0 ){
1729 this->nullRejectionCoeff = nullRejectionCoeff;
1735 bool MLP::loadLegacyModelFromFile( std::fstream &file ){
1740 if(word !=
"NumInputNeurons:"){
1742 errorLog <<
"load(fstream &file) - Failed to find NumInputNeurons!" << std::endl;
1745 file >> numInputNeurons;
1746 numInputDimensions = numInputNeurons;
1749 if(word !=
"NumHiddenNeurons:"){
1751 errorLog <<
"load(fstream &file) - Failed to find NumHiddenNeurons!" << std::endl;
1754 file >> numHiddenNeurons;
1757 if(word !=
"NumOutputNeurons:"){
1759 errorLog <<
"load(fstream &file) - Failed to find NumOutputNeurons!" << std::endl;
1762 file >> numOutputNeurons;
1765 if(word !=
"InputLayerActivationFunction:"){
1767 errorLog <<
"load(fstream &file) - Failed to find InputLayerActivationFunction!" << std::endl;
1774 if(word !=
"HiddenLayerActivationFunction:"){
1776 errorLog <<
"load(fstream &file) - Failed to find HiddenLayerActivationFunction!" << std::endl;
1783 if(word !=
"OutputLayerActivationFunction:"){
1785 errorLog <<
"load(fstream &file) - Failed to find OutputLayerActivationFunction!" << std::endl;
1792 if(word !=
"MinNumEpochs:"){
1794 errorLog <<
"load(fstream &file) - Failed to find MinNumEpochs!" << std::endl;
1797 file >> minNumEpochs;
1800 if(word !=
"MaxNumEpochs:"){
1802 errorLog <<
"load(fstream &file) - Failed to find MaxNumEpochs!" << std::endl;
1805 file >> maxNumEpochs;
1808 if(word !=
"NumRandomTrainingIterations:"){
1810 errorLog <<
"load(fstream &file) - Failed to find NumRandomTrainingIterations!" << std::endl;
1813 file >> numRandomTrainingIterations;
1816 if(word !=
"ValidationSetSize:"){
1818 errorLog <<
"load(fstream &file) - Failed to find ValidationSetSize!" << std::endl;
1821 file >> validationSetSize;
1824 if(word !=
"MinChange:"){
1826 errorLog <<
"load(fstream &file) - Failed to find MinChange!" << std::endl;
1832 if(word !=
"TrainingRate:"){
1834 errorLog <<
"load(fstream &file) - Failed to find TrainingRate!" << std::endl;
1837 file >> learningRate;
1840 if(word !=
"Momentum:"){
1842 errorLog <<
"load(fstream &file) - Failed to find Momentum!" << std::endl;
1848 if(word !=
"Gamma:"){
1850 errorLog <<
"load(fstream &file) - Failed to find Gamma!" << std::endl;
1856 if(word !=
"UseValidationSet:"){
1858 errorLog <<
"load(fstream &file) - Failed to find UseValidationSet!" << std::endl;
1861 file >> useValidationSet;
1864 if(word !=
"RandomiseTrainingOrder:"){
1866 errorLog <<
"load(fstream &file) - Failed to find RandomiseTrainingOrder!" << std::endl;
1869 file >> randomiseTrainingOrder;
1872 if(word !=
"UseScaling:"){
1874 errorLog <<
"load(fstream &file) - Failed to find UseScaling!" << std::endl;
1880 if(word !=
"ClassificationMode:"){
1882 errorLog <<
"load(fstream &file) - Failed to find ClassificationMode!" << std::endl;
1885 file >> classificationModeActive;
1888 if(word !=
"UseNullRejection:"){
1890 errorLog <<
"load(fstream &file) - Failed to find UseNullRejection!" << std::endl;
1893 file >> useNullRejection;
1896 if(word !=
"RejectionThreshold:"){
1898 errorLog <<
"load(fstream &file) - Failed to find RejectionThreshold!" << std::endl;
1901 file >> nullRejectionThreshold;
1904 inputLayer.
resize( numInputNeurons );
1905 hiddenLayer.
resize( numHiddenNeurons );
1906 outputLayer.
resize( numOutputNeurons );
1910 if(word !=
"InputLayer:"){
1912 errorLog <<
"load(fstream &file) - Failed to find InputLayer!" << std::endl;
1916 for(UINT i=0; i<numInputNeurons; i++){
1917 UINT tempNeuronID = 0;
1920 if(word !=
"InputNeuron:"){
1922 errorLog <<
"load(fstream &file) - Failed to find InputNeuron!" << std::endl;
1925 file >> tempNeuronID;
1927 if( tempNeuronID != i+1 ){
1929 errorLog <<
"load(fstream &file) - InputNeuron ID does not match!" << std::endl;
1934 if(word !=
"NumInputs:"){
1936 errorLog <<
"load(fstream &file) - Failed to find NumInputs!" << std::endl;
1939 file >> inputLayer[i].numInputs;
1942 inputLayer[i].weights.
resize( inputLayer[i].numInputs );
1945 if(word !=
"Bias:"){
1947 errorLog <<
"load(fstream &file) - Failed to find Bias!" << std::endl;
1950 file >> inputLayer[i].bias;
1953 if(word !=
"Gamma:"){
1955 errorLog <<
"load(fstream &file) - Failed to find Gamma!" << std::endl;
1958 file >> inputLayer[i].gamma;
1961 if(word !=
"Weights:"){
1963 errorLog <<
"load(fstream &file) - Failed to find Weights!" << std::endl;
1967 for(UINT j=0; j<inputLayer[i].numInputs; j++){
1968 file >> inputLayer[i].weights[j];
1974 if(word !=
"HiddenLayer:"){
1976 errorLog <<
"load(fstream &file) - Failed to find HiddenLayer!" << std::endl;
1980 for(UINT i=0; i<numHiddenNeurons; i++){
1981 UINT tempNeuronID = 0;
1984 if(word !=
"HiddenNeuron:"){
1986 errorLog <<
"load(fstream &file) - Failed to find HiddenNeuron!" << std::endl;
1989 file >> tempNeuronID;
1991 if( tempNeuronID != i+1 ){
1993 errorLog <<
"load(fstream &file) - Failed to find HiddenNeuron ID does not match!" << std::endl;
1998 if(word !=
"NumInputs:"){
2000 errorLog <<
"load(fstream &file) - Failed to find NumInputs!" << std::endl;
2003 file >> hiddenLayer[i].numInputs;
2006 hiddenLayer[i].weights.
resize( hiddenLayer[i].numInputs );
2009 if(word !=
"Bias:"){
2011 errorLog <<
"load(fstream &file) - Failed to find Bias!" << std::endl;
2014 file >> hiddenLayer[i].bias;
2017 if(word !=
"Gamma:"){
2019 errorLog <<
"load(fstream &file) - Failed to find Gamma!" << std::endl;
2022 file >> hiddenLayer[i].gamma;
2025 if(word !=
"Weights:"){
2027 errorLog <<
"load(fstream &file) - Failed to find Weights!" << std::endl;
2031 for(
unsigned int j=0; j<hiddenLayer[i].numInputs; j++){
2032 file >> hiddenLayer[i].weights[j];
2038 if(word !=
"OutputLayer:"){
2040 errorLog <<
"load(fstream &file) - Failed to find OutputLayer!" << std::endl;
2044 for(UINT i=0; i<numOutputNeurons; i++){
2045 UINT tempNeuronID = 0;
2048 if(word !=
"OutputNeuron:"){
2050 errorLog <<
"load(fstream &file) - Failed to find OutputNeuron!" << std::endl;
2053 file >> tempNeuronID;
2055 if( tempNeuronID != i+1 ){
2057 errorLog <<
"load(fstream &file) - Failed to find OuputNeuron ID does not match!!" << std::endl;
2062 if(word !=
"NumInputs:"){
2064 errorLog <<
"load(fstream &file) - Failed to find NumInputs!" << std::endl;
2067 file >> outputLayer[i].numInputs;
2070 outputLayer[i].weights.
resize( outputLayer[i].numInputs );
2073 if(word !=
"Bias:"){
2075 errorLog <<
"load(fstream &file) - Failed to find Bias!" << std::endl;
2078 file >> outputLayer[i].bias;
2081 if(word !=
"Gamma:"){
2083 errorLog <<
"load(fstream &file) - Failed to find Gamma!" << std::endl;
2086 file >> outputLayer[i].gamma;
2089 if(word !=
"Weights:"){
2091 errorLog <<
"load(fstream &file) - Failed to find Weights!" << std::endl;
2095 for(UINT j=0; j<outputLayer[i].numInputs; j++){
2096 file >> outputLayer[i].weights[j];
2102 inputVectorRanges.
resize( numInputNeurons );
2103 targetVectorRanges.
resize( numOutputNeurons );
2107 if(word !=
"InputVectorRanges:"){
2109 errorLog <<
"load(fstream &file) - Failed to find InputVectorRanges!" << std::endl;
2112 for(UINT j=0; j<inputVectorRanges.size(); j++){
2113 file >> inputVectorRanges[j].minValue;
2114 file >> inputVectorRanges[j].maxValue;
2118 if(word !=
"OutputVectorRanges:"){
2120 errorLog <<
"load(fstream &file) - Failed to find OutputVectorRanges!" << std::endl;
2123 for(UINT j=0; j<targetVectorRanges.size(); j++){
2124 file >> targetVectorRanges[j].minValue;
2125 file >> targetVectorRanges[j].maxValue;
bool setLearningRate(const Float learningRate)
UINT getNumHiddenNeurons() const
virtual bool save(std::fstream &file) const
MLP & operator=(const MLP &rhs)
VectorFloat feedforward(VectorFloat data)
#define DEFAULT_NULL_LIKELIHOOD_VALUE
VectorFloat getClassDistances() const
RegressionData reformatAsRegressionData() const
bool setTrainingRate(const Float trainingRate)
Float scale(const Float &x, const Float &minSource, const Float &maxSource, const Float &minTarget, const Float &maxTarget, const bool constrain=false)
bool getNullRejectionEnabled() const
Vector< MinMax > getInputRanges() const
bool validateActivationFunction(const UINT avactivationFunction) const
virtual bool resize(const unsigned int size)
Float back_prop(const VectorFloat &inputVector, const VectorFloat &targetVector, const Float alpha, const Float beta)
static Float getMin(const VectorFloat &x)
UINT getInputLayerActivationFunction() const
bool copyBaseVariables(const Regressifier *regressifier)
UINT getNumInputDimensions() const
Vector< Neuron > getHiddenLayer() const
bool init(const UINT numInputNeurons, const UINT numHiddenNeurons, const UINT numOutputNeurons)
std::string activationFunctionToString(const UINT activationFunction) const
Vector< VectorFloat > getTrainingLog() const
UINT getNumRandomTrainingIterations() const
Vector< MinMax > getTargetRanges() const
bool saveBaseSettingsToFile(std::fstream &file) const
virtual bool train_(ClassificationData &trainingData)
bool scale(const Float minTarget, const Float maxTarget)
bool setInputLayerActivationFunction(const UINT activationFunction)
This class implements a Multilayer Perceptron Artificial Neural Network.
UINT getNumTargetDimensions() const
Vector< Neuron > getOutputLayer() const
bool setHiddenLayerActivationFunction(const UINT activationFunction)
std::string getRegressifierType() const
Vector< Neuron > getInputLayer() const
UINT getNumOutputNeurons() const
bool loadBaseSettingsFromFile(std::fstream &file)
void printNetwork() const
UINT getNumDimensions() const
UINT getNumClasses() const
UINT activationFunctionFromString(const std::string activationName) const
Float getMomentum() const
virtual bool print() const
bool setNullRejection(const bool useNullRejection)
VectorFloat getClassLikelihoods() const
UINT getOutputLayerActivationFunction() const
bool setMomentum(const Float momentum)
Float getTrainingError() const
Float getMaximumLikelihood() const
UINT getHiddenLayerActivationFunction() const
Float getNullRejectionCoeff() const
virtual bool deepCopyFrom(const Regressifier *regressifier)
RegressionData split(const UINT trainingSizePercentage)
bool setNumRandomTrainingIterations(const UINT numRandomTrainingIterations)
bool setGamma(const Float gamma)
int getRandomNumberInt(int minRange, int maxRange)
UINT getNumClasses() const
Float getNullRejectionThreshold() const
virtual bool load(std::fstream &file)
bool setNullRejectionCoeff(const Float nullRejectionCoeff)
bool getRegressionModeActive() const
Float getTrainingRate() const
bool setOutputLayerActivationFunction(const UINT activationFunction)
static Float sum(const VectorFloat &x)
void setSeed(unsigned long long seed=0)
UINT getNumSamples() const
UINT getPredictedClassLabel() const
bool getClassificationModeActive() const
virtual bool predict_(VectorFloat &inputVector)
UINT getNumInputNeurons() const