#define GRT_DLL_EXPORTS
#include "../../../CoreModules/Regressifier.h"

const std::string MLP::id = "MLP";
inputLayerActivationFunction = Neuron::LINEAR;
hiddenLayerActivationFunction = Neuron::TANH;
outputLayerActivationFunction = Neuron::LINEAR;
validationSetSize = 20;
trainingMode = ONLINE_GRADIENT_DESCENT;
nullRejectionCoeff = 0.9;
nullRejectionThreshold = 0;
useValidationSet = true;
randomiseTrainingOrder = false;
classificationModeActive = false;
useNullRejection = true;
this->numInputNeurons = rhs.numInputNeurons;
this->numHiddenNeurons = rhs.numHiddenNeurons;
this->numOutputNeurons = rhs.numOutputNeurons;
this->inputLayerActivationFunction = rhs.inputLayerActivationFunction;
this->hiddenLayerActivationFunction = rhs.hiddenLayerActivationFunction;
this->outputLayerActivationFunction = rhs.outputLayerActivationFunction;
this->trainingMode = rhs.trainingMode;
this->momentum = rhs.momentum;
this->trainingError = rhs.trainingError;
this->gamma = rhs.gamma;
this->initialized = rhs.initialized;
this->inputLayer = rhs.inputLayer;
this->hiddenLayer = rhs.hiddenLayer;
this->outputLayer = rhs.outputLayer;
this->inputVectorRanges = rhs.inputVectorRanges;
this->targetVectorRanges = rhs.targetVectorRanges;
this->trainingErrorLog = rhs.trainingErrorLog;
this->outputTargets = rhs.outputTargets;
this->classificationModeActive = rhs.classificationModeActive;
this->useNullRejection = rhs.useNullRejection;
this->predictedClassLabel = rhs.predictedClassLabel;
this->nullRejectionCoeff = rhs.nullRejectionCoeff;
this->nullRejectionThreshold = rhs.nullRejectionThreshold;
this->maxLikelihood = rhs.maxLikelihood;
this->classLikelihoods = rhs.classLikelihoods;
if( regressifier == NULL ){
    errorLog << __GRT_LOG__ << " regressifier is NULL!" << std::endl;

errorLog << __GRT_LOG__ << " regressifier is not the correct type!" << std::endl;

*this = *dynamic_cast<const MLP*>(regressifier);
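// Note (added): a minimal usage sketch of the polymorphic deep copy, assuming
// two MLP instances (the names "mlpA" and "mlpB" are hypothetical):
//
//   GRT::MLP mlpA, mlpB;
//   GRT::Regressifier *base = &mlpA;
//   if( !mlpB.deepCopyFrom( base ) ){ /* copy failed */ }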
errorLog << __GRT_LOG__ << " The MLP has not been initialized!" << std::endl;

errorLog << __GRT_LOG__ << " The number of input dimensions in the training data (" << trainingData.getNumDimensions() << ") does not match that of the MLP (" << numInputNeurons << ")" << std::endl;

errorLog << __GRT_LOG__ << " The number of classes in the training data (" << trainingData.getNumClasses() << ") does not match that of the MLP (" << numOutputNeurons << ")" << std::endl;

classificationModeActive = true;

return trainModel(regressionData);

classificationModeActive = false;

return trainModel(trainingData);
errorLog << __GRT_LOG__ << " Model not trained!" << std::endl;

if( inputVector.getSize() != numInputNeurons ){
    errorLog << __GRT_LOG__ << " The size of the input Vector (" << inputVector.getSize() << ") does not match that of the number of input dimensions (" << numInputNeurons << ") " << std::endl;

if( classificationModeActive ){

    const UINT K = (UINT)regressionData.getSize();
    classLikelihoods = regressionData;

    for(UINT i=0; i<K; i++){
        classLikelihoods[i] += minValue;

    for(UINT i=0; i<K; i++){
        classLikelihoods[i] /= sum;

    Float bestValue = classLikelihoods[0];

    for(UINT i=1; i<K; i++){
        if( classLikelihoods[i] > bestValue ){
            bestValue = classLikelihoods[i];

    maxLikelihood = bestValue;
    predictedClassLabel = bestIndex+1;

    if( useNullRejection ){
        if( maxLikelihood < nullRejectionCoeff ){
            predictedClassLabel = 0;
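// Note (added): in classification mode the K raw network outputs are turned
// into pseudo-likelihoods by shifting with the minimum output value and
// normalizing so they sum to 1, i.e. roughly
//
//   p_i = (y_i + minValue) / sum_j( y_j + minValue )
//
// The predicted class is argmax_i p_i (labels are 1-based, hence bestIndex+1),
// and with null rejection enabled a low maximum likelihood maps to the
// special rejection label 0.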
bool MLP::init(const UINT numInputNeurons, const UINT numHiddenNeurons, const UINT numOutputNeurons){
    return init(numInputNeurons, numHiddenNeurons, numOutputNeurons, inputLayerActivationFunction, hiddenLayerActivationFunction, outputLayerActivationFunction );
}

bool MLP::init(const UINT numInputNeurons,
               const UINT numHiddenNeurons,
               const UINT numOutputNeurons,
               const Neuron::Type inputLayerActivationFunction,
               const Neuron::Type hiddenLayerActivationFunction,
               const Neuron::Type outputLayerActivationFunction){
random.setSeed( (UINT)time(NULL) );

if( numInputNeurons == 0 || numHiddenNeurons == 0 || numOutputNeurons == 0 ){
    if( numInputNeurons == 0 ){  errorLog << __GRT_LOG__ << " The number of input neurons is zero!" << std::endl; }
    if( numHiddenNeurons == 0 ){ errorLog << __GRT_LOG__ << " The number of hidden neurons is zero!" << std::endl; }
    if( numOutputNeurons == 0 ){ errorLog << __GRT_LOG__ << " The number of output neurons is zero!" << std::endl; }

errorLog << __GRT_LOG__ << " One of the activation functions failed the validation check!" << std::endl;

this->numInputNeurons = numInputNeurons;
this->numHiddenNeurons = numHiddenNeurons;
this->numOutputNeurons = numOutputNeurons;

this->numInputDimensions = numInputNeurons;
this->numOutputDimensions = numOutputNeurons;

this->inputLayerActivationFunction = inputLayerActivationFunction;
this->hiddenLayerActivationFunction = hiddenLayerActivationFunction;
this->outputLayerActivationFunction = outputLayerActivationFunction;

inputLayer.resize(numInputNeurons);
hiddenLayer.resize(numHiddenNeurons);
outputLayer.resize(numOutputNeurons);

for(UINT i=0; i<numInputNeurons; i++){
    inputLayer[i].init(1,inputLayerActivationFunction,random);
    inputLayer[i].weights[0] = 1.0;
    inputLayer[i].bias = 0.0;
    inputLayer[i].gamma = gamma;

const Float hiddenLayerScaleFactor = 1.0/sqrt(numInputNeurons);
const Float outputLayerScaleFactor = 1.0/sqrt(numHiddenNeurons);

for(UINT i=0; i<numHiddenNeurons; i++){
    hiddenLayer[i].init(numInputNeurons,hiddenLayerActivationFunction,random,-hiddenLayerScaleFactor,hiddenLayerScaleFactor);
    hiddenLayer[i].gamma = gamma;

for(UINT i=0; i<numOutputNeurons; i++){
    outputLayer[i].init(numHiddenNeurons,outputLayerActivationFunction,random,-outputLayerScaleFactor,outputLayerScaleFactor);
    outputLayer[i].gamma = gamma;
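// Note (added): the hidden and output weights above are initialized uniformly
// in [-1/sqrt(fanIn), 1/sqrt(fanIn)], a common heuristic that keeps initial
// pre-activations inside the responsive range of the activation function.
// A minimal usage sketch of init(), assuming the GRT headers are available:
//
//   GRT::MLP mlp;
//   // 3 inputs, 10 hidden neurons, 2 outputs, with explicit activation functions
//   mlp.init( 3, 10, 2, GRT::Neuron::LINEAR, GRT::Neuron::TANH, GRT::Neuron::LINEAR );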
numHiddenNeurons = 0;
numOutputNeurons = 0;
errorLog << __GRT_LOG__ << " The MLP has not been initialized!" << std::endl;

errorLog << __GRT_LOG__ << " The training data is empty!" << std::endl;

if( useValidationSet ){
    validationData = trainingData.split( 100 - validationSetSize );

if( N != numInputNeurons ){
    errorLog << __GRT_LOG__ << " The number of input dimensions in the training data (" << N << ") does not match that of the MLP (" << numInputNeurons << ")" << std::endl;

if( T != numOutputNeurons ){
    errorLog << __GRT_LOG__ << " The number of target dimensions in the training data (" << T << ") does not match that of the MLP (" << numOutputNeurons << ")" << std::endl;

numInputDimensions = numInputNeurons;
numOutputDimensions = numOutputNeurons;

trainingData.scale(inputVectorRanges,targetVectorRanges,outputTargets.minValue,outputTargets.maxValue);

if( useValidationSet ){
    validationData.scale(inputVectorRanges,targetVectorRanges,outputTargets.minValue,outputTargets.maxValue);

bool tempScalingState = useScaling;

trainingErrorLog.clear();
inputNeuronsOutput.resize(numInputNeurons);
hiddenNeuronsOutput.resize(numHiddenNeurons);
outputNeuronsOutput.resize(numOutputNeurons);
deltaO.resize(numOutputNeurons);
deltaH.resize(numHiddenNeurons);

switch( trainingMode ){
    case ONLINE_GRADIENT_DESCENT:
        if( classificationModeActive ){
            trained = trainOnlineGradientDescentClassification( trainingData, validationData );
        }else{
            trained = trainOnlineGradientDescentRegression( trainingData, validationData );
    default:
        useScaling = tempScalingState;
        errorLog << __GRT_LOG__ << " Unknown training mode!" << std::endl;

useScaling = tempScalingState;
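// Note (added): a minimal end-to-end training sketch, assuming a GRT
// ClassificationData instance named "trainingData" (hypothetical) has been
// loaded elsewhere:
//
//   GRT::MLP mlp;
//   mlp.init( trainingData.getNumDimensions(), 10, trainingData.getNumClasses() );
//   mlp.setMaxNumEpochs( 500 );
//   mlp.setMinChange( 1.0e-4 );
//   if( !mlp.train( trainingData ) ){
//       std::cout << "Failed to train the MLP!" << std::endl;
//   }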
const UINT numTestingExamples = useValidationSet ? validationData.getNumSamples() : M;

totalSquaredTrainingError = 0;
rmsTrainingError = 0;
rmsValidationError = 0;

bool keepTraining = true;

Float lRate = learningRate;
Float lMomentum = momentum;

Float trainingSetAccuracy = 0;
Float trainingSetTotalSquaredError = 0;

Float bestAccuracy = 0;

Float backPropError = 0;

trainingResults.reserve(M);

for(UINT i=0; i<M; i++) indexList[i] = i;

for(UINT iter=0; iter<numRestarts; iter++){

    tempTrainingErrorLog.clear();

    init(numInputNeurons,numHiddenNeurons,numOutputNeurons,inputLayerActivationFunction,hiddenLayerActivationFunction,outputLayerActivationFunction);

    if( randomiseTrainingOrder ){
        for(UINT i=0; i<M; i++){

    while( keepTraining ){

        totalSquaredTrainingError = 0;

        for(UINT i=0; i<M; i++){

            const VectorFloat &trainingExample = trainingData[ indexList[i] ].getInputVector();
            const VectorFloat &targetVector = trainingData[ indexList[i] ].getTargetVector();

            backPropError = back_prop(trainingExample,targetVector,lRate,lMomentum);

            if( isNAN(backPropError) ){
                keepTraining = false;
                errorLog << __GRT_LOG__ << " NaN found in back propagation error, training index: " << indexList[i] << std::endl;

            if( classificationModeActive ){

                bestValue = targetVector[0];

                for(UINT i=1; i<targetVector.size(); i++){
                    if( targetVector[i] > bestValue ){
                        bestValue = targetVector[i];

                classLabel = bestIndex + 1;

                for(UINT i=1; i<numOutputNeurons; i++){
                    if( y[i] > bestValue ){

                predictedClassLabel = bestIndex+1;

                if( classLabel == predictedClassLabel ){

            totalSquaredTrainingError += backPropError;

        keepTraining = false;
        errorLog << __GRT_LOG__ << " NaN found in weights at " << epoch << std::endl;
if( useValidationSet ){
    trainingSetAccuracy = accuracy;
    trainingSetTotalSquaredError = totalSquaredTrainingError;

    totalSquaredTrainingError = 0;

    for(UINT i=0; i<numValidationSamples; i++){
        const VectorFloat &inputVector = validationData[i].getInputVector();
        const VectorFloat &targetVector = validationData[i].getTargetVector();

        if( classificationModeActive ){

            bestValue = targetVector[0];

            for(UINT i=1; i<targetVector.getSize(); i++){ //iterate over the target vector, not the input dimensions
                if( targetVector[i] > bestValue ){
                    bestValue = targetVector[i];

            classLabel = bestIndex + 1;

            for(UINT i=1; i<numOutputNeurons; i++){
                if( y[i] > bestValue ){

            predictedClassLabel = bestIndex+1;

            if( classLabel == predictedClassLabel ){

            for(UINT j=0; j<T; j++){
                totalSquaredTrainingError += SQR( targetVector[j]-y[j] );

    accuracy = (accuracy/Float(numValidationSamples))*100.0; //convert the accuracy to a percentage
    rmsValidationError = sqrt( totalSquaredTrainingError / Float(numValidationSamples) );
}else{
    accuracy = (accuracy/Float(M))*100.0; //convert the accuracy to a percentage
    rmsTrainingError = sqrt( totalSquaredTrainingError / Float(M) );

temp[0] = 100.0 - trainingSetAccuracy;
temp[1] = 100.0 - accuracy;
tempTrainingErrorLog.push_back( temp );

error = 100.0 - accuracy;

trainingResults.push_back( result );
delta = fabs( error - lastError );

trainingLog << "Random Training Iteration: " << iter+1 << " Epoch: " << epoch << " Error: " << error << " Delta: " << delta << std::endl;

if( ++epoch >= maxNumEpochs ){
    keepTraining = false;

if( delta <= minChange && epoch >= minNumEpochs ){
    keepTraining = false;

trainingResultsObserverManager.notifyObservers( result );

if( lastError < bestError ){

    bestError = lastError;
    bestTSError = totalSquaredTrainingError;
    bestRMSError = rmsTrainingError;
    bestAccuracy = accuracy;

    trainingErrorLog = tempTrainingErrorLog;

trainingLog << "Best Accuracy: " << bestAccuracy << " in Random Training Iteration: " << bestIter+1 << std::endl;

errorLog << __GRT_LOG__ << " NAN Found!" << std::endl;

trainingError = bestAccuracy;
if( useNullRejection ){

    Float averageValue = 0;
    VectorFloat classificationPredictions, inputVector, targetVector;

    for(UINT i=0; i<numTestingExamples; i++){
        inputVector = useValidationSet ? validationData[i].getInputVector() : trainingData[i].getInputVector();
        targetVector = useValidationSet ? validationData[i].getTargetVector() : trainingData[i].getTargetVector();

        bestValue = targetVector[0];

        for(UINT i=1; i<targetVector.size(); i++){
            if( targetVector[i] > bestValue ){
                bestValue = targetVector[i];

        classLabel = bestIndex + 1;

        for(UINT i=1; i<y.size(); i++){
            if( y[i] > bestValue ){

        predictedClassLabel = bestIndex+1;

        if( classLabel == predictedClassLabel ){
            classificationPredictions.push_back( bestValue );
            averageValue += bestValue;

    averageValue /= Float(classificationPredictions.size());

    for(UINT i=0; i<classificationPredictions.size(); i++){
        stdDev += SQR(classificationPredictions[i]-averageValue);

    stdDev = sqrt( stdDev / Float(classificationPredictions.size()-1) );

    nullRejectionThreshold = averageValue-(stdDev*nullRejectionCoeff);
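// Note (added): the null-rejection threshold computed above follows
//
//   nullRejectionThreshold = mu - (sigma * nullRejectionCoeff)
//
// where mu and sigma are the mean and sample standard deviation of the
// winning output values over the correctly classified training (or
// validation) examples.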
const UINT numValidationSamples = useValidationSet ? validationData.getNumSamples() : M;

bool keepTraining = true;

Float alpha = learningRate;
Float beta = momentum;

totalSquaredTrainingError = 0;
rmsTrainingError = 0;

trainingResults.reserve(M);

for(UINT i=0; i<M; i++) indexList[i] = i;

for(UINT iter=0; iter<numRestarts; iter++){

    tempTrainingErrorLog.clear();

    init(numInputNeurons,numHiddenNeurons,numOutputNeurons,inputLayerActivationFunction,hiddenLayerActivationFunction,outputLayerActivationFunction);

    if( randomiseTrainingOrder ){
        std::random_shuffle(indexList.begin(), indexList.end()); //std::random_shuffle was removed in C++17; std::shuffle is the modern replacement

    while( keepTraining ){

        totalSquaredTrainingError = 0;
        rmsTrainingError = 0;
        rmsValidationError = 0;

        for(UINT i=0; i<M; i++){

            const UINT randomIndex = indexList[i];
            const VectorFloat &trainingExample = trainingData[ randomIndex ].getInputVector();
            const VectorFloat &targetVector = trainingData[ randomIndex ].getTargetVector();

            Float backPropError = back_prop(trainingExample,targetVector,alpha,beta);

            if( isNAN(backPropError) ){
                keepTraining = false;
                errorLog << __GRT_LOG__ << " NaN found in back propagation error, epoch: " << epoch << " training iter: " << i << " random index: " << randomIndex << std::endl;

            totalSquaredTrainingError += backPropError;

        keepTraining = false;
        errorLog << __GRT_LOG__ << " NaN found in weights at epoch " << epoch << std::endl;

        rmsTrainingError = sqrt( totalSquaredTrainingError / Float(M) );

        if( useValidationSet ){

            for(UINT n=0; n<numValidationSamples; n++){
                const VectorFloat &trainingExample = validationData[n].getInputVector();
                const VectorFloat &targetVector = validationData[n].getTargetVector();

                for(UINT j=0; j<T; j++){
                    error += SQR( targetVector[j]-y[j] );

                rmsValidationError += sqrt( error );

            rmsValidationError = sqrt( rmsValidationError / Float(numValidationSamples) );

        temp[0] = rmsTrainingError;
        temp[1] = rmsValidationError;
        tempTrainingErrorLog.push_back( temp );

        error = rmsTrainingError;

        trainingResults.push_back( result );

        delta = fabs( error - lastError );

        trainingLog << "Random Training Iteration: " << iter+1 << " Epoch: " << epoch << " Rms Training Error: " << rmsTrainingError;
        if( useValidationSet ) trainingLog << " Rms Validation Error: " << rmsValidationError;
        trainingLog << " Delta: " << delta << std::endl;

        if( ++epoch >= maxNumEpochs ){
            keepTraining = false;

        if( delta <= minChange && epoch >= minNumEpochs ){
            keepTraining = false;

        trainingResultsObserverManager.notifyObservers( result );

    if( lastError < bestError ){

        bestError = lastError;
        bestTSError = totalSquaredTrainingError;
        bestRMSError = rmsTrainingError;

        trainingErrorLog = tempTrainingErrorLog;

trainingLog << "Best Rms Error: " << bestRMSError << " in Random Training Iteration: " << bestIter+1 << std::endl;

errorLog << __GRT_LOG__ << " NAN Found!" << std::endl;

trainingError = bestRMSError;
feedforward(trainingExample,inputNeuronsOutput,hiddenNeuronsOutput,outputNeuronsOutput);

for(k=0; k<numOutputNeurons; k++){
    error = targetVector[k]-outputNeuronsOutput[k];
    sqrError += SQR( error );
    deltaO[k] = outputLayer[k].getDerivative( outputNeuronsOutput[k] ) * error;

sqrError = sqrt( sqrError );

for(j=0; j<numHiddenNeurons; j++){
    for(k=0; k<numOutputNeurons; k++){
        error += outputLayer[k].weights[j] * deltaO[k];
    deltaH[j] = hiddenLayer[j].getDerivative( hiddenNeuronsOutput[j] ) * error;

for(j=0; j<numHiddenNeurons; j++){
    for(k=0; k<numOutputNeurons; k++){
        update = deltaO[k] * hiddenNeuronsOutput[j];
        outputLayer[k].weights[j] += learningRate*update + learningMomentum*outputLayer[k].previousUpdate[j];
        outputLayer[k].previousUpdate[j] = update;

for(i=0; i<numInputNeurons; i++){
    for(j=0; j<numHiddenNeurons; j++){
        update = deltaH[j] * inputNeuronsOutput[i];
        hiddenLayer[j].weights[i] += learningRate*update + learningMomentum*hiddenLayer[j].previousUpdate[i];
        hiddenLayer[j].previousUpdate[i] = update;

for(k=0; k<numOutputNeurons; k++){

    update = learningRate*deltaO[k] + learningMomentum*outputLayer[k].previousBiasUpdate;

    outputLayer[k].bias += update;

    outputLayer[k].previousBiasUpdate = update;

for(j=0; j<numHiddenNeurons; j++){

    update = learningRate*deltaH[j] + learningMomentum*hiddenLayer[j].previousBiasUpdate;

    hiddenLayer[j].bias += update;

    hiddenLayer[j].previousBiasUpdate = update;
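// Note (added): the updates above implement online gradient descent with
// momentum. With learning rate eta (learningRate) and momentum term mu
// (learningMomentum), the per-weight rule is:
//
//   deltaO[k] = f'(o_k) * (t_k - o_k)                  // output-layer error term
//   deltaH[j] = f'(h_j) * sum_k( w[j][k] * deltaO[k] ) // back-propagated hidden error
//   w += eta * (delta * x) + mu * previousUpdate       // x is the input feeding w
//
// where f' is the derivative of the neuron's activation function.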
if( inputNeuronsOutput.size() != numInputNeurons ) inputNeuronsOutput.resize(numInputNeurons,0);
if( hiddenNeuronsOutput.size() != numHiddenNeurons ) hiddenNeuronsOutput.resize(numHiddenNeurons,0);
if( outputNeuronsOutput.size() != numOutputNeurons ) outputNeuronsOutput.resize(numOutputNeurons,0);

for(i=0; i<numInputNeurons; i++){
    trainingExample[i] = scale(trainingExample[i],inputVectorRanges[i].minValue,inputVectorRanges[i].maxValue,outputTargets.minValue,outputTargets.maxValue);

for(i=0; i<numInputNeurons; i++){
    input[0] = trainingExample[i];
    inputNeuronsOutput[i] = inputLayer[i].fire( input );

for(j=0; j<numHiddenNeurons; j++){
    hiddenNeuronsOutput[j] = hiddenLayer[j].fire( inputNeuronsOutput );

for(k=0; k<numOutputNeurons; k++){
    outputNeuronsOutput[k] = outputLayer[k].fire( hiddenNeuronsOutput );

for(k=0; k<numOutputNeurons; k++){
    outputNeuronsOutput[k] = scale(outputNeuronsOutput[k],outputTargets.minValue,outputTargets.maxValue,targetVectorRanges[k].minValue,targetVectorRanges[k].maxValue);

return outputNeuronsOutput;
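// Note (added): this overload scales each input from its stored training
// range into the network's internal target range (outputTargets), fires the
// input, hidden, and output layers in sequence, and finally rescales the
// outputs back into the original target-vector ranges.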
if( inputNeuronsOutput.size() != numInputNeurons ) inputNeuronsOutput.resize(numInputNeurons,0);
if( hiddenNeuronsOutput.size() != numHiddenNeurons ) hiddenNeuronsOutput.resize(numHiddenNeurons,0);
if( outputNeuronsOutput.size() != numOutputNeurons ) outputNeuronsOutput.resize(numOutputNeurons,0);

for(i=0; i<numInputNeurons; i++){
    inputNeuronsOutput[i] = inputLayer[i].fire( input );

for(j=0; j<numHiddenNeurons; j++){
    hiddenNeuronsOutput[j] = hiddenLayer[j].fire( inputNeuronsOutput );

for(k=0; k<numOutputNeurons; k++){
    outputNeuronsOutput[k] = outputLayer[k].fire( hiddenNeuronsOutput );
std::cout << "***************** MLP *****************\n";
std::cout << "NumInputNeurons: " << numInputNeurons << std::endl;
std::cout << "NumHiddenNeurons: " << numHiddenNeurons << std::endl;
std::cout << "NumOutputNeurons: " << numOutputNeurons << std::endl;

std::cout << "ScalingEnabled: " << useScaling << std::endl;

std::cout << "InputRanges: " << std::endl;
for(UINT i=0; i<numInputNeurons; i++){
    std::cout << "Input: " << i << "\t" << inputVectorRanges[i].minValue << "\t" << inputVectorRanges[i].maxValue << std::endl;

std::cout << "OutputRanges: " << std::endl;
for(UINT i=0; i<numOutputNeurons; i++){
    std::cout << "Output: " << i << "\t" << targetVectorRanges[i].minValue << "\t" << targetVectorRanges[i].maxValue << std::endl;

std::cout << "InputWeights:\n";
for(UINT i=0; i<numInputNeurons; i++){
    std::cout << "Neuron: " << i << " Bias: " << inputLayer[i].bias << " Weights: ";
    for(UINT j=0; j<inputLayer[i].weights.getSize(); j++){
        std::cout << inputLayer[i].weights[j] << "\t";
    } std::cout << std::endl;

std::cout << "HiddenWeights:\n";
for(UINT i=0; i<numHiddenNeurons; i++){
    std::cout << "Neuron: " << i << " Bias: " << hiddenLayer[i].bias << " Weights: ";
    for(UINT j=0; j<hiddenLayer[i].weights.getSize(); j++){
        std::cout << hiddenLayer[i].weights[j] << "\t";
    } std::cout << std::endl;

std::cout << "OutputWeights:\n";
for(UINT i=0; i<numOutputNeurons; i++){
    std::cout << "Neuron: " << i << " Bias: " << outputLayer[i].bias << " Weights: ";
    for(UINT j=0; j<outputLayer[i].weights.getSize(); j++){
        std::cout << outputLayer[i].weights[j] << "\t";
    } std::cout << std::endl;
for(UINT i=0; i<numInputNeurons; i++){
    if( isNAN(inputLayer[i].bias) ) return true;
    N = inputLayer[i].weights.getSize();
    for(UINT j=0; j<N; j++){
        if( isNAN(inputLayer[i].weights[j]) ) return true;

for(UINT i=0; i<numHiddenNeurons; i++){
    if( isNAN(hiddenLayer[i].bias) ) return true;
    N = hiddenLayer[i].weights.getSize();
    for(UINT j=0; j<N; j++){
        if( isNAN(hiddenLayer[i].weights[j]) ) return true;

for(UINT i=0; i<numOutputNeurons; i++){
    if( isNAN(outputLayer[i].bias) ) return true;
    N = outputLayer[i].weights.getSize();
    for(UINT j=0; j<N; j++){
        if( isNAN(outputLayer[i].weights[j]) ) return true;

bool inline MLP::isNAN(const Float &v) const{
    if( v != v ) return true;
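// Note (added): isNAN relies on the IEEE 754 rule that NaN compares unequal
// to every value, including itself, so (v != v) is true exactly when v is NaN.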
if( !file.is_open() ){
    errorLog << __GRT_LOG__ << " File is not open!" << std::endl;

file << "GRT_MLP_FILE_V2.0\n";

errorLog << __GRT_LOG__ << " Failed to save Regressifier base settings to file!" << std::endl;

file << "NumInputNeurons: " << numInputNeurons << std::endl;
file << "NumHiddenNeurons: " << numHiddenNeurons << std::endl;
file << "NumOutputNeurons: " << numOutputNeurons << std::endl;

file << "NumRandomTrainingIterations: " << numRestarts << std::endl;
file << "Momentum: " << momentum << std::endl;
file << "Gamma: " << gamma << std::endl;
file << "ClassificationMode: " << classificationModeActive << std::endl;
file << "UseNullRejection: " << useNullRejection << std::endl;
file << "RejectionThreshold: " << nullRejectionThreshold << std::endl;

file << "InputLayer: \n";
for(UINT i=0; i<numInputNeurons; i++){
    file << "InputNeuron: " << i+1 << std::endl;
    file << "NumInputs: " << inputLayer[i].numInputs << std::endl;
    file << "Bias: " << inputLayer[i].bias << std::endl;
    file << "Gamma: " << inputLayer[i].gamma << std::endl;
    file << "Weights: " << std::endl;
    for(UINT j=0; j<inputLayer[i].numInputs; j++){
        file << inputLayer[i].weights[j] << "\t";

file << "HiddenLayer: \n";
for(UINT i=0; i<numHiddenNeurons; i++){
    file << "HiddenNeuron: " << i+1 << std::endl;
    file << "NumInputs: " << hiddenLayer[i].numInputs << std::endl;
    file << "Bias: " << hiddenLayer[i].bias << std::endl;
    file << "Gamma: " << hiddenLayer[i].gamma << std::endl;
    file << "Weights: " << std::endl;
    for(UINT j=0; j<hiddenLayer[i].numInputs; j++){
        file << hiddenLayer[i].weights[j] << "\t";

file << "OutputLayer: \n";
for(UINT i=0; i<numOutputNeurons; i++){
    file << "OutputNeuron: " << i+1 << std::endl;
    file << "NumInputs: " << outputLayer[i].numInputs << std::endl;
    file << "Bias: " << outputLayer[i].bias << std::endl;
    file << "Gamma: " << outputLayer[i].gamma << std::endl;
    file << "Weights: " << std::endl;
    for(UINT j=0; j<outputLayer[i].numInputs; j++){
        file << outputLayer[i].weights[j] << "\t";
std::string activationFunction;

if( !file.is_open() ){
    errorLog << __GRT_LOG__ << " File is not open!" << std::endl;

if( word == "GRT_MLP_FILE_V1.0" ){
    return loadLegacyModelFromFile( file );

if( word != "GRT_MLP_FILE_V2.0" ){
    errorLog << __GRT_LOG__ << " Failed to find file header!" << std::endl;

errorLog << __GRT_LOG__ << " Failed to load regressifier base settings from file!" << std::endl;

if(word != "NumInputNeurons:"){
    errorLog << __GRT_LOG__ << " Failed to find NumInputNeurons!" << std::endl;
file >> numInputNeurons;
numInputDimensions = numInputNeurons;

if(word != "NumHiddenNeurons:"){
    errorLog << __GRT_LOG__ << " Failed to find NumHiddenNeurons!" << std::endl;
file >> numHiddenNeurons;

if(word != "NumOutputNeurons:"){
    errorLog << __GRT_LOG__ << " Failed to find NumOutputNeurons!" << std::endl;
file >> numOutputNeurons;

if(word != "InputLayerActivationFunction:"){
    errorLog << __GRT_LOG__ << " Failed to find InputLayerActivationFunction!" << std::endl;
file >> activationFunction;

if(word != "HiddenLayerActivationFunction:"){
    errorLog << __GRT_LOG__ << " Failed to find HiddenLayerActivationFunction!" << std::endl;
file >> activationFunction;

if(word != "OutputLayerActivationFunction:"){
    errorLog << __GRT_LOG__ << " Failed to find OutputLayerActivationFunction!" << std::endl;
file >> activationFunction;

if(word != "NumRandomTrainingIterations:"){
    errorLog << __GRT_LOG__ << " Failed to find NumRandomTrainingIterations!" << std::endl;
file >> numRestarts;

if(word != "Momentum:"){
    errorLog << __GRT_LOG__ << " Failed to find Momentum!" << std::endl;

if(word != "Gamma:"){
    errorLog << __GRT_LOG__ << " Failed to find Gamma!" << std::endl;

if(word != "ClassificationMode:"){
    errorLog << __GRT_LOG__ << " Failed to find ClassificationMode!" << std::endl;
file >> classificationModeActive;

if(word != "UseNullRejection:"){
    errorLog << __GRT_LOG__ << " Failed to find UseNullRejection!" << std::endl;
file >> useNullRejection;

if(word != "RejectionThreshold:"){
    errorLog << __GRT_LOG__ << " Failed to find RejectionThreshold!" << std::endl;
file >> nullRejectionThreshold;

if( trained ) initialized = true;
else init(numInputNeurons,numHiddenNeurons,numOutputNeurons);
inputLayer.resize( numInputNeurons );
hiddenLayer.resize( numHiddenNeurons );
outputLayer.resize( numOutputNeurons );

if(word != "InputLayer:"){
    errorLog << __GRT_LOG__ << " Failed to find InputLayer!" << std::endl;

for(UINT i=0; i<numInputNeurons; i++){
    UINT tempNeuronID = 0;

    if(word != "InputNeuron:"){
        errorLog << __GRT_LOG__ << " Failed to find InputNeuron!" << std::endl;
    file >> tempNeuronID;

    if( tempNeuronID != i+1 ){
        errorLog << __GRT_LOG__ << " InputNeuron ID does not match!" << std::endl;

    if(word != "NumInputs:"){
        errorLog << __GRT_LOG__ << " Failed to find NumInputs!" << std::endl;
    file >> inputLayer[i].numInputs;

    inputLayer[i].weights.resize( inputLayer[i].numInputs );

    if(word != "Bias:"){
        errorLog << __GRT_LOG__ << " Failed to find Bias!" << std::endl;
    file >> inputLayer[i].bias;

    if(word != "Gamma:"){
        errorLog << __GRT_LOG__ << " Failed to find Gamma!" << std::endl;
    file >> inputLayer[i].gamma;

    if(word != "Weights:"){
        errorLog << __GRT_LOG__ << " Failed to find Weights!" << std::endl;

    for(UINT j=0; j<inputLayer[i].numInputs; j++){
        file >> inputLayer[i].weights[j];

if(word != "HiddenLayer:"){
    errorLog << __GRT_LOG__ << " Failed to find HiddenLayer!" << std::endl;

for(UINT i=0; i<numHiddenNeurons; i++){
    UINT tempNeuronID = 0;

    if(word != "HiddenNeuron:"){
        errorLog << __GRT_LOG__ << " Failed to find HiddenNeuron!" << std::endl;
    file >> tempNeuronID;

    if( tempNeuronID != i+1 ){
        errorLog << __GRT_LOG__ << " HiddenNeuron ID does not match!" << std::endl;

    if(word != "NumInputs:"){
        errorLog << __GRT_LOG__ << " Failed to find NumInputs!" << std::endl;
    file >> hiddenLayer[i].numInputs;

    hiddenLayer[i].weights.resize( hiddenLayer[i].numInputs );

    if(word != "Bias:"){
        errorLog << __GRT_LOG__ << " Failed to find Bias!" << std::endl;
    file >> hiddenLayer[i].bias;

    if(word != "Gamma:"){
        errorLog << __GRT_LOG__ << " Failed to find Gamma!" << std::endl;
    file >> hiddenLayer[i].gamma;

    if(word != "Weights:"){
        errorLog << __GRT_LOG__ << " Failed to find Weights!" << std::endl;

    for(unsigned int j=0; j<hiddenLayer[i].numInputs; j++){
        file >> hiddenLayer[i].weights[j];

if(word != "OutputLayer:"){
    errorLog << __GRT_LOG__ << " Failed to find OutputLayer!" << std::endl;

for(UINT i=0; i<numOutputNeurons; i++){
    UINT tempNeuronID = 0;

    if(word != "OutputNeuron:"){
        errorLog << __GRT_LOG__ << " Failed to find OutputNeuron!" << std::endl;
    file >> tempNeuronID;

    if( tempNeuronID != i+1 ){
        errorLog << __GRT_LOG__ << " OutputNeuron ID does not match!" << std::endl;

    if(word != "NumInputs:"){
        errorLog << __GRT_LOG__ << " Failed to find NumInputs!" << std::endl;
    file >> outputLayer[i].numInputs;

    outputLayer[i].weights.resize( outputLayer[i].numInputs );

    if(word != "Bias:"){
        errorLog << __GRT_LOG__ << " Failed to find Bias!" << std::endl;
    file >> outputLayer[i].bias;

    if(word != "Gamma:"){
        errorLog << __GRT_LOG__ << " Failed to find Gamma!" << std::endl;
    file >> outputLayer[i].gamma;

    if(word != "Weights:"){
        errorLog << __GRT_LOG__ << " Failed to find Weights!" << std::endl;

    for(UINT j=0; j<outputLayer[i].numInputs; j++){
        file >> outputLayer[i].weights[j];
if( classificationModeActive )
    return numOutputNeurons;

return numInputNeurons;

return numHiddenNeurons;

return numOutputNeurons;

return inputLayerActivationFunction;

return hiddenLayerActivationFunction;

return outputLayerActivationFunction;

return learningRate;

return trainingError;

return classificationModeActive;

return !classificationModeActive;

return trainingErrorLog;

return useNullRejection;

return nullRejectionCoeff;

return nullRejectionThreshold;

if( trained ) return maxLikelihood;

if( trained && classificationModeActive ) return classLikelihoods;

if( trained && classificationModeActive ) return regressionData;

if( trained && classificationModeActive ) return predictedClassLabel;
std::string activationName;

switch(activationFunction){
    case(Neuron::LINEAR):
        activationName = "LINEAR";
        break;
    case(Neuron::SIGMOID):
        activationName = "SIGMOID";
        break;
    case(Neuron::BIPOLAR_SIGMOID):
        activationName = "BIPOLAR_SIGMOID";
        break;
    case(Neuron::TANH):
        activationName = "TANH";
        break;
    default:
        activationName = "UNKNOWN";
        break;
}

return activationName;

Neuron::Type activationFunction = Neuron::LINEAR;

if(activationName == "LINEAR" ){
    activationFunction = Neuron::LINEAR;
    return activationFunction;
}
if(activationName == "SIGMOID" ){
    activationFunction = Neuron::SIGMOID;
    return activationFunction;
}
if(activationName == "BIPOLAR_SIGMOID" ){
    activationFunction = Neuron::BIPOLAR_SIGMOID;
    return activationFunction;
}
if(activationName == "TANH" ){
    activationFunction = Neuron::TANH;
    return activationFunction;
}
return activationFunction;
if( activationFunction >= Neuron::LINEAR && activationFunction < Neuron::NUMBER_OF_ACTIVATION_FUNCTIONS ) return true;

warningLog << __GRT_LOG__ << " The activation function is not valid. It should be one of the Neuron ActivationFunctions enums." << std::endl;

this->inputLayerActivationFunction = activationFunction;

return init(numInputNeurons,numHiddenNeurons,numOutputNeurons);

warningLog << __GRT_LOG__ << " The activation function is not valid. It should be one of the Neuron ActivationFunctions enums." << std::endl;

this->hiddenLayerActivationFunction = activationFunction;

return init(numInputNeurons,numHiddenNeurons,numOutputNeurons);

warningLog << __GRT_LOG__ << " The activation function is not valid. It should be one of the Neuron ActivationFunctions enums." << std::endl;

this->outputLayerActivationFunction = activationFunction;

return init(numInputNeurons,numHiddenNeurons,numOutputNeurons);
if( momentum >= 0 && momentum <= 1.0 ){
    this->momentum = momentum;

warningLog << __GRT_LOG__ << " Gamma must be greater than zero!" << std::endl;

this->gamma = gamma;

return init(numInputNeurons,numHiddenNeurons,numOutputNeurons);

bool MLP::setNumRandomTrainingIterations(const UINT numRandomTrainingIterations){

this->useNullRejection = useNullRejection;

if( nullRejectionCoeff > 0 ){
    this->nullRejectionCoeff = nullRejectionCoeff;
bool MLP::loadLegacyModelFromFile( std::fstream &file ){

    if(word != "NumInputNeurons:"){
        errorLog << __GRT_LOG__ << " Failed to find NumInputNeurons!" << std::endl;
    file >> numInputNeurons;
    numInputDimensions = numInputNeurons;

    if(word != "NumHiddenNeurons:"){
        errorLog << __GRT_LOG__ << " Failed to find NumHiddenNeurons!" << std::endl;
    file >> numHiddenNeurons;

    if(word != "NumOutputNeurons:"){
        errorLog << __GRT_LOG__ << " Failed to find NumOutputNeurons!" << std::endl;
    file >> numOutputNeurons;

    if(word != "InputLayerActivationFunction:"){
        errorLog << __GRT_LOG__ << " Failed to find InputLayerActivationFunction!" << std::endl;

    if(word != "HiddenLayerActivationFunction:"){
        errorLog << __GRT_LOG__ << " Failed to find HiddenLayerActivationFunction!" << std::endl;

    if(word != "OutputLayerActivationFunction:"){
        errorLog << __GRT_LOG__ << " Failed to find OutputLayerActivationFunction!" << std::endl;

    if(word != "MinNumEpochs:"){
        errorLog << __GRT_LOG__ << " Failed to find MinNumEpochs!" << std::endl;
    file >> minNumEpochs;

    if(word != "MaxNumEpochs:"){
        errorLog << __GRT_LOG__ << " Failed to find MaxNumEpochs!" << std::endl;
    file >> maxNumEpochs;

    if(word != "NumRandomTrainingIterations:"){
        errorLog << __GRT_LOG__ << " Failed to find NumRandomTrainingIterations!" << std::endl;
    file >> numRestarts;

    if(word != "ValidationSetSize:"){
        errorLog << __GRT_LOG__ << " Failed to find ValidationSetSize!" << std::endl;
    file >> validationSetSize;

    if(word != "MinChange:"){
        errorLog << __GRT_LOG__ << " Failed to find MinChange!" << std::endl;

    if(word != "TrainingRate:"){
        errorLog << __GRT_LOG__ << " Failed to find TrainingRate!" << std::endl;
    file >> learningRate;

    if(word != "Momentum:"){
        errorLog << __GRT_LOG__ << " Failed to find Momentum!" << std::endl;

    if(word != "Gamma:"){
        errorLog << __GRT_LOG__ << " Failed to find Gamma!" << std::endl;

    if(word != "UseValidationSet:"){
        errorLog << __GRT_LOG__ << " Failed to find UseValidationSet!" << std::endl;
    file >> useValidationSet;

    if(word != "RandomiseTrainingOrder:"){
        errorLog << __GRT_LOG__ << " Failed to find RandomiseTrainingOrder!" << std::endl;
    file >> randomiseTrainingOrder;

    if(word != "UseScaling:"){
        errorLog << __GRT_LOG__ << " Failed to find UseScaling!" << std::endl;

    if(word != "ClassificationMode:"){
        errorLog << __GRT_LOG__ << " Failed to find ClassificationMode!" << std::endl;
    file >> classificationModeActive;

    if(word != "UseNullRejection:"){
        errorLog << __GRT_LOG__ << " Failed to find UseNullRejection!" << std::endl;
    file >> useNullRejection;

    if(word != "RejectionThreshold:"){
        errorLog << __GRT_LOG__ << " Failed to find RejectionThreshold!" << std::endl;
    file >> nullRejectionThreshold;
    inputLayer.resize( numInputNeurons );
    hiddenLayer.resize( numHiddenNeurons );
    outputLayer.resize( numOutputNeurons );

    if(word != "InputLayer:"){
        errorLog << __GRT_LOG__ << " Failed to find InputLayer!" << std::endl;

    for(UINT i=0; i<numInputNeurons; i++){
        UINT tempNeuronID = 0;

        if(word != "InputNeuron:"){
            errorLog << __GRT_LOG__ << " Failed to find InputNeuron!" << std::endl;
        file >> tempNeuronID;

        if( tempNeuronID != i+1 ){
            errorLog << __GRT_LOG__ << " InputNeuron ID does not match!" << std::endl;

        if(word != "NumInputs:"){
            errorLog << __GRT_LOG__ << " Failed to find NumInputs!" << std::endl;
        file >> inputLayer[i].numInputs;

        inputLayer[i].weights.resize( inputLayer[i].numInputs );

        if(word != "Bias:"){
            errorLog << __GRT_LOG__ << " Failed to find Bias!" << std::endl;
        file >> inputLayer[i].bias;

        if(word != "Gamma:"){
            errorLog << __GRT_LOG__ << " Failed to find Gamma!" << std::endl;
        file >> inputLayer[i].gamma;

        if(word != "Weights:"){
            errorLog << __GRT_LOG__ << " Failed to find Weights!" << std::endl;

        for(UINT j=0; j<inputLayer[i].numInputs; j++){
            file >> inputLayer[i].weights[j];

    if(word != "HiddenLayer:"){
        errorLog << __GRT_LOG__ << " Failed to find HiddenLayer!" << std::endl;

    for(UINT i=0; i<numHiddenNeurons; i++){
        UINT tempNeuronID = 0;

        if(word != "HiddenNeuron:"){
            errorLog << __GRT_LOG__ << " Failed to find HiddenNeuron!" << std::endl;
        file >> tempNeuronID;

        if( tempNeuronID != i+1 ){
            errorLog << __GRT_LOG__ << " HiddenNeuron ID does not match!" << std::endl;

        if(word != "NumInputs:"){
            errorLog << __GRT_LOG__ << " Failed to find NumInputs!" << std::endl;
        file >> hiddenLayer[i].numInputs;

        hiddenLayer[i].weights.resize( hiddenLayer[i].numInputs );

        if(word != "Bias:"){
            errorLog << __GRT_LOG__ << " Failed to find Bias!" << std::endl;
        file >> hiddenLayer[i].bias;

        if(word != "Gamma:"){
            errorLog << __GRT_LOG__ << " Failed to find Gamma!" << std::endl;
        file >> hiddenLayer[i].gamma;

        if(word != "Weights:"){
            errorLog << __GRT_LOG__ << " Failed to find Weights!" << std::endl;

        for(unsigned int j=0; j<hiddenLayer[i].numInputs; j++){
            file >> hiddenLayer[i].weights[j];

    if(word != "OutputLayer:"){
        errorLog << __GRT_LOG__ << " Failed to find OutputLayer!" << std::endl;

    for(UINT i=0; i<numOutputNeurons; i++){
        UINT tempNeuronID = 0;

        if(word != "OutputNeuron:"){
            errorLog << __GRT_LOG__ << " Failed to find OutputNeuron!" << std::endl;
        file >> tempNeuronID;

        if( tempNeuronID != i+1 ){
            errorLog << __GRT_LOG__ << " OutputNeuron ID does not match!" << std::endl;

        if(word != "NumInputs:"){
            errorLog << __GRT_LOG__ << " Failed to find NumInputs!" << std::endl;
        file >> outputLayer[i].numInputs;

        outputLayer[i].weights.resize( outputLayer[i].numInputs );

        if(word != "Bias:"){
            errorLog << __GRT_LOG__ << " Failed to find Bias!" << std::endl;
        file >> outputLayer[i].bias;

        if(word != "Gamma:"){
            errorLog << __GRT_LOG__ << " Failed to find Gamma!" << std::endl;
        file >> outputLayer[i].gamma;

        if(word != "Weights:"){
            errorLog << __GRT_LOG__ << " Failed to find Weights!" << std::endl;

        for(UINT j=0; j<outputLayer[i].numInputs; j++){
            file >> outputLayer[i].weights[j];

    inputVectorRanges.resize( numInputNeurons );
    targetVectorRanges.resize( numOutputNeurons );

    if(word != "InputVectorRanges:"){
        errorLog << __GRT_LOG__ << " Failed to find InputVectorRanges!" << std::endl;

    for(UINT j=0; j<inputVectorRanges.size(); j++){
        file >> inputVectorRanges[j].minValue;
        file >> inputVectorRanges[j].maxValue;

    if(word != "OutputVectorRanges:"){
        errorLog << __GRT_LOG__ << " Failed to find OutputVectorRanges!" << std::endl;

    for(UINT j=0; j<targetVectorRanges.size(); j++){
        file >> targetVectorRanges[j].minValue;
        file >> targetVectorRanges[j].maxValue;
bool MLP::setOutputTargets(){

    switch( outputLayerActivationFunction ){
        case Neuron::SIGMOID:
            outputTargets.minValue = 0.0;
            outputTargets.maxValue = 1.0;
            break;
        case Neuron::TANH:
            outputTargets.minValue = -1.0;
            outputTargets.maxValue = 1.0;
            break;
        default:
            outputTargets.minValue = 0;
            outputTargets.maxValue = 1.0;
            break;
    }

    return true;
}
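// Note (added): setOutputTargets keeps the scaling range consistent with the
// output-layer activation function: a SIGMOID output can only produce values
// in [0,1], while a tanh-style output spans [-1,1], so the target vectors are
// scaled into the matching interval before training.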