#define GRT_DLL_EXPORTS
#include "Neuron.h"

GRT_BEGIN_NAMESPACE
Neuron::Neuron(){
    activationFunction = LINEAR;
    numInputs = 0;
    gamma = 2.0; //Gain term for the bipolar sigmoid
    bias = 0;
    previousBiasUpdate = 0;
}
bool Neuron::init(const UINT numInputs, const UINT activationFunction){

    if( !validateActivationFunction(activationFunction) ){
        return false;
    }

    this->numInputs = numInputs;
    this->activationFunction = activationFunction;

    //Resize the weight and momentum buffers to match the new input dimension
    weights.resize(numInputs);
    previousUpdate.resize(numInputs);

    //Seed the random number generator used to initialize the weights
    random.setSeed( (unsigned long long)time(NULL) );

    //Randomize the weights over the range [-0.1 0.1] and zero the momentum terms
    for(unsigned int i=0; i<numInputs; i++){
        weights[i] = random.getRandomNumberUniform(-0.1,0.1);
        previousUpdate[i] = 0;
    }

    bias = random.getRandomNumberUniform(-0.1,0.1);

    return true;
}
void Neuron::clear(){
    numInputs = 0;
    bias = 0;
    previousBiasUpdate = 0;
    weights.clear();
    previousUpdate.clear();
}
Float Neuron::fire(const VectorFloat &x){
    Float y = 0;
    UINT i = 0;
    switch( activationFunction ){
        case(LINEAR):
            y = bias;
            for(i=0; i<numInputs; i++){ y += x[i] * weights[i]; }
            break;
        case(SIGMOID):
            y = bias;
            for(i=0; i<numInputs; i++){ y += x[i] * weights[i]; }
            //Clamp the weighted sum to avoid exp() overflow; the logistic saturates at 0 and 1
            if( y < -45.0 ){ y = 0; }
            else if( y > 45.0 ){ y = 1.0; }
            else{ y = 1.0/(1.0+exp(-y)); }
            break;
        case(BIPOLAR_SIGMOID):
            y = bias;
            for(i=0; i<numInputs; i++){ y += x[i] * weights[i]; }
            //The bipolar sigmoid saturates at -1 and 1, so clamp to those values (not 0)
            if( y < -45.0 ){ y = -1.0; }
            else if( y > 45.0 ){ y = 1.0; }
            else{ y = (2.0/(1.0+exp(-gamma*y))) - 1.0; }
            break;
    }
    return y;
}
Float Neuron::getDerivative(const Float &y){
    //The derivatives are expressed in terms of the neuron's output y, not its input
    Float yy = 0;
    switch( activationFunction ){
        case(LINEAR): yy = 1.0; break;
        case(SIGMOID): yy = y * (1.0 - y); break;
        case(BIPOLAR_SIGMOID):
            yy = (gamma * (1.0 - (y*y))) / 2.0;
            break;
    }
    return yy;
}
bool Neuron::validateActivationFunction(const UINT activationFunction){
    if( activationFunction >= LINEAR && activationFunction < NUMBER_OF_ACTIVATION_FUNCTIONS ) return true;
    return false;
}

GRT_END_NAMESPACE
This class implements a Neuron that is used by the Multilayer Perceptron. The helper signatures referenced by the implementation above are:

    virtual bool resize(const unsigned int size)                           //VectorFloat
    Float getRandomNumberUniform(Float minRange=0.0, Float maxRange=1.0)   //Random
    void setSeed(unsigned long long seed=0)                                //Random
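For orientation, here is a minimal usage sketch. It assumes the GRT umbrella header is available as <GRT/GRT.h> and that the activation constants are scoped as Neuron::SIGMOID; both are assumptions, not confirmed from this excerpt, so adjust them to your build.

//A minimal usage sketch, not part of the Neuron implementation above.
//Assumptions: the umbrella header <GRT/GRT.h> and the Neuron::SIGMOID scoping.
#include <GRT/GRT.h>
#include <cstdlib>
#include <iostream>
using namespace GRT;

int main(){
    Neuron neuron;

    //Initialize a 3-input neuron with a sigmoid activation function
    if( !neuron.init( 3, Neuron::SIGMOID ) ){
        std::cout << "Failed to init neuron!" << std::endl;
        return EXIT_FAILURE;
    }

    //Fire the neuron with a 3-dimensional input vector
    VectorFloat x(3);
    x[0] = 0.1; x[1] = 0.5; x[2] = -0.2;
    Float y = neuron.fire( x );

    //During backpropagation the derivative is evaluated from the output y
    Float dy = neuron.getDerivative( y );

    std::cout << "y: " << y << " dy: " << dy << std::endl;
    return EXIT_SUCCESS;
}

Passing the neuron's output y (rather than its input) to getDerivative() is deliberate: both sigmoid derivatives can be written in terms of the activation value itself, so backpropagation can reuse the already-computed output instead of evaluating exp() a second time.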