26 activationFunction = LINEAR;
30 previousBiasUpdate = 0;
// Initializes the neuron for the given number of inputs and activation
// function. NOTE(review): this source is a garbled extraction — the leading
// integers (35, 37, 41, ...) look like original-file line numbers fused into
// the text, and parts of the body (the if-branch body, the weight
// initialization, the return) are missing from this view.
35 bool Neuron::init(
const UINT numInputs,
const UINT activationFunction){
// Reject unsupported activation-function IDs up front; the rejection
// branch's body is not visible in this fragment.
37 if( !validateActivationFunction(activationFunction) ){
41 this->numInputs = numInputs;
42 this->activationFunction = activationFunction;
// Size the momentum buffer: one previous-update value per input weight.
45 previousUpdate.
resize(numInputs);
// Seed the RNG from wall-clock time — presumably used to randomize the
// initial weights in code not visible here; TODO confirm against full source.
49 random.
setSeed( (
unsigned long long)time(NULL) );
// Zero each per-weight momentum term.
53 for(
unsigned int i=0; i<numInputs; i++){
55 previousUpdate[i] = 0;
// Fragment of what appears to be a clear()/reset routine: zeroes the bias
// momentum term and releases the per-weight momentum buffer. The enclosing
// function signature is not visible in this extraction — verify against the
// full source.
67 previousBiasUpdate = 0;
69 previousUpdate.clear();
// Fragment of the neuron's feed-forward evaluation: accumulates the weighted
// sum of the inputs x[i]*weights[i] into y and squashes it with the selected
// activation function. The function header, the case labels for the first two
// branches, the bias term, break statements, and the switch closing are not
// visible in this extraction.
77 switch( activationFunction ){
// First branch (label not visible; presumably LINEAR): output is the raw
// weighted sum with no squashing.
80 for(i=0; i<numInputs; i++){
81 y += x[i] * weights[i];
// Second branch (label not visible; presumably SIGMOID): weighted sum passed
// through the logistic function 1/(1+e^-y).
86 for(i=0; i<numInputs; i++){
87 y += x[i] * weights[i];
// Clamp extreme pre-activations so exp(-y) cannot overflow/underflow;
// saturates the output directly to 0 or 1 instead of calling exp().
91 if( y < -45.0 ){ y = 0; }
92 else if( y > 45.0 ){ y = 1.0; }
94 y = 1.0/(1.0+exp(-y));
97 case(BIPOLAR_SIGMOID):
// Bipolar sigmoid: maps the weighted sum into (-1, 1) with steepness gamma.
99 for(i=0; i<numInputs; i++){
100 y += x[i] * weights[i];
// NOTE(review): saturating to 0 / 1.0 here does not match the bipolar output
// range (-1, 1) — expected saturation values would be -1.0 / 1.0. Possible
// upstream bug; confirm against the full source before changing.
103 if( y < -45.0 ){ y = 0; }
104 else if( y > 45.0 ){ y = 1.0; }
106 y = (2.0 / (1.0 + exp(-gamma * y))) - 1.0;
// Returns the derivative of the activation function expressed in terms of the
// neuron's OUTPUT y (not the pre-activation sum) — the usual formulation used
// by MLP backpropagation. Only the BIPOLAR_SIGMOID case is visible in this
// extraction; the other cases, the declaration of yy, and the return are cut.
114 Float Neuron::getDerivative(
const Float &y){
117 switch( activationFunction ){
// Bipolar sigmoid derivative in terms of the output: f'(x) = gamma*(1 - f(x)^2)/2.
124 case(BIPOLAR_SIGMOID):
125 yy = (gamma * (1.0 - (y*y))) / 2.0;
131 bool Neuron::validateActivationFunction(
const UINT actvationFunction){
132 if( actvationFunction >= LINEAR && actvationFunction < NUMBER_OF_ACTIVATION_FUNCTIONS )
return true;
This class implements a Neuron that is used by the Multilayer Perceptron.
virtual bool resize(const unsigned int size)
Float getRandomNumberUniform(Float minRange=0.0, Float maxRange=1.0)
void setSeed(unsigned long long seed=0)