#define GRT_DLL_EXPORTS
#include "Neuron.h"

GRT_BEGIN_NAMESPACE

Neuron::Neuron(){
    gamma = 2.0; //Default gamma used by the bipolar sigmoid activation function
    bias = 0;
    previousBiasUpdate = 0;
    numInputs = 0;
    activationFunction = LINEAR;
}

Neuron::Neuron( const Neuron &rhs ){
    this->gamma = rhs.gamma;
    this->bias = rhs.bias;
    this->previousBiasUpdate = rhs.previousBiasUpdate;
    this->weights = rhs.weights;
    this->previousUpdate = rhs.previousUpdate;
    this->numInputs = rhs.numInputs;
    this->activationFunction = rhs.activationFunction;
}

Neuron& Neuron::operator=(const Neuron &rhs){
    if( this != &rhs ){
        this->gamma = rhs.gamma;
        this->bias = rhs.bias;
        this->previousBiasUpdate = rhs.previousBiasUpdate;
        this->weights = rhs.weights;
        this->previousUpdate = rhs.previousUpdate;
        this->numInputs = rhs.numInputs;
        this->activationFunction = rhs.activationFunction;
    }
    return *this;
}

bool Neuron::init(const UINT numInputs,
                  const Type activationFunction,
                  Random &random,
                  const Float minWeightRange,
                  const Float maxWeightRange,
                  const Float minBiasRange,
                  const Float maxBiasRange){

    if( !validateActivationFunction(activationFunction) ){
        return false;
    }

    this->numInputs = numInputs;
    this->activationFunction = activationFunction;

    weights.resize(numInputs);
    previousUpdate.resize(numInputs);

    //Randomise the weights between [minWeightRange maxWeightRange]
    for(unsigned int i=0; i<numInputs; i++){
        weights[i] = random.getUniform(minWeightRange,maxWeightRange);
        previousUpdate[i] = 0;
    }

    //Randomise the bias between [minBiasRange maxBiasRange]
    bias = random.getUniform(minBiasRange,maxBiasRange);
    previousBiasUpdate = 0;

    return true;
}

void Neuron::clear(){
    numInputs = 0;
    bias = 0;
    previousBiasUpdate = 0;
    weights.clear();
    previousUpdate.clear();
}

Float Neuron::fire(const VectorFloat &x){

    Float y = 0;
    UINT i = 0;

    switch( activationFunction ){
        case(LINEAR):
            y = bias;
            for(i=0; i<numInputs; i++){
                y += x[i] * weights[i];
            }
            break;
        case(SIGMOID):
            y = bias;
            for(i=0; i<numInputs; i++){
                y += x[i] * weights[i];
            }
            y = 1.0/(1.0+exp(-y));
            break;
        case(BIPOLAR_SIGMOID):
            y = bias;
            for(i=0; i<numInputs; i++){
                y += x[i] * weights[i];
            }
            y = (2.0 / (1.0 + exp(-gamma * y))) - 1.0;
            break;
        case(TANH):
            y = bias;
            for(i=0; i<numInputs; i++){
                y += x[i] * weights[i];
            }
            y = tanh( y );
            break;
    }

    return y;
}

Float Neuron::getDerivative(const Float &y){

    //Note: the derivative of each activation function is expressed in terms of
    //the neuron's output y rather than its input
    Float yy = 0;
    switch( activationFunction ){
        case(LINEAR):
            yy = 1.0;
            break;
        case(SIGMOID):
            yy = y * (1.0 - y);
            break;
        case(BIPOLAR_SIGMOID):
            yy = (gamma * (1.0 - (y*y))) / 2.0;
            break;
        case(TANH):
            yy = 1.0 - (y*y);
            break;
    }
    return yy;
}

bool Neuron::validateActivationFunction(const Type activationFunction){
    if( activationFunction >= LINEAR && activationFunction < NUMBER_OF_ACTIVATION_FUNCTIONS ) return true;
    return false;
}

GRT_END_NAMESPACE
This class implements a Neuron that is used by the Multilayer Perceptron.
This file contains the Random class, a useful wrapper for generating cross platform random functions...
virtual bool resize(const unsigned int size)
Float getUniform(const Float &minRange=0.0, const Float &maxRange=1.0)
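Taken together, these pieces show how a single MLP neuron is set up and evaluated: Random::getUniform supplies the initial weights and bias, init allocates and randomises them, fire computes the activation for an input vector, and getDerivative gives the slope used during backpropagation. The sketch below is a minimal, hypothetical usage example rather than library code; it assumes the classes live in a GRT namespace, that the Type enum (with values such as SIGMOID) is nested inside Neuron, that VectorFloat is the Float vector type accepted by fire, and the include paths shown.

#include <iostream>
#include <cstdlib>
#include "Neuron.h"   //assumed path to the Neuron class shown above
#include "Random.h"   //assumed path to the Random class described above

using namespace GRT;  //assumption: the classes live in the GRT namespace

int main(){

    Random random;

    //Initialise a 3-input neuron with a sigmoid activation function, small
    //random weights in [-0.1 0.1] and a bias in [-0.1 0.1]
    Neuron neuron;
    if( !neuron.init( 3, Neuron::SIGMOID, random, -0.1, 0.1, -0.1, 0.1 ) ){
        std::cout << "Failed to init neuron!" << std::endl;
        return EXIT_FAILURE;
    }

    //Fire the neuron with a 3-dimensional input vector
    VectorFloat x(3);
    x[0] = 0.2; x[1] = -0.5; x[2] = 0.9;
    Float y = neuron.fire( x );

    //For the sigmoid, the derivative is computed directly from the output y,
    //which is what the MLP's backpropagation step needs
    Float dy = neuron.getDerivative( y );

    std::cout << "output: " << y << " derivative: " << dy << std::endl;

    return EXIT_SUCCESS;
}

Keeping the initial weight and bias ranges small (e.g. [-0.1 0.1]) avoids starting the sigmoid in its saturated region, where getDerivative returns values close to zero.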