/// <summary>
/// Allocates this layer's buffers and initializes its weights for the given
/// fan-in / fan-out, storing the activation strategy used by forward/backward passes.
/// </summary>
/// <param name="NumberOfInput">Number of inputs feeding this layer (fan-in).</param>
/// <param name="NumberOfOutput">Number of neurons in this layer (fan-out).</param>
/// <param name="ActivationStrategy">Activation/derivative pair applied per neuron.</param>
public void InitializeLayer(int NumberOfInput, int NumberOfOutput, ActivationStrategy ActivationStrategy)
{
    this.NumberOfInput = NumberOfInput;
    this.NumberOfOutput = NumberOfOutput;
    this.ActivationStrategy = ActivationStrategy;

    // FIX: allocate from the parameters directly. The original mixed the
    // PascalCase members with camelCase names (numberOfInput/numberOfOutput),
    // which could silently read a stale or separate backing field.
    this.Input = new double[NumberOfInput];
    this.Output = new double[NumberOfOutput];
    // Weights and WeightsDelta are indexed [output, input] (see FeedForward).
    this.Weights = new double[NumberOfOutput, NumberOfInput];
    this.Gamma = new double[NumberOfOutput];
    this.WeightsDelta = new double[NumberOfOutput, NumberOfInput];
    this.Error = new double[NumberOfOutput];

    InitializeWeights();
}
/// <summary>
/// Forward pass: Output[i] = activation(sum over j of Input[j] * Weights[i, j]).
/// The incoming vector is cached in <c>this.Input</c> for use by backpropagation.
/// NOTE(review): no bias term is applied — confirm that is intentional.
/// </summary>
/// <param name="Input">Input vector; expected length is NumberOfInput.</param>
/// <returns>The layer's output buffer (shared, not a copy).</returns>
public double[] FeedForward(double[] Input)
{
    this.Input = Input;
    for (int i = 0; i < NumberOfOutput; i++)
    {
        // Accumulate the weighted sum in a local before activating.
        double sum = 0;
        for (int j = 0; j < NumberOfInput; j++)
        {
            // FIX: use the Weights member that InitializeLayer populates; the
            // original read a camelCase `weights`, risking a stale field.
            sum += this.Input[j] * Weights[i, j];
        }
        Output[i] = ActivationStrategy.Activation(sum);
    }
    return Output;
}
/// <summary>
/// Backpropagation for the output layer: computes the raw error against the
/// expected values, scales it by the activation derivative into Gamma, and
/// accumulates per-weight deltas (Gamma[i] * Input[j]).
/// </summary>
/// <param name="expected">Target output vector; expected length is NumberOfOutput.</param>
public void BackPropOutput(double[] expected)
{
    for (int neuron = 0; neuron < NumberOfOutput; neuron++)
    {
        // Raw error is actual minus expected.
        Error[neuron] = Output[neuron] - expected[neuron];
        // Gamma scales the error by the activation derivative at this output.
        Gamma[neuron] = Error[neuron] * ActivationStrategy.DeActivation(Output[neuron]);
        // Each incoming weight's delta is gamma times the corresponding input.
        for (int source = 0; source < NumberOfInput; source++)
        {
            WeightsDelta[neuron, source] = Gamma[neuron] * Input[source];
        }
    }
}
/// <summary>
/// Builds the network from a layer-size specification: e.g. { 3, 4, 2 } creates
/// two MLP layers (3→4 and 4→2), all sharing one activation strategy.
/// </summary>
/// <param name="Layer">Neuron counts per layer, input first; needs at least two entries.</param>
/// <param name="ActivationFunction">Activation strategy used by every layer.</param>
public NeuralNetwork(int[] Layer, ActivationStrategy ActivationFunction)
{
    Layers = new MLPLayer[Layer.Length - 1];
    for (int i = 0; i < Layers.Length; i++)
    {
        Layers[i] = new MLPLayer();
        Layers[i].InitializeLayer(Layer[i], Layer[i + 1], ActivationFunction);
    }

    // BUG FIX: the original if/else appended only Layer[0..Length-2], dropping
    // the output layer size (e.g. {3,4,2} produced "3-4" instead of "3-4-2").
    this.layerBuild = string.Join("-", Layer);

    this.Activation = ActivationFunction;
}
/// <summary>
/// Backpropagation for a hidden layer: each neuron's gamma is the weighted sum
/// of the downstream layer's gamma values (through the downstream weights),
/// scaled by the activation derivative; deltas are then Gamma[i] * Input[j].
/// </summary>
/// <param name="gammaForward">Gamma vector of the next (downstream) layer.</param>
/// <param name="weightsForward">Weights of the next layer, indexed [next-neuron, this-neuron].</param>
public void BackPropHidden(double[] gammaForward, double[,] weightsForward)
{
    for (int neuron = 0; neuron < NumberOfOutput; neuron++)
    {
        // Sum the downstream error signal routed back through each connection.
        double accumulated = 0;
        for (int next = 0; next < gammaForward.Length; next++)
        {
            accumulated += gammaForward[next] * weightsForward[next, neuron];
        }
        Gamma[neuron] = accumulated * ActivationStrategy.DeActivation(Output[neuron]);

        for (int source = 0; source < NumberOfInput; source++)
        {
            WeightsDelta[neuron, source] = Gamma[neuron] * Input[source];
        }
    }
}