// Back-propagation step: computes and stores this neuron's gradient (delta)
// using the derivative of the sigmoid activation.
//
// target == null -> hidden-layer neuron: gradient is the weighted sum of the
//                   downstream neurons' gradients.
// target != null -> output-layer neuron: gradient is derived from the error
//                   against the expected value.
public double CalculateGradient(double? target = null)
{
    if (target != null)
    {
        // Output layer: error against the target, scaled by sigmoid'(Value).
        return Gradient = CalculateError(target.Value) * Sigmoid.Derivative(Value);
    }

    // Hidden layer: propagate downstream gradients back through the synapses.
    var downstream = OutputSynapses.Sum(s => s.OutputNeuron.Gradient * s.Weight);
    return Gradient = downstream * Sigmoid.Derivative(Value);
}
// Computes and stores this neuron's error term, scaled by the sigmoid
// derivative of its output.
//
// Without a target (hidden layer) the error is back-propagated from the
// downstream neurons; with a target (output layer) it comes from
// CalculateError against the expected value.
public double CalculateSigmoidError(double? target = null)
{
    if (target == null)
    {
        // Hidden neuron: accumulate downstream errors scaled by synapse weights.
        var propagated = OutputSynapses.Sum(s => s.OutputNeuron.Error * s.Weight);
        return Error = propagated * Sigmoid.Derivative(Output);
    }

    // Output neuron: error comes straight from the target comparison.
    return Error = Sigmoid.Derivative(Output) * CalculateError(target.Value);
}
// Back-propagation step: computes and stores this neuron's gradient (delta)
// via the derivative of the sigmoid activation.
public double CalculateGradient(double? target = null)
{
    // Output-layer neuron: gradient is derived from the error against the target.
    if (target.HasValue)
    {
        return Gradient = CalculateError(target.Value) * Sigmoid.Derivative(Value);
    }

    // Hidden-layer neuron: weighted sum of the downstream neurons' gradients.
    double accumulated = 0.0;
    foreach (var synapse in OutputSynapses)
    {
        accumulated += synapse.OutputNeuron.Gradient * synapse.Weight;
    }
    return Gradient = accumulated * Sigmoid.Derivative(Value);
}
// Back-propagation step (single precision): computes and stores this neuron's
// gradient (delta) using the derivative of the sigmoid activation.
//
// target == null -> hidden-layer neuron: gradient is the weighted sum of the
//                   downstream neurons' gradients.
// target != null -> output-layer neuron: gradient is derived from the error
//                   against the expected value.
public float CalculateGradient(float? target = null)
{
    if (target == null)
    {
        // Manual accumulation instead of LINQ Sum: keeps the arithmetic in
        // float (Enumerable.Sum's float overload accumulates in double, which
        // would change the rounding) and avoids delegate/enumerator overhead
        // on this hot path.
        float sum = 0f;
        foreach (var synapse in OutputSynapses)
        {
            sum += synapse.OutputNeuron.Gradient * synapse.Weight;
        }
        return Gradient = sum * Sigmoid.Derivative(Value);
    }

    return Gradient = CalculateError(target.Value) * Sigmoid.Derivative(Value);
}
// Trains the network on a single input/output pair: runs a forward pass,
// computes per-neuron error terms, then applies weight and bias corrections
// scaled by learning_rate (a field; not visible in this chunk).
public void BackPropagate(Pair pair)
{
    var input = pair.input.ToArray();
    var output = pair.output.ToArray();

    // Forward pass populates each neuron's activation (outputA).
    ForwardPass(input);

    // Output layer: raw error = expected - actual, one neuron per output slot.
    // NOTE(review): assumes pair.output has at least outputLayer.neurons.Count
    // elements — shorter input would throw IndexOutOfRange; confirm at caller.
    for (var i = 0; i < outputLayer.neurons.Count; i++)
    {
        var neuron = outputLayer.neurons[i];
        var outputA = neuron.outputA;
        var expectedOutput = output[i];
        neuron.error = expectedOutput - outputA;
    }

    // Hidden layers, walked backwards from the output layer: each neuron's
    // error is the weight-scaled sum of its downstream neurons' errors.
    // NOTE(review): no sigmoid-derivative factor is applied here; the
    // derivative is folded in later at the weight/bias update steps —
    // confirm this matches the intended delta formulation.
    for (var layer = outputLayer.previous; layer != null; layer = layer.previous)
    {
        foreach (var neuron in layer.neurons)
        {
            neuron.error = 0;
            foreach (var connection in neuron.outputs)
            {
                var error = connection.output.error;
                var weight = connection.weight;
                neuron.error += error * weight;
            }
        }
    }

    // Weight update: delta-rule step using the downstream neuron's error,
    // the sigmoid derivative of its activation, and the upstream activation.
    foreach (var connection in connections)
    {
        var error = connection.output.error;
        var gradient = Sigmoid.Derivative(connection.output.outputA);
        var previousOutputA = connection.input.outputA;
        connection.weight += learning_rate * error * gradient * previousOutputA;
    }

    // Bias update for every neuron in every layer.
    // NOTE(review): this loop starts at inputLayer, so input-layer biases are
    // adjusted too (using errors that were never assigned for input neurons) —
    // verify that is intentional.
    for (var layer = inputLayer; layer != null; layer = layer.next)
    {
        foreach (var neuron in layer.neurons)
        {
            var error = neuron.error;
            var gradient = Sigmoid.Derivative(neuron.outputA);
            neuron.bias += learning_rate * error * gradient;
        }
    }
}
// Trains the network on a mini-batch: accumulates cost-partials averaged over
// the batch, then applies one weight/bias correction at the end.
// Chain-rule naming: dCdA = dCost/dActivation, dAdZ = dActivation/dPreActivation,
// dZdA / dZdW / dZdB = dPreActivation w.r.t. upstream activation / weight / bias.
public void BackPropagate(Batch batch)
{
    var learning_rate = 0.01;

    // Reset per-batch accumulators on every neuron and connection.
    foreach (var neuron in neurons)
    {
        neuron.error = 0;
    }
    foreach (var connection in connections)
    {
        connection.costCorrection = 0;
    }

    foreach (var pair in batch)
    {
        var input = pair.input.ToArray();
        var output = pair.output.ToArray();

        // Forward pass populates each neuron's activation (outputA).
        ForwardPass(input);

        // Output layer: dC/dA of squared error = 2 * (expected - actual),
        // averaged over the batch by dividing each contribution by batch.Count.
        for (var i = 0; i < outputLayer.neurons.Count; i++)
        {
            var neuron = outputLayer.neurons[i];
            var outputA = neuron.outputA;
            var expectedOutput = output[i];
            var error = expectedOutput - outputA;
            var dCdA = 2 * error;
            neuron.error += dCdA / batch.Count;
        }

        // Hidden layers, walked backwards: propagate dC/dA upstream and
        // accumulate the per-connection weight partial at the same time.
        // NOTE(review): connection.output.error read here is the running
        // batch-averaged accumulator, not this pair's own error — for layers
        // two or more steps from the output this mixes partially-averaged
        // values across pairs; confirm this averaging scheme is intended.
        for (var layer = outputLayer.previous; layer != null; layer = layer.previous)
        {
            foreach (var neuron in layer.neurons)
            {
                foreach (var connection in neuron.outputs)
                {
                    var dCdA = connection.output.error;
                    var dAdZ = Sigmoid.Derivative(connection.output.outputA);
                    var dZdA = connection.weight;
                    var dZdW = connection.input.outputA;
                    neuron.error += dCdA * dAdZ * dZdA / batch.Count;
                    connection.costCorrection += dCdA * dAdZ * dZdW / batch.Count;
                }
            }
        }
    }

    // Apply the accumulated batch-averaged corrections.
    // (error is defined as expected - actual, so += moves downhill on the cost.)
    foreach (var connection in connections)
    {
        connection.weight += connection.costCorrection * learning_rate;
    }

    // Bias update for every neuron in every layer (dZ/dB = 1).
    // NOTE(review): includes the input layer — verify that is intentional.
    for (var layer = inputLayer; layer != null; layer = layer.next)
    {
        foreach (var neuron in layer.neurons)
        {
            var dCdA = neuron.error;
            var dAdZ = Sigmoid.Derivative(neuron.outputA);
            var dZdB = 1;
            neuron.bias += dCdA * dAdZ * dZdB * learning_rate;
        }
    }
}