// Replaces three steps - (b), (c) and (d) - with one.
public void Backpropagate()
{
    BackpropagationNeuron targetNeuron = TargetNeuron as BackpropagationNeuron;

    // Calculate the gradient of the synapse (2.17) ...
    gradient = targetNeuron.Gradient * targetNeuron.Derivative * SourceNeuron.Output;

    // ... and accumulate it into the error.
    error += gradient;
}
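The accumulated error is what the weight-update step consumes at the end of a batch. The following is only a minimal sketch of such a step, assuming a hypothetical learningRate parameter and a writable Weight property; the library's actual update code may differ:

public void UpdateWeight(double learningRate)
{
    // Hypothetical batch-mode update (not part of the code above): descend along
    // the error accumulated over the whole training set, then reset the accumulator.
    Weight -= learningRate * error;
    error = 0.0;
}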
/// <summary>
/// Calculates the gradient of the neuron using the backpropagation algorithm.
/// <em>(2.19)</em>
/// </summary>
public void CalculateGradient()
{
    // Initialize the gradient.
    gradient = 0.0;

    // Back-propagate the weighted gradient from each target neuron to this (source) neuron
    // via the synapse connecting them.
    foreach (BackpropagationSynapse targetSynapse in TargetSynapses)
    {
        BackpropagationNeuron targetNeuron = targetSynapse.TargetNeuron as BackpropagationNeuron;
        gradient += targetNeuron.Gradient * targetNeuron.Derivative * targetSynapse.Weight;
    }
}
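In conventional notation (an assumption; equation (2.19) in the text may use different symbols), the loop computes the error signal of a hidden neuron j from the neurons k it feeds into:

\[
\frac{\partial E}{\partial y_j} = \sum_{k} \frac{\partial E}{\partial y_k}\,\varphi'(\mathrm{net}_k)\,w_{jk},
\]

where Gradient stores \( \partial E / \partial y \), Derivative stores \( \varphi'(\mathrm{net}) \), and \( w_{jk} \) is the weight of the synapse from j to k.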
/// <summary>
/// Decorates an activation layer as a backpropagation (activation) layer.
/// </summary>
/// <param name="activationLayer">The (activation) layer to be decorated as a backpropagation (activation) layer.</param>
/// <param name="parentNetwork">The parent network.</param>
public BackpropagationLayer(IActivationLayer activationLayer, INetwork parentNetwork)
    : base(activationLayer, parentNetwork)
{
    // Ensure the activation function of the layer is derivable.
    if (!(ActivationFunction is IDerivableActivationFunction))
    {
        // TODO: Throw an exception informing the client that in order for the layer to undergo training
        // using the error backpropagation algorithm, its activation function has to be derivable
        // (i.e. it has to implement the IDerivableActivationFunction interface).
        throw new Exception();
    }

    // Decorate the neurons.
    for (int i = 0; i < NeuronCount; i++)
    {
        Neurons[i] = new BackpropagationNeuron(Neurons[i], this);
    }
}
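One way the TODO could be resolved is shown below; this is a sketch, and the library may well define a dedicated exception type instead of InvalidOperationException:

// Hypothetical replacement for the bare 'throw new Exception();' above.
throw new InvalidOperationException(
    "The layer cannot be trained using error backpropagation: its activation " +
    "function must be derivable, i.e. it must implement the " +
    "IDerivableActivationFunction interface.");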
/// <summary>
/// Calculates the gradient of the synapse using the backpropagation algorithm.
/// <em>(2.17)</em>
/// </summary>
public void CalculateGradient()
{
    BackpropagationNeuron targetNeuron = TargetNeuron as BackpropagationNeuron;
    gradient = targetNeuron.Gradient * targetNeuron.Derivative * SourceNeuron.Output;
}
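In the same (assumed) notation as above, this computes the gradient of the error with respect to the synapse's weight:

\[
\frac{\partial E}{\partial w_{ij}} = \frac{\partial E}{\partial y_j}\,\varphi'(\mathrm{net}_j)\,y_i,
\]

where \( y_i \) is the output of the source neuron i, as read from SourceNeuron.Output.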