/// <summary>
/// Recursively propagates error deltas backwards from <paramref name="backwardsPassDeltas"/>
/// (the deltas of the layer in front of <paramref name="layer"/>) through
/// <paramref name="layer"/> and all of its previous layers, adjusting node and bias
/// weights along the way via <see cref="UpdateNodeWeight"/> and <see cref="UpdateBiasNodeWeight"/>.
/// </summary>
/// <param name="layer">The layer whose deltas and incoming weights are computed/updated.</param>
/// <param name="backwardsPassDeltas">Delta per node of the layer immediately in front of this one.</param>
/// <param name="momentum">Momentum tracker for this layer; may be null (recursion steps it backwards per previous layer).</param>
private static void RecurseBackpropagation(Layer layer, Dictionary<Node, double> backwardsPassDeltas, Momentum momentum)
{
    if (!layer.PreviousLayers.Any())
    {
        // Input layer: no incoming weights to adjust, recursion bottoms out here.
        return;
    }

    // One delta per node in this layer — presize to avoid rehashing.
    var deltas = new Dictionary<Node, double>(layer.Nodes.Length);
    for (var i = 0; i < layer.Nodes.Length; i++)
    {
        var node = layer.Nodes[i];

        // Weighted sum of the forward layer's deltas through the weights feeding out of this node.
        // Enumerating the key/value pairs directly avoids a second hash lookup per entry
        // compared with iterating Keys and indexing back into the dictionary.
        // NOTE(review): backPassNode.Weights[node].Value is read after the caller has already
        // run UpdateNodeWeight on those weights — confirm whether deltas are intended to be
        // computed from pre-update or post-update weight values.
        var sumDeltaWeights = backwardsPassDeltas.Sum(
            backPass => backPass.Value * backPass.Key.Weights[node].Value);

        var delta = sumDeltaWeights * LogisticFunction.ComputeDifferentialGivenOutput(node.Output);
        deltas.Add(node, delta);

        foreach (var prevNode in node.Weights.Keys)
        {
            UpdateNodeWeight(node, prevNode, delta, momentum, i);
        }
        foreach (var prevLayer in node.BiasWeights.Keys)
        {
            UpdateBiasNodeWeight(node, prevLayer, delta, momentum, i);
        }
    }

    // Continue the backwards pass into every previous layer, stepping the momentum
    // tracker to the branch matching that layer's index.
    for (var i = 0; i < layer.PreviousLayers.Length; i++)
    {
        RecurseBackpropagation(layer.PreviousLayers[i], deltas, momentum?.StepBackwards(i));
    }
}
/// <summary>
/// Runs a full backpropagation pass: first adjusts the output layer's weights against
/// the target outputs, then recursively propagates the resulting deltas backwards
/// through every layer feeding into it.
/// </summary>
/// <param name="outputLayer">The network's final layer.</param>
/// <param name="currentOutputs">Outputs produced by the forward pass.</param>
/// <param name="targetOutputs">Expected outputs to train towards.</param>
/// <param name="learningRate">Learning rate applied when updating the output layer.</param>
/// <param name="momentum">Momentum tracker; may be null (stepped backwards per previous layer).</param>
private static void DoBackpropagation(Layer outputLayer, double[] currentOutputs, double[] targetOutputs, double learningRate, Momentum momentum)
{
    // Output layer is handled separately: its deltas come straight from the error signal.
    var deltas = UpdateOutputLayer(outputLayer, currentOutputs, targetOutputs, learningRate, momentum);

    // Hand those deltas to each preceding layer, stepping the momentum tracker to the
    // branch matching the layer's index.
    var previousLayers = outputLayer.PreviousLayers;
    for (var index = 0; index < previousLayers.Length; index++)
    {
        RecurseBackpropagation(previousLayers[index], deltas, momentum?.StepBackwards(index));
    }
}