public void Backpropagate(decimal[] Cost, decimal BPrefix = 0)
{
    // Cache every perceptron's pre-activation (Z) so the activation
    // derivative can be evaluated for the whole layer.
    decimal[] NonLinearZs = new decimal[Perceptrons.Length];
    for (int i = 0; i < Perceptrons.Length; i++)
    {
        NonLinearZs[i] = Perceptrons[i].GetZ();
    }

    // Hoisted out of the loops: neither value changes per weight.
    decimal LR = Convert.ToDecimal(parentNeuralNetwork.LearningRate);
    decimal[] PreviousActivations = PreviousLayer.GetInput();

    for (int i = 0; i < Perceptrons.Length; i++)
    {
        Perceptron P = Perceptrons[i];

        // Prepare the prefix: (dC0/da0) * (da0/dZ0). A later layer passes its
        // accumulated product in BPrefix; the output layer (no prefix passed)
        // seeds the chain with the squared-error cost derivative instead of
        // discarding the parameter.
        decimal Prefix = BPrefix != 0m
            ? BPrefix
            : 2m * (P.CurrentActivation - Convert.ToDecimal(LastLabels[i].Strength));

        // da0/dZ0: the activation function's derivative at this perceptron's Z.
        Prefix *= ActivationFunctions
            .GetAppropriateDerivativeActivationFunction(LayerActivationFunction)(NonLinearZs, i)[i];

        // Update current weights: dZ0/dw_j is the previous layer's j-th activation.
        for (int j = 0; j < P.Weights.Length; j++)
        {
            P.Weights[j].Value -= LR * Prefix * PreviousActivations[j];
        }

        // Tell the previous layer to propagate using this perceptron's prefix.
        PreviousLayer.Backpropagate(Cost, Prefix);
    }
}
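
// A minimal sketch, assuming a squared-error cost, of the fully vectorized
// chain rule that the scalar BPrefix above approximates. Everything named
// here that does not appear in the method above (the DeltasFromNext
// parameter, PrevDeltas, and this vector-valued overload itself) is a
// hypothetical illustration, not part of the class: a single scalar prefix
// can only carry one perceptron's gradient, while each previous-layer
// neuron j actually needs dC/da_prev[j] = sum over i of Delta_i * w_ij,
// accumulated across every perceptron i of this layer.
public void Backpropagate(decimal[] DeltasFromNext)
{
    decimal LR = Convert.ToDecimal(parentNeuralNetwork.LearningRate);
    decimal[] PreviousActivations = PreviousLayer.GetInput();
    decimal[] PrevDeltas = new decimal[PreviousActivations.Length];

    // Cache the pre-activations once, as the method above does.
    decimal[] NonLinearZs = new decimal[Perceptrons.Length];
    for (int i = 0; i < Perceptrons.Length; i++)
    {
        NonLinearZs[i] = Perceptrons[i].GetZ();
    }

    for (int i = 0; i < Perceptrons.Length; i++)
    {
        Perceptron P = Perceptrons[i];

        // Delta_i = (dC/da_i) * (da_i/dZ_i). At the output layer the caller
        // would seed DeltasFromNext[i] with 2 * (activation_i - target_i).
        decimal Delta = DeltasFromNext[i] * ActivationFunctions
            .GetAppropriateDerivativeActivationFunction(LayerActivationFunction)(NonLinearZs, i)[i];

        for (int j = 0; j < P.Weights.Length; j++)
        {
            // Record this perceptron's contribution to the previous layer's
            // gradient using the weight value *before* it is updated.
            PrevDeltas[j] += Delta * P.Weights[j].Value;
            P.Weights[j].Value -= LR * Delta * PreviousActivations[j];
        }
    }

    // One recursive call per layer, carrying every neuron's gradient at once,
    // instead of one call per perceptron with a single shared scalar.
    PreviousLayer.Backpropagate(PrevDeltas);
}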