public void Descend(double[] input, double momentum, double learningrate, bool output, bool usemomentum)
{
    for (int i = 0; i < Length; i++)
    {
        for (int ii = 0; ii < InputLength; ii++)
        {
            //Weight gradients
            WeightGradient[i, ii] = input[ii] * ActivationFunctions.TanhDerriv(Values[i]) * Errors[i];
            if (usemomentum)
            {
                //Momentum velocity already folds in the learning rate, then is added to the gradient
                WeightMomentum[i, ii] = (WeightMomentum[i, ii] * momentum) - (learningrate * WeightGradient[i, ii]);
                WeightGradient[i, ii] += WeightMomentum[i, ii];
            }
        }
        //Skip bias gradients for the output layer
        if (output) { continue; }
        //Bias gradients
        BiasGradient[i] = ActivationFunctions.TanhDerriv(Values[i]) * Errors[i];
        if (usemomentum)
        {
            BiasMomentum[i] = (BiasMomentum[i] * momentum) - (learningrate * BiasGradient[i]);
            BiasGradient[i] += BiasMomentum[i];
        }
    }
}
public void Backprop(Layer output)
{
    Errors = new double[Length];
    for (int k = 0; k < output.Length; k++)
    {
        for (int j = 0; j < Length; j++)
        {
            //Accumulate this layer's error from the next layer's weighted, activation-scaled errors
            Errors[j] += output.Weights[k, j] * ActivationFunctions.TanhDerriv(output.Values[k]) * output.Errors[k];
        }
    }
}
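For context, here is a sketch of how these two methods might be wired together for one training step. It assumes a Layers array ordered from first hidden layer to output layer, a CalcError helper that seeds the output layer's Errors from the targets, and that each layer's Values array serves as the next layer's input; none of these names appear in the code above and are only illustrative.

//Hypothetical training step, assuming a Layer[] Layers field, a CalcError(double[] target)
//helper on the output layer, and double[] Values exposed per layer; these are assumptions,
//not part of the methods shown above.
public void TrainStep(double[] input, double[] target, double momentum, double learningrate)
{
    //Seed errors at the output layer, then backpropagate them toward the first layer
    Layers[Layers.Length - 1].CalcError(target);
    for (int i = Layers.Length - 2; i >= 0; i--)
    {
        Layers[i].Backprop(Layers[i + 1]);
    }
    //Compute gradients layer by layer; the first layer reads the raw network input
    for (int i = 0; i < Layers.Length; i++)
    {
        double[] layerinput = (i == 0) ? input : Layers[i - 1].Values;
        bool isoutput = (i == Layers.Length - 1);
        Layers[i].Descend(layerinput, momentum, learningrate, isoutput, true);
    }
}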