public void BackPropagationIsCorrect()
{
    NetworkMatrix weights = new NetworkMatrix(new double[,] { { 1, 2 }, { 3, 5 } });
    Layer2 layer = Layer2.CreateLinearLayer(weights);

    NetworkVector layerinput = new NetworkVector(new double[] { 1, -1 });
    layer.Run(layerinput);

    NetworkVector outputgradient = new NetworkVector(new double[] { 7, 11 });

    // Weights gradient: outer product of the output gradient and the layer input.
    NetworkMatrix weightsGradientCheck = new NetworkMatrix(new double[,] { { 7, -7 }, { 11, -11 } });
    Assert.AreEqual(weightsGradientCheck, layer.WeightsGradient(outputgradient));

    // Biases gradient: equal to the output gradient.
    NetworkVector biasesGradientCheck = new NetworkVector(new double[] { 7, 11 });
    Assert.AreEqual(biasesGradientCheck, layer.BiasesGradient(outputgradient));

    // Input gradient: transposed weights times the output gradient.
    NetworkVector inputGradientCheck = new NetworkVector(new double[] { 40, 69 });
    Assert.AreEqual(inputGradientCheck, layer.InputGradient(outputgradient));
}
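The expected values in the test follow from the standard gradients of a linear layer y = W x + b with upstream gradient g: dW = g xᵀ, db = g, and dx = Wᵀ g. The sketch below computes those checks by hand on plain arrays; it is illustrative only, and the helper name ExpectedGradients is not part of the library under test.

// A minimal sketch, assuming the usual linear-layer gradient formulas.
// ExpectedGradients is a hypothetical helper, not a library method.
static (double[,] dW, double[] db, double[] dx) ExpectedGradients(double[,] W, double[] x, double[] g)
{
    int rows = g.Length, cols = x.Length;

    // dW[i, j] = g[i] * x[j]  (outer product of output gradient and input)
    var dW = new double[rows, cols];
    for (int i = 0; i < rows; i++)
        for (int j = 0; j < cols; j++)
            dW[i, j] = g[i] * x[j];

    // db = g  (the bias gradient is the output gradient itself)
    var db = (double[])g.Clone();

    // dx[j] = sum_i W[i, j] * g[i]  (transposed weights times the output gradient)
    var dx = new double[cols];
    for (int j = 0; j < cols; j++)
        for (int i = 0; i < rows; i++)
            dx[j] += W[i, j] * g[i];

    return (dW, db, dx);
}

With W = { { 1, 2 }, { 3, 5 } }, x = { 1, -1 }, g = { 7, 11 }, this yields dW = { { 7, -7 }, { 11, -11 } }, db = { 7, 11 }, and dx = { 1*7 + 3*11, 2*7 + 5*11 } = { 40, 69 }, matching the three assertions above.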