public void LinearLayerHasRightRun()
{
    double[,] weights = new double[,] { { 1, 0, 1 }, { 1, 1, 0 } };
    NetworkVector inputvector = new NetworkVector(new double[] { 1, 2, 3 });
    Layer layer = new LinearLayer(weights);

    layer.Run(inputvector);

    double[] result = layer.Output.ToArray();
    double[] expectedResult = new double[] { 4, 3 };
    Assert.AreEqual(expectedResult[0], result[0]);
    Assert.AreEqual(expectedResult[1], result[1]);
}
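// Expected output, worked by hand: Output = W * x, so
//   row 0: 1*1 + 0*2 + 1*3 = 4
//   row 1: 1*1 + 1*2 + 0*3 = 3
// which matches the { 4, 3 } checked above (assuming the biases initialize to
// zero, which the BackPropagateIsCorrect test below also relies on).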
public void BackPropagateIsCorrect()
{
    double[,] weights = new double[,] { { 1, 2 }, { 3, 5 } };
    Layer layer = new LinearLayer(weights);
    NetworkVector layerinput = new NetworkVector(new double[] { 1, -1 });
    layer.Run(layerinput);

    NetworkVector outputgradient = new NetworkVector(new double[] { 7, 11 });
    layer.BackPropagate(outputgradient);

    // The layer is square (two inputs, two outputs), so NumberOfInputs bounds every loop below.
    double[,] weightsCheck = new double[,] { { -6, 9 }, { -8, 16 } };
    LayerState state = layer.State;
    for (int i = 0; i < layer.NumberOfInputs; i++)
    {
        for (int j = 0; j < layer.NumberOfInputs; j++)
        {
            Assert.AreEqual(weightsCheck[i, j], state.Weights[i, j], string.Format("Failed for (i, j) = ({0}, {1})", i, j));
        }
    }

    double[] biasesCheck = new double[] { -7, -11 };
    for (int i = 0; i < layer.NumberOfInputs; i++)
    {
        Assert.AreEqual(biasesCheck[i], layer.State.Biases[i]);
    }

    double[] inputGradientCheck = new double[] { 40, 69 };
    double[] inputGradientValues = layer.InputGradient.ToArray();
    for (int i = 0; i < layer.NumberOfInputs; i++)
    {
        Assert.AreEqual(inputGradientCheck[i], inputGradientValues[i], string.Format("Failure for input {0}", i));
    }
}
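// Derivation of the expected values above, assuming a plain gradient-descent
// update with a unit learning rate (an assumption consistent with the check
// values; the library may apply the step differently internally):
//   weight gradient  dW = outputgradient * layerinput^T = { { 7, -7 }, { 11, -11 } }
//   updated weights  W - dW = { { 1-7, 2+7 }, { 3-11, 5+11 } } = { { -6, 9 }, { -8, 16 } }
//   updated biases   b - outputgradient = { 0-7, 0-11 } = { -7, -11 }
//   input gradient   W^T * outputgradient = { 1*7 + 3*11, 2*7 + 5*11 } = { 40, 69 }
//     (computed with the original weights, before the update)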
public void BackpropagateRunsTwoByThree()
{
    double[,] weights = new double[,] { { 1, 2, 3 }, { 2, 3, 4 } };
    Layer layer = new LinearLayer(weights);
    NetworkVector layerinput = new NetworkVector(new double[] { 1, 0, -1 });
    layer.Run(layerinput);

    NetworkVector outputgradient = new NetworkVector(new double[] { 1, 1 });
    layer.BackPropagate(outputgradient);

    double[] inputGradientCheck = new double[] { 3, 5, 7 };
    double[] inputGradientValues = layer.InputGradient.ToArray();
    for (int i = 0; i < layer.NumberOfInputs; i++)
    {
        Assert.AreEqual(inputGradientCheck[i], inputGradientValues[i], string.Format("Failure for input {0}", i));
    }
}
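// Expected input gradient, worked by hand: InputGradient = W^T * outputgradient, so
//   { 1*1 + 2*1, 2*1 + 3*1, 3*1 + 4*1 } = { 3, 5, 7 },
// matching inputGradientCheck above. Note this depends only on the original
// weight matrix, not on how the weights are updated during BackPropagate.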
public void BackpropagateRunsWithNonzeroLayerInput()
{
    double[,] weights = new double[,] { { 1 } };
    Layer layer = new LinearLayer(weights);
    NetworkVector layerinput = new NetworkVector(new double[] { 2 });
    layer.Run(layerinput);

    NetworkVector outputgradient = new NetworkVector(new double[] { 1 });
    layer.BackPropagate(outputgradient);

    // For a linear layer the input gradient is W^T * outputgradient, which does not
    // depend on the layer input, so a nonzero input should still give { 1 } here.
    double[] inputGradientCheck = new double[] { 1 };
    double[] inputGradientValues = layer.InputGradient.ToArray();
    for (int i = 0; i < layer.NumberOfInputs; i++)
    {
        Assert.AreEqual(inputGradientCheck[i], inputGradientValues[i]);
    }
}
public void CanUseBigLinearLayer()
{
    // Identity weights on the first 1000 rows; the remaining 1000 rows stay zero.
    double[,] weights = new double[2000, 1000];
    double[] input = new double[1000];
    for (int i = 0; i < 1000; i++)
    {
        weights[i, i] = 1.0;
        input[i] = (double)i;
    }

    NetworkVector inputvector = new NetworkVector(input);
    Layer layer = new LinearLayer(weights);
    layer.Run(inputvector);

    // The first 1000 outputs should echo the input; the last 1000 should be zero.
    double[] result = layer.Output.ToArray();
    for (int i = 0, j = 1000; i < 1000; i++, j++)
    {
        Assert.AreEqual((double)i, result[i], "Failed for i = " + i);
        Assert.AreEqual(0.0, result[j], "Failed for j = " + j);
    }
}