Example #1
        public void BackPropagationIsCorrect()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1, 2 }, { 3, 5 }
            });
            Layer2 layer = Layer2.CreateLinearLayer(weights);

            NetworkVector layerinput = new NetworkVector(new double[] { 1, -1 });

            layer.Run(layerinput);

            NetworkVector outputgradient = new NetworkVector(new double[] { 7, 11 });

            NetworkMatrix weightsGradientCheck = new NetworkMatrix(new double[,] {
                { 7, -7 }, { 11, -11 }
            });

            Assert.AreEqual(weightsGradientCheck, layer.WeightsGradient(outputgradient));

            NetworkVector biasesGradientCheck = new NetworkVector(new double[] { 7, 11 });

            Assert.AreEqual(biasesGradientCheck, layer.BiasesGradient(outputgradient));

            NetworkVector inputGradientCheck = new NetworkVector(new double[] { 40, 69 });

            Assert.AreEqual(inputGradientCheck, layer.InputGradient(outputgradient));
        }
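
Where the expected values come from: assuming Layer2 is a standard fully connected linear layer (output = W·input + biases), the weights gradient is the outer product δ·xᵀ, the biases gradient is δ itself, and the input gradient is Wᵀ·δ. The following is a minimal standalone sketch in plain C# (independent of the Layer2/NetworkMatrix API; W, x, delta are just local names for illustration) that reproduces the checked values:

        double[,] W = { { 1, 2 }, { 3, 5 } };
        double[] x = { 1, -1 };       // layer input
        double[] delta = { 7, 11 };   // gradient of the loss w.r.t. the layer output

        // Weights gradient: outer product delta * x^T  ->  { { 7, -7 }, { 11, -11 } }
        double[,] dW = new double[2, 2];
        for (int i = 0; i < 2; i++)
            for (int j = 0; j < 2; j++)
                dW[i, j] = delta[i] * x[j];

        // Biases gradient: delta itself  ->  { 7, 11 }

        // Input gradient: W^T * delta  ->  { 1*7 + 3*11, 2*7 + 5*11 } = { 40, 69 }
        double[] dX = new double[2];
        for (int j = 0; j < 2; j++)
            for (int i = 0; i < 2; i++)
                dX[j] += W[i, j] * delta[i];

These are exactly the values asserted against weightsGradientCheck, biasesGradientCheck and inputGradientCheck above.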
Example #2
        public void InputGradientRuns()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1 }
            });
            NetworkVector outputgradient = new NetworkVector(new double[] { 1 });
            Layer2 layer = Layer2.CreateLogisticLayer(weights);

            NetworkVector inputGradientCheck = new NetworkVector(new double[] { 0 });

            Assert.AreEqual(inputGradientCheck, layer.InputGradient(outputgradient));
        }
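
This test calls InputGradient before Run has ever been called, so it is essentially a smoke test that the call succeeds on a fresh 1×1 logistic layer. The all-zero expectation is consistent with the derivative being taken from the layer's stored output, which would still be zero-initialized at that point (output·(1 − output) = 0 when output = 0); that reading of Layer2's internals is an assumption, not something the snippet itself shows.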
Example #3
        public void InputGradientRunsTwoByThree()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1, 2, 3 }, { 2, 3, 4 }
            });
            Layer2 layer = Layer2.CreateLinearLayer(weights);

            NetworkVector layerinput = new NetworkVector(new double[] { 1, 0, -1 });

            layer.Run(layerinput);

            NetworkVector outputgradient = new NetworkVector(new double[] { 1, 1 });

            NetworkVector inputGradientCheck = new NetworkVector(new double[] { 3, 5, 7 });

            Assert.AreEqual(inputGradientCheck, layer.InputGradient(outputgradient));
        }
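
For a linear layer the input gradient is simply Wᵀ·δ, so with the weights above and δ = (1, 1) the expected vector is (1 + 2, 2 + 3, 3 + 4) = (3, 5, 7), exactly what inputGradientCheck asserts; the layer input (1, 0, −1) does not enter that computation.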
Example #4
        public void BackpropagateRunsWithNonzeroLayerInput()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1 }
            });
            Layer2 layer = Layer2.CreateLinearLayer(weights);

            NetworkVector layerinput = new NetworkVector(new double[] { 2 });

            layer.Run(layerinput);

            NetworkVector outputgradient = new NetworkVector(new double[] { 1 });

            NetworkVector inputGradientCheck = new NetworkVector(new double[] { 1 });

            Assert.AreEqual(inputGradientCheck, layer.InputGradient(outputgradient));
        }
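
Here the layer is 1×1 with weight 1, so (again assuming a plain linear layer) the input gradient Wᵀ·δ equals the output gradient: the check value 1 follows directly, and the nonzero layer input 2 only matters for running the layer, not for the input gradient.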
Example #5
        public void InputGradientRunsTwoByThree()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1, 2, 3 }, { 2, 3, 4 }
            });
            Layer2 layer = Layer2.CreateLogisticLayer(weights);

            NetworkVector layerinput = new NetworkVector(new double[] { 1, 0, -1 });

            layer.Run(layerinput);

            NetworkVector outputgradient = new NetworkVector(new double[] { 1, 1 });

            NetworkVector inputGradientCheck = new NetworkVector(
                new double[] { 0.31498075621051952, 0.52496792701753248, 0.7349550978245456 }
                );

            Assert.AreEqual(inputGradientCheck, layer.InputGradient(outputgradient));
        }
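
The expected numbers follow from standard logistic back-propagation, assuming the layer computes σ(W·x) and propagates Wᵀ·(δ ⊙ σ′(W·x)). Both pre-activations are 1·1 + 2·0 + 3·(−1) = 2·1 + 3·0 + 4·(−1) = −2, σ(−2) ≈ 0.11920, and σ′(−2) = σ(−2)·(1 − σ(−2)) ≈ 0.10499. A minimal sketch in plain C# (again outside the Layer2 API) reproducing the check:

        double[,] W = { { 1, 2, 3 }, { 2, 3, 4 } };
        double[] x = { 1, 0, -1 };
        double[] delta = { 1, 1 };

        double[] dX = new double[3];
        for (int i = 0; i < 2; i++)
        {
            double z = 0;                                   // pre-activation of row i (here -2)
            for (int j = 0; j < 3; j++)
                z += W[i, j] * x[j];
            double s = 1.0 / (1.0 + System.Math.Exp(-z));   // sigma(z) ~= 0.11920
            double local = delta[i] * s * (1 - s);          // delta_i * sigma'(z) ~= 0.10499
            for (int j = 0; j < 3; j++)
                dX[j] += W[i, j] * local;                   // accumulate W^T * (delta (.) sigma')
        }
        // dX ~= { 0.31498, 0.52497, 0.73496 }, matching inputGradientCheck above.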