Beispiel #1
0
        /// <summary>
        /// Verifies all three backpropagation products of a 2x2 linear layer:
        /// the weights gradient (outer product of output gradient and input),
        /// the biases gradient (the output gradient itself), and the input
        /// gradient (W^T times the output gradient).
        /// </summary>
        public void BackPropagationIsCorrect()
        {
            // Arrange: linear layer with fixed weights, then a forward pass
            // so the layer caches the input it needs for the gradients.
            NetworkMatrix layerWeights = new NetworkMatrix(new double[, ] {
                { 1, 2 }, { 3, 5 }
            });
            Layer2 linearLayer = Layer2.CreateLinearLayer(layerWeights);
            NetworkVector forwardInput = new NetworkVector(new double[] { 1, -1 });
            linearLayer.Run(forwardInput);

            // Act: backpropagate this gradient of the loss w.r.t. the output.
            NetworkVector gradientFromAbove = new NetworkVector(new double[] { 7, 11 });

            // Assert: dL/dW = outer(outputGradient, input) = {{7*1, 7*-1}, {11*1, 11*-1}}.
            NetworkMatrix expectedWeightsGradient = new NetworkMatrix(new double[, ] {
                { 7, -7 }, { 11, -11 }
            });
            Assert.AreEqual(expectedWeightsGradient, linearLayer.WeightsGradient(gradientFromAbove));

            // Assert: dL/db equals the output gradient unchanged.
            NetworkVector expectedBiasesGradient = new NetworkVector(new double[] { 7, 11 });
            Assert.AreEqual(expectedBiasesGradient, linearLayer.BiasesGradient(gradientFromAbove));

            // Assert: dL/dx = W^T * g = (1*7 + 3*11, 2*7 + 5*11) = (40, 69).
            NetworkVector expectedInputGradient = new NetworkVector(new double[] { 40, 69 });
            Assert.AreEqual(expectedInputGradient, linearLayer.InputGradient(gradientFromAbove));
        }
Beispiel #2
0
        /// <summary>
        /// Smoke test for a large (2000x1000) logistic layer: the first 1000
        /// weight rows form an identity block, so output i is sigmoid(i); the
        /// remaining 1000 rows are all zero, so their outputs are sigmoid(0).
        /// </summary>
        public void CanUseBigSigmoidLayer()
        {
            double[,] weights = new double[2000, 1000];
            double[] input = new double[1000];

            // Identity block on rows 0..999; rows 1000..1999 stay all-zero.
            for (int i = 0; i < 1000; i++)
            {
                weights[i, i] = 1.0;
                input[i]      = (double)i;
            }

            NetworkVector inputvector = new NetworkVector(input);
            Layer2        layer       = Layer2.CreateLogisticLayer(new NetworkMatrix(weights));

            layer.Run(inputvector);

            double[] result = layer.Output.ToArray();
            double   sig0   = logistic(0.0);

            for (int i = 0, j = 1000; i < 1000; i++, j++)
            {
                // Fix: compare doubles with a tolerance instead of exact
                // equality. The layer's internal sigmoid and the local
                // logistic() helper may evaluate the same formula in a
                // different order, so bitwise-equal doubles are not guaranteed.
                Assert.AreEqual(logistic((double)i), result[i], 1e-12, "Failed for i = " + i);
                Assert.AreEqual(sig0, result[j], 1e-12, "Failed for j = " + j);
            }
        }
Beispiel #3
0
        /// <summary>
        /// A 2x3 logistic layer applied to (1, 2, 3) must output the sigmoid
        /// of each weighted sum: W*x = (1+0+3, 1+2+0) = (4, 3).
        /// </summary>
        public void SigmoidLayerHasRightRun()
        {
            // Arrange
            NetworkMatrix layerWeights = new NetworkMatrix(new double[, ] {
                { 1, 0, 1 }, { 1, 1, 0 }
            });
            NetworkVector forwardInput = new NetworkVector(new double[] { 1, 2, 3 });
            Layer2        logisticLayer = Layer2.CreateLogisticLayer(layerWeights);

            // Act
            logisticLayer.Run(forwardInput);

            // Assert: output is the element-wise sigmoid of W*x.
            NetworkVector expectedOutput = new NetworkVector(new double[] { logistic(4), logistic(3) });
            Assert.AreEqual(expectedOutput, logisticLayer.Output);
        }
Beispiel #4
0
        /// <summary>
        /// A linear layer with biases must compute W*x + b:
        /// (1+0+3, 1+2+0) + (4, 3) = (8, 6).
        /// </summary>
        public void LinearLayerWithBiasesHasRightRun()
        {
            // Arrange
            NetworkMatrix layerWeights = new NetworkMatrix(new double[, ] {
                { 1, 0, 1 }, { 1, 1, 0 }
            });
            NetworkVector layerBiases  = new NetworkVector(new double[] { 4, 3 });
            NetworkVector forwardInput = new NetworkVector(new double[] { 1, 2, 3 });
            Layer2        linearLayer  = Layer2.CreateLinearLayer(layerWeights, layerBiases);

            // Act
            linearLayer.Run(forwardInput);

            // Assert
            NetworkVector expectedOutput = new NetworkVector(new double[] { 8, 6 });
            Assert.AreEqual(expectedOutput, linearLayer.Output);
        }
Beispiel #5
0
        /// <summary>
        /// The input gradient of a non-square (2x3) linear layer is W^T times
        /// the output gradient: with g = (1, 1), that is the column sums
        /// (1+2, 2+3, 3+4) = (3, 5, 7).
        /// </summary>
        public void InputGradientRunsTwoByThree()
        {
            // Arrange: 2x3 linear layer and a forward pass to set its state.
            NetworkMatrix layerWeights = new NetworkMatrix(new double[, ] {
                { 1, 2, 3 }, { 2, 3, 4 }
            });
            Layer2 linearLayer = Layer2.CreateLinearLayer(layerWeights);
            NetworkVector forwardInput = new NetworkVector(new double[] { 1, 0, -1 });
            linearLayer.Run(forwardInput);

            // Act / Assert: dL/dx = W^T * g.
            NetworkVector gradientFromAbove = new NetworkVector(new double[] { 1, 1 });
            NetworkVector expectedInputGradient = new NetworkVector(new double[] { 3, 5, 7 });
            Assert.AreEqual(expectedInputGradient, linearLayer.InputGradient(gradientFromAbove));
        }
Beispiel #6
0
        /// <summary>
        /// Minimal 1x1 regression check: after a forward pass with a nonzero
        /// input, the input gradient of a weight-1 linear layer equals the
        /// output gradient.
        /// </summary>
        public void BackpropagateRunsWithNonzeroLayerInput()
        {
            // Arrange: scalar linear layer (single weight of 1).
            NetworkMatrix layerWeights = new NetworkMatrix(new double[, ] {
                { 1 }
            });
            Layer2 linearLayer = Layer2.CreateLinearLayer(layerWeights);
            NetworkVector forwardInput = new NetworkVector(new double[] { 2 });
            linearLayer.Run(forwardInput);

            // Act / Assert: dL/dx = W^T * g = 1 * 1 = 1.
            NetworkVector gradientFromAbove = new NetworkVector(new double[] { 1 });
            NetworkVector expectedInputGradient = new NetworkVector(new double[] { 1 });
            Assert.AreEqual(expectedInputGradient, linearLayer.InputGradient(gradientFromAbove));
        }
Beispiel #7
0
        /// <summary>
        /// Input gradient of a 2x3 LOGISTIC layer. With input (1, 0, -1) both
        /// pre-activations are -2, so each output gradient component is scaled
        /// by sigma'(-2) before the W^T multiply; the expected vector is
        /// (3, 5, 7) * sigma'(-2).
        /// </summary>
        /// <remarks>
        /// Fix: renamed from InputGradientRunsTwoByThree. The linear-layer
        /// test elsewhere in this class already uses that exact name, so two
        /// identically-named methods would not compile in one class, and the
        /// old name did not say this variant tests the logistic layer. Test
        /// methods are discovered via attributes, so no caller breaks.
        /// </remarks>
        public void InputGradientRunsTwoByThreeLogistic()
        {
            // Arrange: logistic layer and a forward pass to cache activations.
            NetworkMatrix weights = new NetworkMatrix(new double[, ] {
                { 1, 2, 3 }, { 2, 3, 4 }
            });
            Layer2 layer = Layer2.CreateLogisticLayer(weights);

            NetworkVector layerinput = new NetworkVector(new double[] { 1, 0, -1 });

            layer.Run(layerinput);

            // Act / Assert: dL/dx = W^T * (g .* sigma'(z)), z = (-2, -2).
            NetworkVector outputgradient = new NetworkVector(new double[] { 1, 1 });

            NetworkVector inputGradientCheck = new NetworkVector(
                new double[] { 0.31498075621051952, 0.52496792701753248, 0.7349550978245456 }
                );

            Assert.AreEqual(inputGradientCheck, layer.InputGradient(outputgradient));
        }
Beispiel #8
0
        /// <summary>
        /// Smoke test for a large (2000x1000) linear layer: an identity block
        /// in the first 1000 rows passes input i through unchanged, and the
        /// all-zero rows 1000..1999 produce 0. All expected values are exact
        /// doubles, so exact equality is safe here.
        /// </summary>
        public void CanUseBigLinearLayer()
        {
            // Arrange: identity block on the first 1000 rows, zeros elsewhere.
            double[,] weightEntries = new double[2000, 1000];
            double[] inputEntries = new double[1000];

            for (int k = 0; k < 1000; k++)
            {
                weightEntries[k, k] = 1.0;
                inputEntries[k]     = (double)k;
            }

            NetworkMatrix weights     = new NetworkMatrix(weightEntries);
            NetworkVector inputvector = new NetworkVector(inputEntries);
            Layer2        layer       = Layer2.CreateLinearLayer(weights);

            // Act
            layer.Run(inputvector);
            double[] result = layer.Output.ToArray();

            // Assert: identity rows echo the input, zero rows yield 0.
            for (int i = 0; i < 1000; i++)
            {
                int j = i + 1000;
                Assert.AreEqual((double)i, result[i], "Failed for i = " + i);
                Assert.AreEqual(0.0, result[j], "Failed for j = " + j);
            }
        }