Example #1
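Verifies that a Layer2 can be constructed from a 2×2 weight matrix alone.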
        [TestMethod]
        public void CanMakeLayer()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1, 2 }, { 3, 4 }
            });

            NeuralNet.NetComponent.Layer2 layer = new NeuralNet.NetComponent.Layer2(weights);
            Assert.IsNotNull(layer);
        }
Example #2
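Verifies that a Layer2 can be constructed from a weight matrix together with a matching biases vector (one bias per row of the matrix).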
        [TestMethod]
        public void CanMakeLayerWithBiases()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1, 2 }, { 3, 4 }
            });
            NetworkVector biases = new NetworkVector(new double[] { 5, 7 });

            NeuralNet.NetComponent.Layer2 layer = new NeuralNet.NetComponent.Layer2(weights, biases);
            Assert.IsNotNull(layer);
        }
Example #3
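Verifies that supplying a non-null neural function together with a null derivative throws an ArgumentException: a layer with an activation but no derivative could not be backpropagated.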
        [TestMethod]
        public void NeuralFunctionNotNullRequiresDerivativeNotNull()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1, 2, 3 }, { 3, 4, 5 }
            });
            NetworkVector biases = new NetworkVector(new double[] { 1, 2 });

            try
            {
                NeuralNet.NetComponent.Layer2 layer = new NeuralNet.NetComponent.Layer2(weights, biases, x => 1.0, null);
                Assert.Fail("Attempt to create Layer with non-null _neuralFunction and null _neuralFunctioinDerivative should throw and Argument exception, but did not.");
            }
            catch (ArgumentException)
            { }
        }
Example #4
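Verifies that construction throws an ArgumentException when the biases vector (length 3) does not match the two rows of the weight matrix.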
        [TestMethod]
        public void CannotMakeLayerWithMismatchedBiases()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1, 2 }, { 3, 4 }
            });
            NetworkVector biases = new NetworkVector(new double[] { 5, 7, 11 });

            try
            {
                NeuralNet.NetComponent.Layer2 layer = new NeuralNet.NetComponent.Layer2(weights, biases);
                Assert.Fail("ArgumentException expected but not thrown");
            }
            catch (ArgumentException)
            { }
        }
Example #5
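Verifies that InputGradient throws an ArgumentException when handed a vector whose dimension (3) does not match the layer's single neuron.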
        [TestMethod]
        public void InputGradientRequiresCorrectInputSize()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1 }
            });
            NetworkVector badInput = new NetworkVector(new double[] { 1, 2, 3 });

            NeuralNet.NetComponent.Layer2 layer = new NeuralNet.NetComponent.Layer2(weights);
            try
            {
                layer.InputGradient(badInput);
                Assert.Fail("Backpropogate should throw an ArgumentException if input dimension is not equal to NumberOfNeuron, but did not.");
            }
            catch (ArgumentException) { }
        }
Example #6
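Verifies the forward pass of a linear layer: with zero biases, the expected output is weights · input = [1·1 + 0·2 + 1·3, 1·1 + 1·2 + 0·3] = [4, 3].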
        [TestMethod]
        public void LinearLayerHasRightRun()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1, 0, 1 }, { 1, 1, 0 }
            });
            NetworkVector biases      = new NetworkVector(new double[] { 0, 0 });
            NetworkVector inputvector = new NetworkVector(new double[] { 1, 2, 3 });

            NeuralNet.NetComponent.Layer2 layer = NeuralNet.NetComponent.Layer2.CreateLinearLayer(weights, biases);
            layer.Run(inputvector);

            NetworkVector result         = layer.Output;
            NetworkVector expectedResult = new NetworkVector(new double[] { 4, 3 });

            Assert.AreEqual(expectedResult, result);
        }
Example #7
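Verifies the backpropagation gradients of a one-neuron linear layer against hand-computed values: the input gradient is Wᵀ·δ = [1] and the biases gradient equals δ = [1], while the weights gradient δ·xᵀ comes out as [0], presumably because Run is never called and the layer's stored input is still zero.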
        [TestMethod]
        public void BackpropagateRuns()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[,] {
                { 1 }
            });
            NetworkVector outputgradient = new NetworkVector(new double[] { 1 });

            NeuralNet.NetComponent.Layer2 layer = new NeuralNet.NetComponent.Layer2(weights);

            NetworkVector inputGradientCheck   = new NetworkVector(new double[] { 1 });
            NetworkVector biasesGradientCheck  = new NetworkVector(new double[] { 1 });
            NetworkMatrix weightsGradientCheck = new NetworkMatrix(new double[,] {
                { 0 }
            });

            Assert.AreEqual(inputGradientCheck, layer.InputGradient(outputgradient));
            Assert.AreEqual(biasesGradientCheck, layer.BiasesGradient(outputgradient));
            Assert.AreEqual(weightsGradientCheck, layer.WeightsGradient(outputgradient));
        }
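Putting the pieces together: a minimal end-to-end sketch, assuming only the Layer2 API exercised in the examples above (CreateLinearLayer, Run, Output, and the three gradient methods); names like outputGradient are illustrative.

            // Build a 3-input, 2-neuron linear layer (API as shown in Examples #6 and #7).
            NetworkMatrix weights = new NetworkMatrix(new double[,] { { 1, 0, 1 }, { 1, 1, 0 } });
            NetworkVector biases  = new NetworkVector(new double[] { 0, 0 });
            NeuralNet.NetComponent.Layer2 layer = NeuralNet.NetComponent.Layer2.CreateLinearLayer(weights, biases);

            // Forward pass: Output should hold weights * input + biases = [4, 3].
            layer.Run(new NetworkVector(new double[] { 1, 2, 3 }));
            NetworkVector output = layer.Output;

            // Backward pass: feed the gradient flowing back from the next layer
            // (one entry per neuron) into the three gradient methods.
            NetworkVector outputGradient  = new NetworkVector(new double[] { 1, 1 });
            NetworkVector inputGradient   = layer.InputGradient(outputGradient);
            NetworkVector biasesGradient  = layer.BiasesGradient(outputGradient);
            NetworkMatrix weightsGradient = layer.WeightsGradient(outputGradient);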