Code example #1
        public void Gradient_1ConvLayer_1Iter_Euclidean()
        {
            // arrange

            var net = new ConvNet(3, 1, 1)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 1, activation: Activation.Atan));
            net._Build();
            net.RandomizeParameters(seed: 0);

            var point1 = RandomPoint(3, 1, 1);
            var point2 = RandomPoint(3, 1, 1); // only needed to give the sample a 2-dimensional output
            var sample = new ClassifiedSample<double[][,]>();

            sample[point1] = CLASSES[0];
            sample[point2] = CLASSES[1];

            var alg = new BackpropAlgorithm(net)
            {
                LearningRate = 0.1D,
                LossFunction = Loss.Euclidean
            };

            alg.Build();

            // act
            alg.RunIteration(point1, EXPECTED[0]);

            // assert
            AssertNetGradient(alg, point1, EXPECTED[0]);
        }
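AssertNetGradient itself is not shown in these examples; it presumably compares the analytic gradient computed by BackpropAlgorithm against a numeric estimate. A minimal sketch of such a central-difference check, under that assumption (all names here are hypothetical, not the library's API):

        using System;

        static class GradientCheckSketch
        {
            // Central difference: dL/dw_i ~= (L(w_i + h) - L(w_i - h)) / (2h).
            // loss evaluates the full network loss for the current weight vector.
            public static void AssertGradient(
                Func<double[], double> loss,
                double[] weights,
                double[] analyticGrad,
                double h = 1e-5,
                double tolerance = 1e-4)
            {
                for (int i = 0; i < weights.Length; i++)
                {
                    var w = weights[i];
                    weights[i] = w + h;
                    var lossPlus = loss(weights);
                    weights[i] = w - h;
                    var lossMinus = loss(weights);
                    weights[i] = w; // restore the original weight

                    var numeric = (lossPlus - lossMinus) / (2 * h);
                    if (Math.Abs(numeric - analyticGrad[i]) > tolerance)
                        throw new InvalidOperationException(
                            $"Gradient mismatch at index {i}: numeric {numeric} vs analytic {analyticGrad[i]}");
                }
            }
        }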
Code example #2
        public void SimpleNet_Euclidean_OneIter()
        {
            // arrange

            var net = Mocks.SimpleLinearNetwork();

            var sample = new ClassifiedSample<double[][,]>();
            var point  = new double[1][,] { new[,] { { 1.0D } } };

            sample[point] = new Class("a", 0);

            var alg = new BackpropAlgorithm(net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.Euclidean;
            alg.Build();

            // act
            alg.RunIteration(point, new double[] { 1.0D });

            // assert

            Assert.AreEqual(12, alg.Values[0][0][0, 0]);
            Assert.AreEqual(33, alg.Values[1][0][0, 0]);
            Assert.AreEqual(-62, alg.Values[2][0][0, 0]);

            Assert.AreEqual(3, net[0].ActivationFunction.DerivativeFromValue(alg.Values[0][0][0, 0]));
            Assert.AreEqual(3, net[1].ActivationFunction.DerivativeFromValue(alg.Values[1][0][0, 0]));
            Assert.AreEqual(2, net[2].ActivationFunction.DerivativeFromValue(alg.Values[2][0][0, 0]));

            Assert.AreEqual(-126, alg.Errors[2][0][0, 0]);
            Assert.AreEqual(378, alg.Errors[1][0][0, 0]);
            Assert.AreEqual(1134, alg.Errors[0][0][0, 0]);

            Assert.AreEqual(-126 * 33, alg.Gradient[2][0]);
            Assert.AreEqual(-126, alg.Gradient[2][1]);
            Assert.AreEqual(378 * 12, alg.Gradient[1][0]);
            Assert.AreEqual(378, alg.Gradient[1][1]);
            Assert.AreEqual(1134 * 1, alg.Gradient[0][0]);
            Assert.AreEqual(1134, alg.Gradient[0][1]);

            alg.FlushGradient();

            Assert.AreEqual(-1 + 2 * 126 * 33, net[2].Weights[0]);
            Assert.AreEqual(2 + 2 * 126, net[2].Weights[1]);
            Assert.AreEqual(1 + 2 * (-378 * 12), net[1].Weights[0]);
            Assert.AreEqual(-1 + 2 * (-378), net[1].Weights[1]);
            Assert.AreEqual(3 + 2 * (-1134 * 1), net[0].Weights[0]);
            Assert.AreEqual(1 + 2 * (-1134), net[0].Weights[1]);
        }
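The hand-computed constants in this test can be reconstructed from the final weight-update assertions: the mock network evidently starts with weights 3, 1, -1 and biases 1, -1, 2, and the Derivative assertions imply linear activations with slopes 3, 3, 2. Under the Euclidean loss (y - t)^2 / 2, the forward and backward passes then replay exactly; a standalone sketch, assuming those mock parameters:

        using System;

        class BackpropReplay
        {
            static void Main()
            {
                double x = 1.0, target = 1.0;

                // forward pass: a_i = slope_i * (w_i * a_(i-1) + b_i)
                double a0 = 3 * (3 * x + 1);     // 12
                double a1 = 3 * (1 * a0 - 1);    // 33
                double a2 = 2 * (-1 * a1 + 2);   // -62

                // backward pass: delta_i = (upstream error) * slope_i
                double d2 = (a2 - target) * 2;   // -126 (Euclidean loss derivative times slope)
                double d1 = d2 * (-1) * 3;       // 378  (back through w2 = -1)
                double d0 = d1 * 1 * 3;          // 1134 (back through w1 = 1)

                // weight gradient = delta_i * layer input; bias gradient = delta_i
                Console.WriteLine($"{d2 * a1}, {d2}");  // -4158, -126
                Console.WriteLine($"{d1 * a0}, {d1}");  // 4536, 378
                Console.WriteLine($"{d0 * x}, {d0}");   // 1134, 1134
            }
        }

With LearningRate = 2, FlushGradient subtracts twice each gradient, which is exactly what the `-1 + 2 * 126 * 33`-style assertions encode.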
Code example #3
        public void Gradient_DifferentLayers_1Iter_CrossEntropy_Regularization()
        {
            // arrange

            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 5)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 3, padding: 1));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2, activation: Activation.Exp));
            net.AddLayer(new ActivationLayer(activation: Activation.Tanh));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));
            net.AddLayer(new DropoutLayer(rate: 0.5D));
            net.AddLayer(new DenseLayer(outputDim: 3, activation: Activation.Exp));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var sample = new ClassifiedSample<double[][,]>();

            for (int i = 0; i < 3; i++)
            {
                var point = RandomPoint(1, 5, 5);
                sample[point] = new Class(i.ToString(), i);
            }

            var regularizator = Regularizator.Composite(Regularizator.L1(0.1D), Regularizator.L2(0.3D));
            var alg           = new BackpropAlgorithm(net)
            {
                LearningRate  = 0.1D,
                LossFunction  = Loss.CrossEntropySoftMax,
                Regularizator = regularizator
            };

            alg.Build();

            // act
            var data     = sample.First();
            var expected = new double[3] { 1.0D, 0.0D, 0.0D };

            alg.RunIteration(data.Key, expected);
            regularizator.Apply(alg.Gradient, alg.Net.Weights);
            ((DropoutLayer)alg.Net[4]).ApplyCustomMask = true;

            // assert
            AssertNetGradient(alg, data.Key, expected);
        }
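Regularizator.Composite here chains an L1 and an L2 penalty onto the data-loss gradient. The standard contributions are lambda1 * sign(w) for an L1 term lambda1 * |w| and lambda2 * w for an L2 term (lambda2 / 2) * w^2; a minimal sketch of what Apply plausibly adds (the library's internals are not shown in the test):

        using System;

        static class CompositeRegularizerSketch
        {
            // Adds d/dw [lambda1*|w| + (lambda2/2)*w^2] = lambda1*sign(w) + lambda2*w
            // to each gradient component, mirroring Regularizator.L1(0.1) + L2(0.3).
            public static void Apply(double[] gradient, double[] weights,
                                     double lambda1 = 0.1, double lambda2 = 0.3)
            {
                for (int i = 0; i < weights.Length; i++)
                    gradient[i] += lambda1 * Math.Sign(weights[i]) + lambda2 * weights[i];
            }
        }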
Code example #4
        public void Gradient_MNISTSimple_1Iter()
        {
            // arrange

            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 14)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 4, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 8, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));

            net._Build();

            Randomize(net.Weights, -1.0D, 1.0D);

            var sample = new ClassifiedSample<double[][,]>();

            for (int i = 0; i < 10; i++)
            {
                var point = RandomPoint(1, 14, 14);
                sample[point] = new Class(i.ToString(), i);
            }

            var alg = new BackpropAlgorithm(net)
            {
                LearningRate = 0.005D,
                LossFunction = Loss.Euclidean
            };

            alg.Build();

            // act
            var data     = sample.First();
            var expected = new double[10] { 1.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D };

            alg.RunIteration(data.Key, expected);

            // assert
            AssertNetGradient(alg, data.Key, expected);
        }
Code example #5
        public void SimpleNet_CrossEntropySoftMax_OneIter()
        {
            // arrange

            var net = Mocks.SimpleLinearNetwork2(Activation.ReLU);

            net[2].ActivationFunction = Activation.Logistic(1);

            var sample = new ClassifiedSample<double[][,]>();
            var point1 = new double[1][,] { new[,] { { 1.0D } } };
            var point2 = new double[1][,] { new[,] { { -1.0D } } };
            var cls1 = new Class("a", 0);
            var cls2 = new Class("b", 1);

            sample[point1] = cls1;
            sample[point2] = cls2;

            var alg = new BackpropAlgorithm(sample, net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.CrossEntropySoftMax;
            alg.Build();

            // act
            alg.RunIteration(point1, cls1);

            // assert
            AssertNetGradient(alg, point1, 2, 1);
            AssertNetGradient(alg, point1, 1, 0);
            AssertNetGradient(alg, point1, 1, 1);
            AssertNetGradient(alg, point1, 0, 0);
            AssertNetGradient(alg, point1, 0, 1);
        }
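With Loss.CrossEntropySoftMax, the cross-entropy is taken over a softmax of the raw network outputs, and the output-layer error collapses to the well-known form softmax(z) - onehot(target). A self-contained sketch of that computation (not the library's code):

        using System;
        using System.Linq;

        static class SoftMaxCrossEntropySketch
        {
            // dL/dz_i = y_i - t_i, where y = softmax(z) and t is the one-hot target.
            public static double[] OutputError(double[] z, int targetClass)
            {
                var max = z.Max(); // shift by the max for numeric stability
                var exp = z.Select(v => Math.Exp(v - max)).ToArray();
                var sum = exp.Sum();
                return exp.Select((e, i) => e / sum - (i == targetClass ? 1.0 : 0.0))
                          .ToArray();
            }
        }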
Code example #6
        public void Gradient_SimpleDropout_1Iter_Euclidean()
        {
            // arrange

            var net = new ConvNet(3, 1)
            {
                IsTraining = true
            };

            net.AddLayer(new DenseLayer(outputDim: 10, activation: Activation.Atan));
            net.AddLayer(new DropoutLayer(rate: 0.5D));
            net.AddLayer(new DenseLayer(outputDim: 2, activation: Activation.Atan));
            net._Build();
            net.RandomizeParameters(seed: 0);

            var point1 = RandomPoint(3, 1, 1);
            var point2 = RandomPoint(3, 1, 1); // only needed to give the sample a 2-dimensional output
            var sample = new ClassifiedSample<double[][,]>();

            sample[point1] = CLASSES[0];
            sample[point2] = CLASSES[1];

            var alg = new BackpropAlgorithm(net)
            {
                LearningRate = 0.1D,
                LossFunction = Loss.Euclidean
            };

            alg.Build();

            // act
            alg.RunIteration(point1, EXPECTED[0]);
            ((DropoutLayer)alg.Net[1]).ApplyCustomMask = true;

            // assert
            AssertNetGradient(alg, point1, EXPECTED[0]);
        }
Code example #7
        public void SimpleNet_OneIter_Dropout()
        {
            // arrange

            var drate = 0.5D;
            var dseed = 1;
            var net   = Mocks.SimpleLinearNetworkWithDropout(drate, dseed);

            var sample = new ClassifiedSample<double[]>();
            var point  = new[] { 1.0D };

            sample[point] = new Class("a", 0);

            var alg = new BackpropAlgorithm(net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.Euclidean;

            // act
            alg.RunIteration(point, new double[] { 1.0D });

            // assert

            Assert.AreEqual(12, net[0][0].Value);
            Assert.AreEqual(66, net[1][0].Value);
            Assert.AreEqual(-128, net[2][0].Value);

            Assert.AreEqual(3, net[0][0].Derivative);
            Assert.AreEqual(3 / drate, net[1][0].Derivative);
            Assert.AreEqual(2, net[2][0].Derivative);

            Assert.AreEqual(-129 * 2, alg.Errors[2][0]);
            Assert.AreEqual(-1 * (-258) * 3 / drate, alg.Errors[1][0]);
            Assert.AreEqual(1548 * 3, alg.Errors[0][0]);

            Assert.AreEqual(-258 * 66, alg.Gradient[2][0, 0]);
            AssertNetGradient(alg, point, 2, 0, 0);

            Assert.AreEqual(-258, alg.Gradient[2][0, 1]);
            AssertNetGradient(alg, point, 2, 0, 1);

            Assert.AreEqual(1548 * 12, alg.Gradient[1][0, 0]);
            AssertNetGradient(alg, point, 1, 0, 0);

            Assert.AreEqual(1548, alg.Gradient[1][0, 1]);
            AssertNetGradient(alg, point, 1, 0, 1);

            Assert.AreEqual(4644 * 1, alg.Gradient[0][0, 0]);
            AssertNetGradient(alg, point, 0, 0, 0);

            Assert.AreEqual(4644, alg.Gradient[0][0, 1]);
            AssertNetGradient(alg, point, 0, 0, 1);


            // act
            alg.FlushGradient();

            // assert

            Assert.AreEqual(2 + 2 * 258, net[2][0].Bias);
            Assert.AreEqual(-1 + 2 * 258 * 66, net[2][0][0]);
            Assert.AreEqual(-1 + 2 * (-1548), net[1][0].Bias);
            Assert.AreEqual(1 + 2 * (-1548 * 12), net[1][0][0]);
            Assert.AreEqual(1 + 2 * (-4644), net[0][0].Bias);
            Assert.AreEqual(3 + 2 * (-4644 * 1), net[0][0][0]);
        }
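The `/ drate` factors in the assertions above are the signature of inverted dropout: at training time, surviving activations are scaled by the reciprocal of the retain probability so that expected activations match inference (with rate 0.5 the drop and retain probabilities coincide, so either reading of `drate` gives the same numbers). A minimal forward/backward sketch under that assumption (not the library's implementation):

        static class InvertedDropoutSketch
        {
            // Training-time forward: zero the dropped units, scale survivors by 1/rate.
            public static double[] Forward(double[] input, bool[] mask, double rate)
            {
                var output = new double[input.Length];
                for (int i = 0; i < input.Length; i++)
                    output[i] = mask[i] ? input[i] / rate : 0.0;
                return output;
            }

            // Backward: the same mask and 1/rate scale apply to the gradient,
            // which is where the "3 / drate" derivative in the test comes from.
            public static double[] Backward(double[] gradOut, bool[] mask, double rate)
            {
                var gradIn = new double[gradOut.Length];
                for (int i = 0; i < gradOut.Length; i++)
                    gradIn[i] = mask[i] ? gradOut[i] / rate : 0.0;
                return gradIn;
            }
        }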
Code example #8
        public void SimpleNet_OneIter()
        {
            // arrange

            var net = Mocks.SimpleLinearNetwork();

            var sample = new ClassifiedSample<double[]>();
            var point  = new[] { 1.0D };

            sample[point] = new Class("a", 0);

            var alg = new BackpropAlgorithm(net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.Euclidean;

            // act
            alg.RunIteration(point, new double[] { 1.0D });

            // assert

            Assert.AreEqual(12, net[0][0].Value);
            Assert.AreEqual(33, net[1][0].Value);
            Assert.AreEqual(-62, net[2][0].Value);

            Assert.AreEqual(3, net[0][0].Derivative);
            Assert.AreEqual(3, net[1][0].Derivative);
            Assert.AreEqual(2, net[2][0].Derivative);

            Assert.AreEqual(-126, alg.Errors[2][0]);
            Assert.AreEqual(378, alg.Errors[1][0]);
            Assert.AreEqual(1134, alg.Errors[0][0]);

            Assert.AreEqual(-126 * 33, alg.Gradient[2][0, 0]);
            AssertNetGradient(alg, point, 2, 0, 0);

            Assert.AreEqual(-126, alg.Gradient[2][0, 1]);
            AssertNetGradient(alg, point, 2, 0, 1);

            Assert.AreEqual(378 * 12, alg.Gradient[1][0, 0]);
            AssertNetGradient(alg, point, 1, 0, 0);

            Assert.AreEqual(378, alg.Gradient[1][0, 1]);
            AssertNetGradient(alg, point, 1, 0, 1);

            Assert.AreEqual(1134 * 1, alg.Gradient[0][0, 0]);
            AssertNetGradient(alg, point, 0, 0, 0);

            Assert.AreEqual(1134, alg.Gradient[0][0, 1]);
            AssertNetGradient(alg, point, 0, 0, 1);


            // act
            alg.FlushGradient();

            // assert

            Assert.AreEqual(2 + 2 * 126, net[2][0].Bias);
            Assert.AreEqual(-1 + 2 * 126 * 33, net[2][0][0]);
            Assert.AreEqual(-1 + 2 * (-378), net[1][0].Bias);
            Assert.AreEqual(1 + 2 * (-378 * 12), net[1][0][0]);
            Assert.AreEqual(1 + 2 * (-1134), net[0][0].Bias);
            Assert.AreEqual(3 + 2 * (-1134 * 1), net[0][0][0]);
        }
Code example #9
        public void SimpleNet_OneIter_Dropout()
        {
            // arrange

            var drate = 0.5D;
            var dseed = 1;
            var net   = Mocks.SimpleLinearNetworkWithDropout(drate, dseed);

            var sample = new ClassifiedSample<double[][,]>();
            var point  = new double[1][,] { new[,] { { 1.0D } } };

            sample[point] = new Class("a", 0);

            var alg = new BackpropAlgorithm(net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.Euclidean;
            alg.Build();

            // act
            alg.RunIteration(point, new double[] { 1.0D });

            // assert

            Assert.AreEqual(12, alg.Values[0][0][0, 0]);
            Assert.AreEqual(33, alg.Values[1][0][0, 0]);
            Assert.AreEqual(66, alg.Values[2][0][0, 0]);
            Assert.AreEqual(-128, alg.Values[3][0][0, 0]);

            Assert.AreEqual(3, net[0].ActivationFunction.DerivativeFromValue(alg.Values[0][0][0, 0]));
            Assert.AreEqual(3, net[1].ActivationFunction.DerivativeFromValue(alg.Values[1][0][0, 0]));
            Assert.AreEqual(2, net[3].ActivationFunction.DerivativeFromValue(alg.Values[3][0][0, 0]));

            Assert.AreEqual(-129 * 2, alg.Errors[3][0][0, 0]);
            Assert.AreEqual(-258 * (-1), alg.Errors[2][0][0, 0]);
            Assert.AreEqual(258 * 3 / drate, alg.Errors[1][0][0, 0]);
            Assert.AreEqual(1548 * 3, alg.Errors[0][0][0, 0]);

            Assert.AreEqual(-258 * 66, alg.Gradient[3][0]);
            Assert.AreEqual(-258, alg.Gradient[3][1]);
            Assert.AreEqual(0, alg.Gradient[2].Length);
            Assert.AreEqual(1548 * 12, alg.Gradient[1][0]);
            Assert.AreEqual(1548, alg.Gradient[1][1]);
            Assert.AreEqual(4644 * 1, alg.Gradient[0][0]);
            Assert.AreEqual(4644, alg.Gradient[0][1]);

            // act
            alg.FlushGradient();

            // assert

            Assert.AreEqual(2 + 2 * 258, net[3].Weights[1]);
            Assert.AreEqual(-1 + 2 * 258 * 66, net[3].Weights[0]);
            Assert.AreEqual(-1 + 2 * (-1548), net[1].Weights[1]);
            Assert.AreEqual(1 + 2 * (-1548 * 12), net[1].Weights[0]);
            Assert.AreEqual(1 + 2 * (-4644), net[0].Weights[1]);
            Assert.AreEqual(3 + 2 * (-4644 * 1), net[0].Weights[0]);
        }