Esempio n. 1
0
        /// <summary>
        /// Builds a backprop trainer around the pretrained cat-or-dog network
        /// shipped as an embedded resource. Error: 19.1
        /// </summary>
        public static BackpropAlgorithm CreateKaggleCatOrDogDemo_Pretrained()
        {
            Console.WriteLine("init CreateKaggleCatOrDogDemo_Pretrained");

            // deserialize the pretrained network and switch it back to training mode
            ConvNet network;
            using (var stream = Assembly.GetExecutingAssembly()
                                        .GetManifestResourceStream("ML.DeepTests.Pretrained.cn_e16_p37.65.mld"))
            {
                network = ConvNet.Deserialize(stream);
                network.IsTraining = true;
            }

            const double learningRate = 0.01D;
            var algorithm = new BackpropAlgorithm(network)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = learningRate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(learningRate, 5, 0.5D)
            };

            algorithm.Build();
            return algorithm;
        }
Esempio n. 2
0
        /// <summary>
        /// Builds a backprop trainer around a pretrained cat-or-dog filter network
        /// deserialized from a file on disk.
        /// </summary>
        /// <param name="fpath">Path to a serialized ConvNet (.mld) file.</param>
        public static BackpropAlgorithm CreateKaggleCatOrDogFiltersDemo1_Pretrained(string fpath)
        {
            Console.WriteLine("init CreateKaggleCatOrDogFiltersDemo1_Pretrained");

            ConvNet net;

            // Fixed: the original also fetched Assembly.GetExecutingAssembly() into an
            // unused local; the network is read from the supplied file path, not a resource.
            using (var stream = System.IO.File.Open(fpath, System.IO.FileMode.Open, System.IO.FileAccess.Read))
            {
                net            = ConvNet.Deserialize(stream);
                // re-enable training mode so dropout/etc. behave as during training
                net.IsTraining = true;
            }

            var lrate = 0.001D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Esempio n. 3
0
        /// <summary>
        /// Builds a small fully-connected network (3x48 input -> 128 -> 128 -> 12)
        /// with Euclidean loss for the main-colors demo.
        /// </summary>
        public static BackpropAlgorithm CreateMainColorsDemo1()
        {
            Console.WriteLine("init CreateMainColorsDemo1");

            var relu    = Activation.ReLU;
            var network = new ConvNet(3, 48) { IsTraining = true };

            network.AddLayer(new FlattenLayer(outputDim: 128, activation: relu));
            network.AddLayer(new FlattenLayer(outputDim: 128, activation: relu));
            network.AddLayer(new DenseLayer(outputDim: 12, activation: relu));

            network._Build();
            network.RandomizeParameters(seed: 0);

            const double learningRate = 1.1D;
            var algorithm = new BackpropAlgorithm(network)
            {
                EpochCount              = 500,
                LearningRate            = learningRate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                LossFunction            = Loss.Euclidean,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.0001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(learningRate, 5, 0.5D)
            };

            algorithm.Build();
            return algorithm;
        }
Esempio n. 4
0
        /// <summary>
        /// Numeric gradient check for a single 1x1 conv layer (2 output channels,
        /// Atan activation) after one Euclidean-loss backprop iteration.
        /// </summary>
        public void Gradient_1ConvLayer_1Iter_Euclidean()
        {
            // arrange
            var network = new ConvNet(3, 1, 1) { IsTraining = true };

            network.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 1, activation: Activation.Atan));
            network._Build();
            network.RandomizeParameters(seed: 0);

            var input      = RandomPoint(3, 1, 1);
            var extraInput = RandomPoint(3, 1, 1); // just for 2 dim output
            var sample     = new ClassifiedSample<double[][,]>();

            sample[input]      = CLASSES[0];
            sample[extraInput] = CLASSES[1];

            var algorithm = new BackpropAlgorithm(network)
            {
                LearningRate = 0.1D,
                LossFunction = Loss.Euclidean
            };
            algorithm.Build();

            // act
            algorithm.RunIteration(input, EXPECTED[0]);

            // assert
            AssertNetGradient(algorithm, input, EXPECTED[0]);
        }
Esempio n. 5
0
        /// <summary>
        /// Verifies one full forward/backward iteration of backprop on the mock
        /// linear network: per-layer values, activation derivatives, error terms,
        /// weight gradients and the post-flush weight updates.
        /// NOTE(review): all expected constants assume the exact weights and
        /// activations baked into Mocks.SimpleLinearNetwork() — confirm against that mock.
        /// </summary>
        public void SimpleNet_Euclidean_OneIter()
        {
            // arrange

            var net = Mocks.SimpleLinearNetwork();

            // a single 1x1x1 input point with value 1.0, labelled class "a"
            var sample = new ClassifiedSample <double[][, ]>();
            var point  = new double[1][, ] {
                new[, ] {
                    { 1.0D }
                }
            };

            sample[point] = new Class("a", 0);

            var alg = new BackpropAlgorithm(net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.Euclidean;
            alg.Build();

            // act: one iteration with target output 1.0
            alg.RunIteration(point, new double[] { 1.0D });

            // assert

            // forward-pass outputs of each layer
            Assert.AreEqual(12, alg.Values[0][0][0, 0]);
            Assert.AreEqual(33, alg.Values[1][0][0, 0]);
            Assert.AreEqual(-62, alg.Values[2][0][0, 0]);

            // activation derivatives evaluated at the layer outputs
            Assert.AreEqual(3, net[0].ActivationFunction.DerivativeFromValue(alg.Values[0][0][0, 0]));
            Assert.AreEqual(3, net[1].ActivationFunction.DerivativeFromValue(alg.Values[1][0][0, 0]));
            Assert.AreEqual(2, net[2].ActivationFunction.DerivativeFromValue(alg.Values[2][0][0, 0]));

            // backpropagated error terms, output layer first
            Assert.AreEqual(-126, alg.Errors[2][0][0, 0]);
            Assert.AreEqual(378, alg.Errors[1][0][0, 0]);
            Assert.AreEqual(1134, alg.Errors[0][0][0, 0]);

            // gradients: [i][0] is the weight (error * layer input), [i][1] the bias
            Assert.AreEqual(-126 * 33, alg.Gradient[2][0]);
            Assert.AreEqual(-126, alg.Gradient[2][1]);
            Assert.AreEqual(378 * 12, alg.Gradient[1][0]);
            Assert.AreEqual(378, alg.Gradient[1][1]);
            Assert.AreEqual(1134 * 1, alg.Gradient[0][0]);
            Assert.AreEqual(1134, alg.Gradient[0][1]);

            alg.FlushGradient();

            // after flush: weight = old weight - LearningRate(=2) * gradient
            Assert.AreEqual(-1 + 2 * 126 * 33, net[2].Weights[0]);
            Assert.AreEqual(2 + 2 * 126, net[2].Weights[1]);
            Assert.AreEqual(1 + 2 * (-378 * 12), net[1].Weights[0]);
            Assert.AreEqual(-1 + 2 * (-378), net[1].Weights[1]);
            Assert.AreEqual(3 + 2 * (-1134 * 1), net[0].Weights[0]);
            Assert.AreEqual(1 + 2 * (-1134), net[0].Weights[1]);
        }
Esempio n. 6
0
        /// <summary>
        /// Numeric gradient check for a heterogeneous layer stack (conv, max-pool,
        /// activation, flatten, dropout, dense) trained with cross-entropy softmax
        /// loss plus a composite L1+L2 regularizer.
        /// </summary>
        public void Gradient_DifferentLayers_1Iter_CrossEntropy_Regularization()
        {
            // arrange

            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 5)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 3, padding: 1));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2, activation: Activation.Exp));
            net.AddLayer(new ActivationLayer(activation: Activation.Tanh));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));
            net.AddLayer(new DropoutLayer(rate: 0.5D));
            net.AddLayer(new DenseLayer(outputDim: 3, activation: Activation.Exp));

            net._Build();

            net.RandomizeParameters(seed: 0);

            // three labelled random 1x5x5 points, one per class
            var sample = new ClassifiedSample <double[][, ]>();

            for (int i = 0; i < 3; i++)
            {
                var point = RandomPoint(1, 5, 5);
                sample[point] = new Class(i.ToString(), i);
            }

            var regularizator = Regularizator.Composite(Regularizator.L1(0.1D), Regularizator.L2(0.3D));
            var alg           = new BackpropAlgorithm(net)
            {
                LearningRate  = 0.1D,
                LossFunction  = Loss.CrossEntropySoftMax,
                Regularizator = regularizator
            };

            alg.Build();

            // act: one iteration against a one-hot target for class 0
            var data     = sample.First();
            var expected = new double[3] {
                1.0D, 0.0D, 0.0D
            };

            alg.RunIteration(data.Key, expected);
            // fold the regularizer contribution into the gradient before the numeric check
            regularizator.Apply(alg.Gradient, alg.Net.Weights);
            // NOTE(review): ApplyCustomMask is set after RunIteration, presumably so the
            // numeric check below reuses the same dropout mask — confirm with DropoutLayer.
            ((DropoutLayer)alg.Net[4]).ApplyCustomMask = true;

            // assert
            AssertNetGradient(alg, data.Key, expected);
        }
Esempio n. 7
0
        /// <summary>
        /// Numeric gradient check of a small MNIST-style conv/pool/flatten network
        /// after one Euclidean-loss backprop iteration.
        /// </summary>
        public void Gradient_MNISTSimple_1Iter()
        {
            // arrange
            var relu    = Activation.ReLU;
            var network = new ConvNet(1, 14) { IsTraining = true };

            network.AddLayer(new ConvLayer(outputDepth: 4, windowSize: 5));
            network.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: relu));
            network.AddLayer(new ConvLayer(outputDepth: 8, windowSize: 5));
            network.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: relu));
            network.AddLayer(new FlattenLayer(outputDim: 10, activation: relu));

            network._Build();
            Randomize(network.Weights, -1.0D, 1.0D);

            // ten labelled random 1x14x14 points, one per digit class
            var sample = new ClassifiedSample<double[][,]>();
            for (int i = 0; i < 10; i++)
            {
                sample[RandomPoint(1, 14, 14)] = new Class(i.ToString(), i);
            }

            var algorithm = new BackpropAlgorithm(network)
            {
                LearningRate = 0.005D,
                LossFunction = Loss.Euclidean
            };
            algorithm.Build();

            // act: one iteration against the one-hot target for class "0"
            var data     = sample.First();
            var expected = new double[10];
            expected[0]  = 1.0D;

            algorithm.RunIteration(data.Key, expected);

            // assert
            AssertNetGradient(algorithm, data.Key, expected);
        }
Esempio n. 8
0
        /// <summary>
        /// Builds a VGG-style 2-class CIFAR-10 network (two conv/conv/pool/dropout
        /// stages plus a dense head). Error 21.65
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Trunc2ClassesDemo2_SEALED()
        {
            // Fixed: the init message previously omitted the "_SEALED" suffix and did
            // not match this method's name, unlike every other factory in this file.
            Console.WriteLine("init CreateCIFAR10Trunc2ClassesDemo2_SEALED");

            var activation = Activation.ReLU;
            var net        = new ConvNet(3, 32)
            {
                IsTraining = true
            };

            // stage 1: two 3x3 convs, pool, dropout
            net.AddLayer(new ConvLayer(outputDepth: 16, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 16, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            // stage 2: same shape, doubled depth
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            // dense head with 2-way output
            net.AddLayer(new FlattenLayer(outputDim: 256, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new DenseLayer(outputDim: 2, activation: Activation.Exp));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.01D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Esempio n. 9
0
        // https://code.google.com/archive/p/cuda-convnet/   - CIFAR architectures+errors

        /// <summary>
        /// Creates CNN for CIFAR-10 training (from https://cs.stanford.edu/people/karpathy/convnetjs/demo/cifar10.html)
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Demo2()
        {
            Console.WriteLine("init CreateCIFAR10Demo2");

            var leakyRelu = Activation.LeakyReLU();
            var network   = new ConvNet(3, 32) { IsTraining = true };

            // three conv+pool stages followed by a dense head with dropout
            network.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 5, padding: 2, activation: leakyRelu));
            network.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            network.AddLayer(new ConvLayer(outputDepth: 40, windowSize: 5, padding: 2, activation: leakyRelu));
            network.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            network.AddLayer(new ConvLayer(outputDepth: 60, windowSize: 5, padding: 2, activation: leakyRelu));
            network.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            network.AddLayer(new FlattenLayer(outputDim: 1024, activation: leakyRelu));
            network.AddLayer(new DropoutLayer(0.5));
            network.AddLayer(new DenseLayer(outputDim: 1024, activation: leakyRelu));
            network.AddLayer(new DropoutLayer(0.25));
            network.AddLayer(new DenseLayer(outputDim: 10, activation: leakyRelu));

            network._Build();
            network.RandomizeParameters(seed: 0);

            const double learningRate = 0.05D;
            var algorithm = new BackpropAlgorithm(network)
            {
                LossFunction            = Loss.Euclidean,
                EpochCount              = 500,
                LearningRate            = learningRate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                LearningRateScheduler   = LearningRateScheduler.TimeBased(learningRate, 0.005D)
            };

            algorithm.Build();
            return algorithm;
        }
Esempio n. 10
0
        /// <summary>
        /// Single backprop iteration on the two-point mock network with
        /// cross-entropy softmax loss; each layer weight's gradient is then
        /// verified numerically via AssertNetGradient(alg, point, layerIdx, weightIdx).
        /// </summary>
        public void SimpleNet_CrossEntropySoftMax_OneIter()
        {
            // arrange

            var net = Mocks.SimpleLinearNetwork2(Activation.ReLU);

            // override the output activation so the loss sees logistic outputs
            net[2].ActivationFunction = Activation.Logistic(1);

            // two 1x1x1 points (+1 and -1) labelled with two distinct classes
            var sample = new ClassifiedSample <double[][, ]>();
            var point1 = new double[1][, ] {
                new[, ] {
                    { 1.0D }
                }
            };
            var point2 = new double[1][, ] {
                new[, ] {
                    { -1.0D }
                }
            };
            var cls1 = new Class("a", 0);
            var cls2 = new Class("b", 1);

            sample[point1] = cls1;
            sample[point2] = cls2;

            // NOTE(review): unlike the sibling tests, this uses the (sample, net)
            // constructor overload — confirm both overloads are equivalent here.
            var alg = new BackpropAlgorithm(sample, net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.CrossEntropySoftMax;
            alg.Build();

            // act: one iteration on point1 with its class label as target
            alg.RunIteration(point1, cls1);

            // assert: numeric gradient check at (layer, weight) index pairs
            AssertNetGradient(alg, point1, 2, 1);
            AssertNetGradient(alg, point1, 1, 0);
            AssertNetGradient(alg, point1, 1, 1);
            AssertNetGradient(alg, point1, 0, 0);
            AssertNetGradient(alg, point1, 0, 1);
        }
Esempio n. 11
0
        /// <summary>
        /// Builds a simple MNIST conv network (conv/conv/pool, conv/pool, dense head
        /// with dropout) trained with Euclidean loss and RMSProp. Error = 0.92
        /// </summary>
        public static BackpropAlgorithm CreateMNISTSimpleDemo_SEALED()
        {
            Console.WriteLine("init CreateMNISTSimpleDemo_SEALED");

            var leakyRelu = Activation.LeakyReLU();
            var network   = new ConvNet(1, 28) { IsTraining = true };

            network.AddLayer(new ConvLayer(outputDepth: 12, windowSize: 5, padding: 2));
            network.AddLayer(new ConvLayer(outputDepth: 12, windowSize: 5, padding: 2));
            network.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: leakyRelu));
            network.AddLayer(new ConvLayer(outputDepth: 24, windowSize: 5, padding: 2));
            network.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: leakyRelu));
            network.AddLayer(new FlattenLayer(outputDim: 32, activation: leakyRelu));
            network.AddLayer(new DropoutLayer(rate: 0.5D));
            network.AddLayer(new DenseLayer(outputDim: 10, activation: leakyRelu));

            network._Build();
            network.RandomizeParameters(seed: 0);

            const double learningRate = 0.001D;
            var algorithm = new BackpropAlgorithm(network)
            {
                EpochCount              = 500,
                LearningRate            = learningRate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 4,
                LossFunction            = Loss.Euclidean,
                Optimizer               = Optimizer.RMSProp,
                Regularizator           = Regularizator.L2(0.0001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(learningRate, 5, 0.5D)
            };

            algorithm.Build();
            return algorithm;
        }
Esempio n. 12
0
        /// <summary>
        /// Builds a deeper MNIST network (conv/conv/pool with dropout, dense head)
        /// trained with cross-entropy softmax loss and RMSProp.
        /// </summary>
        public static BackpropAlgorithm CreateMNISTHardDemo()
        {
            Console.WriteLine("init CreateMNISTHardDemo");

            var relu    = Activation.ReLU;
            var network = new ConvNet(1, 28) { IsTraining = true };

            network.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: relu));
            network.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, padding: 1, activation: relu));
            network.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            network.AddLayer(new DropoutLayer(0.25));
            network.AddLayer(new FlattenLayer(outputDim: 128, activation: relu));
            network.AddLayer(new DropoutLayer(0.5));
            network.AddLayer(new FlattenLayer(outputDim: 10, activation: Activation.Exp));

            network._Build();
            network.RandomizeParameters(seed: 0);

            const double learningRate = 0.005D;
            var algorithm = new BackpropAlgorithm(network)
            {
                EpochCount              = 50,
                LearningRate            = learningRate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                LossFunction            = Loss.CrossEntropySoftMax,
                Optimizer               = Optimizer.RMSProp,
                LearningRateScheduler   = LearningRateScheduler.DropBased(learningRate, 5, 0.5D)
            };

            algorithm.Build();
            return algorithm;
        }
Esempio n. 13
0
        /// <summary>
        /// Creates CNN for CIFAR-10 training
        /// (from http://machinelearningmastery.com/object-recognition-convolutional-neural-networks-keras-deep-learning-library/)
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Demo3()
        {
            Console.WriteLine("init CreateCIFAR10Demo3");

            var relu    = Activation.ReLU;
            var network = new ConvNet(3, 32) { IsTraining = true };

            network.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: relu));
            network.AddLayer(new DropoutLayer(0.2D));
            network.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: relu));
            network.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            network.AddLayer(new FlattenLayer(outputDim: 512, activation: Activation.ReLU));
            network.AddLayer(new DropoutLayer(0.5));
            network.AddLayer(new DenseLayer(outputDim: 10, activation: Activation.Logistic(1)));

            network._Build();
            network.RandomizeParameters(seed: 0);

            const double learningRate = 0.01D;
            var algorithm = new BackpropAlgorithm(network)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 50,
                LearningRate            = learningRate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Momentum,
                LearningRateScheduler   = LearningRateScheduler.TimeBased(learningRate, 0.0005D)
            };

            algorithm.Build();
            return algorithm;
        }
Esempio n. 14
0
        /// <summary>
        /// Numeric gradient check for a dense-dropout-dense network after one
        /// Euclidean-loss backprop iteration.
        /// </summary>
        public void Gradient_SimpleDropout_1Iter_Euclidean()
        {
            // arrange
            var network = new ConvNet(3, 1) { IsTraining = true };

            network.AddLayer(new DenseLayer(outputDim: 10, activation: Activation.Atan));
            network.AddLayer(new DropoutLayer(rate: 0.5D));
            network.AddLayer(new DenseLayer(outputDim: 2, activation: Activation.Atan));
            network._Build();
            network.RandomizeParameters(seed: 0);

            var input      = RandomPoint(3, 1, 1);
            var extraInput = RandomPoint(3, 1, 1); // just for 2 dim output
            var sample     = new ClassifiedSample<double[][,]>();

            sample[input]      = CLASSES[0];
            sample[extraInput] = CLASSES[1];

            var algorithm = new BackpropAlgorithm(network)
            {
                LearningRate = 0.1D,
                LossFunction = Loss.Euclidean
            };
            algorithm.Build();

            // act
            algorithm.RunIteration(input, EXPECTED[0]);
            // NOTE(review): set after RunIteration, presumably so the numeric check
            // below reuses the same dropout mask — confirm with DropoutLayer.
            ((DropoutLayer)algorithm.Net[1]).ApplyCustomMask = true;

            // assert
            AssertNetGradient(algorithm, input, EXPECTED[0]);
        }
Esempio n. 15
0
        /// <summary>
        /// Verifies a single backprop iteration on the mock linear network that
        /// includes a dropout layer (index 2): per-layer values, error terms with
        /// the 1/rate dropout scaling, gradients and the post-flush weight updates.
        /// NOTE(review): expected constants assume the exact weights/activations
        /// baked into Mocks.SimpleLinearNetworkWithDropout — confirm against that mock.
        /// </summary>
        public void SimpleNet_OneIter_Dropout()
        {
            // arrange

            var drate = 0.5D;
            var dseed = 1;
            var net   = Mocks.SimpleLinearNetworkWithDropout(drate, dseed);

            // a single 1x1x1 input point with value 1.0, labelled class "a"
            var sample = new ClassifiedSample <double[][, ]>();
            var point  = new double[1][, ] {
                new[, ] {
                    { 1.0D }
                }
            };

            sample[point] = new Class("a", 0);

            var alg = new BackpropAlgorithm(net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.Euclidean;
            alg.Build();

            // act: one iteration with target output 1.0
            alg.RunIteration(point, new double[] { 1.0D });

            // assert

            // forward-pass outputs; the dropout layer scales 33 by 1/drate -> 66
            Assert.AreEqual(12, alg.Values[0][0][0, 0]);
            Assert.AreEqual(33, alg.Values[1][0][0, 0]);
            Assert.AreEqual(66, alg.Values[2][0][0, 0]);
            Assert.AreEqual(-128, alg.Values[3][0][0, 0]);

            // activation derivatives (layer 2 is dropout, so it is skipped here)
            Assert.AreEqual(3, net[0].ActivationFunction.DerivativeFromValue(alg.Values[0][0][0, 0]));
            Assert.AreEqual(3, net[1].ActivationFunction.DerivativeFromValue(alg.Values[1][0][0, 0]));
            Assert.AreEqual(2, net[3].ActivationFunction.DerivativeFromValue(alg.Values[3][0][0, 0]));

            // backpropagated errors; dropout divides the upstream error by drate
            Assert.AreEqual(-129 * 2, alg.Errors[3][0][0, 0]);
            Assert.AreEqual(-258 * (-1), alg.Errors[2][0][0, 0]);
            Assert.AreEqual(258 * 3 / drate, alg.Errors[1][0][0, 0]);
            Assert.AreEqual(1548 * 3, alg.Errors[0][0][0, 0]);

            // gradients; the dropout layer (index 2) has no trainable parameters.
            // Fixed: the Gradient[2].Length assertion was accidentally duplicated.
            Assert.AreEqual(-258 * 66, alg.Gradient[3][0]);
            Assert.AreEqual(-258, alg.Gradient[3][1]);
            Assert.AreEqual(0, alg.Gradient[2].Length);
            Assert.AreEqual(1548 * 12, alg.Gradient[1][0]);
            Assert.AreEqual(1548, alg.Gradient[1][1]);
            Assert.AreEqual(4644 * 1, alg.Gradient[0][0]);
            Assert.AreEqual(4644, alg.Gradient[0][1]);

            // act
            alg.FlushGradient();

            // assert: weight = old weight - LearningRate(=2) * gradient
            Assert.AreEqual(2 + 2 * 258, net[3].Weights[1]);
            Assert.AreEqual(-1 + 2 * 258 * 66, net[3].Weights[0]);
            Assert.AreEqual(-1 + 2 * (-1548), net[1].Weights[1]);
            Assert.AreEqual(1 + 2 * (-1548 * 12), net[1].Weights[0]);
            Assert.AreEqual(1 + 2 * (-4644), net[0].Weights[1]);
            Assert.AreEqual(3 + 2 * (-4644 * 1), net[0].Weights[0]);
        }