Example #1
        public static BackpropAlgorithm CreateMNISTHardDemo(ClassifiedSample<double[][,]> training)
        {
            Console.WriteLine("init CreateMNISTHardDemo");
            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));
            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: Activation.Logistic(1)));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.005D;
            var alg   = new BackpropAlgorithm(training, net)
            {
                LossFunction          = Loss.Euclidean,
                EpochCount            = 50,
                LearningRate          = lrate,
                BatchSize             = 1,
                LearningRateScheduler = LearningRateScheduler.Constant(lrate)
            };

            return alg;
        }
Example #2
        /// <summary>
        /// Error: 19.1
        /// </summary>
        public static BackpropAlgorithm CreateKaggleCatOrDogDemo_Pretrained()
        {
            Console.WriteLine("init CreateKaggleCatOrDogDemo_Pretrained");

            ConvNet net;
            var     assembly = Assembly.GetExecutingAssembly();

            using (var stream = assembly.GetManifestResourceStream("ML.DeepTests.Pretrained.cn_e16_p37.65.mld"))
            {
                net            = ConvNet.Deserialize(stream);
                net.IsTraining = true;
            }

            var lrate = 0.01D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
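A note on the scheduler used above: LearningRateScheduler.DropBased(lrate, 5, 0.5D) reads as a step-decay rule that halves the learning rate every 5 epochs. That interpretation is an assumption based on the argument names, not the library's documented contract; a standalone sketch of the assumed rule:

        // Hypothetical step-decay rule assumed for DropBased(initial, epochStep, drop):
        // the learning rate is multiplied by `drop` once every `epochStep` epochs.
        static double StepDecay(double initial, int epochStep, double drop, int epoch)
        {
            return initial * Math.Pow(drop, epoch / epochStep); // integer division = floor
        }

        // e.g. StepDecay(0.01, 5, 0.5, 0)  -> 0.01
        //      StepDecay(0.01, 5, 0.5, 5)  -> 0.005
        //      StepDecay(0.01, 5, 0.5, 12) -> 0.0025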
Example #3
        private void AssertNetGradient(BackpropAlgorithm alg, double[][,] data, double[] expected)
        {
            var weights = alg.Net.Weights;

            for (int i = 0; i < weights.Length; i++)
            {
                var w = weights[i];
                if (w == null)
                {
                    continue;
                }
                var g = alg.Gradient[i];

                for (int j = 0; j < w.Length; j++)
                {
                    var prev = w[j];
                    var actg = g[j];

                    AssertDerivative(x =>
                    {
                        w[j]     = x;
                        var loss = alg.FeedForward(data, expected);
                        w[j]     = prev;
                        return loss;
                    }, prev, actg);
                }
            }
        }
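AssertNetGradient above delegates to an AssertDerivative helper whose definition is not part of this listing. A minimal sketch of such a numeric gradient check via central differences; the step size and tolerance are illustrative assumptions, not the repository's actual values:

        // Hypothetical sketch: verifies an analytic derivative against a
        // central-difference estimate of f around x.
        private static void AssertDerivative(Func<double, double> f, double x, double expected)
        {
            const double h   = 1e-5;  // finite-difference step (assumed)
            const double tol = 1e-4;  // comparison tolerance (assumed)
            var numeric = (f(x + h) - f(x - h)) / (2 * h);

            Assert.AreEqual(expected, numeric, tol);
        }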
Example #4
        public static BackpropAlgorithm CreateMainColorsDemo1()
        {
            Console.WriteLine("init CreateMainColorsDemo1");
            var activation = Activation.ReLU;
            var net        = new ConvNet(3, 48)
            {
                IsTraining = true
            };

            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new DenseLayer(outputDim: 12, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 1.1D;
            var alg   = new BackpropAlgorithm(net)
            {
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                LossFunction            = Loss.Euclidean,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.0001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Example #5
        public void Gradient_1ConvLayer_1Iter_Euclidean()
        {
            // arrange

            var net = new ConvNet(3, 1, 1)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 1, activation: Activation.Atan));
            net._Build();
            net.RandomizeParameters(seed: 0);

            var point1 = RandomPoint(3, 1, 1);
            var point2 = RandomPoint(3, 1, 1); // just for 2 dim output
            var sample = new ClassifiedSample<double[][,]>();

            sample[point1] = CLASSES[0];
            sample[point2] = CLASSES[1];

            var alg = new BackpropAlgorithm(net)
            {
                LearningRate = 0.1D,
                LossFunction = Loss.Euclidean
            };

            alg.Build();

            // act
            alg.RunIteration(point1, EXPECTED[0]);

            // assert
            AssertNetGradient(alg, point1, EXPECTED[0]);
        }
Example #6
        public static BackpropAlgorithm CreateKaggleCatOrDogFiltersDemo1_Pretrained(string fpath)
        {
            Console.WriteLine("init CreateKaggleCatOrDogFiltersDemo1_Pretrained");

            ConvNet net;

            using (var stream = System.IO.File.Open(fpath, System.IO.FileMode.Open, System.IO.FileAccess.Read))
            {
                net            = ConvNet.Deserialize(stream);
                net.IsTraining = true;
            }

            var lrate = 0.001D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Example #7
        private BackpropAlgorithm createBPAlg()
        {
            var net = NetworkFactory.CreateFullyConnectedNetwork(new[] { 2, 15, 3 }, Activation.Logistic(1));

            net[0].DropoutRate = 0.1D;
            net.IsTraining     = true;

            var alg = new BackpropAlgorithm(Data.TrainingSample, net);

            alg.EpochCount   = 6000;
            alg.LearningRate = 0.01D;
            alg.BatchSize    = 10;
            alg.LossFunction = Loss.Euclidean;

            int epoch = 0;

            alg.EpochEndedEvent += (o, e) =>
            {
                if (epoch++ % 300 != 0)
                {
                    return;
                }
                Console.WriteLine("----------------Epoch #: {0}", epoch);
                Console.WriteLine("L:\t{0}", alg.ErrorValue);
                Console.WriteLine("DL:\t{0}", alg.ErrorDelta);
                Console.WriteLine("DW:\t{0}", alg.Step2);
            };

            return alg;
        }
Example #8
        protected override void Test()
        {
            ConvNet lenet1;
            var     fpath = Path.Combine(OutputPath, "cn_e26_p0.06.mld");

            //var fpath = @"F:\Work\git\ML\solution\ML.DigitsDemo\lenet1.mld";

            using (var stream = File.Open(fpath, FileMode.Open))
            {
                lenet1 = ConvNet.Deserialize(stream);
            }
            var alg = new BackpropAlgorithm(lenet1);

            var fout = Path.Combine(SrcPath, "result1.csv");

            using (var file = File.Open(fout, FileMode.Create, FileAccess.Write))
                using (var writer = new StreamWriter(file))
                {
                    writer.WriteLine("ImageId,Label");

                    int num = 1;
                    foreach (var data in m_Test)
                    {
                        var mark = alg.Predict(data);
                        var idx  = GeneralUtils.ArgMax(mark);
                        var cls  = m_Classes[idx];
                        writer.WriteLine("{0},{1}", num++, (int)cls.Value);
                    }

                    writer.Flush();
                }
        }
Example #9
        protected override void Test()
        {
            ConvNet lenet1;
            var     fpath = Path.Combine(ResultsFolder, "cn_e26_p0.06.mld");

            //var fpath = @"F:\Work\git\ML\solution\ML.DigitsDemo\lenet1.mld";

            using (var stream = File.Open(fpath, FileMode.Open))
            {
                lenet1 = ConvNet.Deserialize(stream);
            }
            var alg = new BackpropAlgorithm(m_Training, lenet1);

            var fout = Path.Combine(MnistSrc, "result1.csv");

            using (var file = File.Open(fout, FileMode.Create, FileAccess.Write))
                using (var writer = new StreamWriter(file))
                {
                    writer.WriteLine("ImageId,Label");

                    int num = 1;
                    foreach (var data in m_Test)
                    {
                        var cls = alg.Classify(data);
                        writer.WriteLine("{0},{1}", num++, (int)cls.Value);
                    }

                    writer.Flush();
                }
        }
Example #10
        public static BackpropAlgorithm CreateMNISTSimpleDemoWithBatching(ClassifiedSample<double[][,]> training)
        {
            Console.WriteLine("init CreateMNISTSimpleDemoWithBatching");
            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 8, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 18, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.0001D;
            var alg   = new BackpropAlgorithm(training, net)
            {
                EpochCount              = 50,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                LossFunction            = Loss.Euclidean,
                Optimizer               = Optimizer.RMSProp,
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            return alg;
        }
Example #11
        public static void SaveAlgCrushResults(BackpropAlgorithm alg, string outputPath)
        {
            var ofileName = string.Format("cn_e{0}_crush.mld", alg.Epoch);
            var ofilePath = Path.Combine(outputPath, ofileName);

            using (var stream = File.Open(ofilePath, FileMode.Create))
            {
                alg.Net.Serialize(stream);
            }
        }
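Judging by its name and the fixed "crush" file suffix, this helper is a crash checkpoint. One plausible call site is a catch block around the training loop, so the current weights survive a failure; this is a sketch only, with alg and outputPath assumed to be in scope:

        try
        {
            // ... run training ...
        }
        catch (Exception)
        {
            SaveAlgCrushResults(alg, outputPath); // persist current weights before rethrowing
            throw;
        }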
Example #12
        public void SimpleNet_Euclidean_OneIter()
        {
            // arrange

            var net = Mocks.SimpleLinearNetwork();

            var sample = new ClassifiedSample<double[][,]>();
            var point  = new double[1][,] {
                new[,] {
                    { 1.0D }
                }
            };

            sample[point] = new Class("a", 0);

            var alg = new BackpropAlgorithm(net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.Euclidean;
            alg.Build();

            // act
            alg.RunIteration(point, new double[] { 1.0D });

            // assert

            Assert.AreEqual(12, alg.Values[0][0][0, 0]);
            Assert.AreEqual(33, alg.Values[1][0][0, 0]);
            Assert.AreEqual(-62, alg.Values[2][0][0, 0]);

            Assert.AreEqual(3, net[0].ActivationFunction.DerivativeFromValue(alg.Values[0][0][0, 0]));
            Assert.AreEqual(3, net[1].ActivationFunction.DerivativeFromValue(alg.Values[1][0][0, 0]));
            Assert.AreEqual(2, net[2].ActivationFunction.DerivativeFromValue(alg.Values[2][0][0, 0]));

            Assert.AreEqual(-126, alg.Errors[2][0][0, 0]);
            Assert.AreEqual(378, alg.Errors[1][0][0, 0]);
            Assert.AreEqual(1134, alg.Errors[0][0][0, 0]);

            Assert.AreEqual(-126 * 33, alg.Gradient[2][0]);
            Assert.AreEqual(-126, alg.Gradient[2][1]);
            Assert.AreEqual(378 * 12, alg.Gradient[1][0]);
            Assert.AreEqual(378, alg.Gradient[1][1]);
            Assert.AreEqual(1134 * 1, alg.Gradient[0][0]);
            Assert.AreEqual(1134, alg.Gradient[0][1]);

            alg.FlushGradient();

            Assert.AreEqual(-1 + 2 * 126 * 33, net[2].Weights[0]);
            Assert.AreEqual(2 + 2 * 126, net[2].Weights[1]);
            Assert.AreEqual(1 + 2 * (-378 * 12), net[1].Weights[0]);
            Assert.AreEqual(-1 + 2 * (-378), net[1].Weights[1]);
            Assert.AreEqual(3 + 2 * (-1134 * 1), net[0].Weights[0]);
            Assert.AreEqual(1 + 2 * (-1134), net[0].Weights[1]);
        }
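The constants asserted above can be re-derived by hand. Judging from the expected values, Mocks.SimpleLinearNetwork() behaves like a three-neuron scalar chain with activations f0(x) = 3x, f1(x) = 3x, f2(x) = 2x and (weight, bias) pairs (3, 1), (1, -1), (-1, 2); that reading is inferred from the assertions, not taken from the mock's definition. A standalone recomputation for input 1.0 and target 1.0 under the Euclidean loss:

        // Forward pass of the inferred mock network.
        double x  = 1.0, target = 1.0;
        double v0 = 3 * (3 * x + 1);     //  12 = alg.Values[0]
        double v1 = 3 * (1 * v0 - 1);    //  33 = alg.Values[1]
        double v2 = 2 * (-1 * v1 + 2);   // -62 = alg.Values[2]

        // Backward pass: the output error is (out - target) * f2'; each earlier error
        // is the next error times that layer's outgoing weight times the local derivative.
        double e2 = (v2 - target) * 2;   //  -126 = alg.Errors[2]
        double e1 = e2 * (-1) * 3;       //   378 = alg.Errors[1]  (through w2 = -1)
        double e0 = e1 * 1 * 3;          //  1134 = alg.Errors[0]  (through w1 = 1)

        // Weight gradients are error times layer input (e2*v1, e1*v0, e0*x), matching the
        // -126*33, 378*12 and 1134*1 assertions; bias gradients equal the errors themselves.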
Example #13
        public static void HandleEpochEnded(BackpropAlgorithm alg,
                                            MultiRegressionSample<double[][,]> test,
                                            MultiRegressionSample<double[][,]> train,
                                            string outputPath)
        {
            Console.WriteLine("\r------------------------------------------- Epoch #{0} ({1})                    ", alg.Epoch, DateTime.Now);
            Console.WriteLine("L:\t{0}", alg.LossValue);
            Console.WriteLine("DW:\t{0}", alg.Step2);
            Console.WriteLine("LR:\t{0}", alg.LearningRate);

            double? pct = null;

            if (test == null || !test.Any())
            {
                Console.WriteLine("Test: none");
            }
            else
            {
                var terrors = alg.GetErrors(test, 0, true);
                var tec     = terrors.Count();
                var tdc     = test.Count;
                var tpct    = Math.Round(100.0F * tec / tdc, 2);
                Console.WriteLine("Test: {0} of {1} ({2}%)", tec, tdc, tpct);

                pct = tpct;
            }

            if (train == null || !train.Any())
            {
                Console.WriteLine("Train: none");
            }
            else
            {
                var verrors = alg.GetErrors(train, 0, true);
                var vec     = verrors.Count();
                var vdc     = train.Count;
                var vpct    = Math.Round(100.0F * vec / vdc, 2);
                Console.WriteLine("Train: {0} of {1} ({2}%)", vec, vdc, vpct);

                if (!pct.HasValue)
                {
                    pct = vpct;
                }
            }

            var ofileName = string.Format("cn_e{0}_p{1}.mld", alg.Epoch, Math.Round(pct.Value, 2));
            var ofilePath = Path.Combine(outputPath, ofileName);

            using (var stream = File.Open(ofilePath, FileMode.Create))
            {
                alg.Net.Serialize(stream);
            }
        }
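This handler logs the loss, learning rate, and test/train error percentages, then checkpoints the network after every epoch. Given the EpochEndedEvent shown in Example #7, one plausible wiring is the sketch below; the repository's actual call sites may differ, and test, train, and outputPath are assumed to be in scope:

        alg.EpochEndedEvent += (o, e) => HandleEpochEnded(alg, test, train, outputPath);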
Example #14
        public void Gradient_DifferentLayers_1Iter_CrossEntropy_Regularization()
        {
            // arrange

            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 5)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 3, padding: 1));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2, activation: Activation.Exp));
            net.AddLayer(new ActivationLayer(activation: Activation.Tanh));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));
            net.AddLayer(new DropoutLayer(rate: 0.5D));
            net.AddLayer(new DenseLayer(outputDim: 3, activation: Activation.Exp));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var sample = new ClassifiedSample<double[][,]>();

            for (int i = 0; i < 3; i++)
            {
                var point = RandomPoint(1, 5, 5);
                sample[point] = new Class(i.ToString(), i);
            }

            var regularizator = Regularizator.Composite(Regularizator.L1(0.1D), Regularizator.L2(0.3D));
            var alg           = new BackpropAlgorithm(net)
            {
                LearningRate  = 0.1D,
                LossFunction  = Loss.CrossEntropySoftMax,
                Regularizator = regularizator
            };

            alg.Build();

            // act
            var data     = sample.First();
            var expected = new double[3] {
                1.0D, 0.0D, 0.0D
            };

            alg.RunIteration(data.Key, expected);
            regularizator.Apply(alg.Gradient, alg.Net.Weights);
            ((DropoutLayer)alg.Net[4]).ApplyCustomMask = true;

            // assert
            AssertNetGradient(alg, data.Key, expected);
        }
Example #15
        public static void HandleBatchEnded(BackpropAlgorithm alg, int trainCount, DateTime tstart)
        {
            var now  = DateTime.Now;
            var iter = alg.Iteration;
            var pct  = Math.Min(100 * iter / (float)trainCount, 100);

            // Remaining-time estimate: elapsed wall time scaled by remaining/completed iterations
            var left = TimeSpan.FromMinutes((now - tstart).TotalMinutes * (trainCount - iter) / iter);

            Console.Write("\rCurrent epoch progress: {0:0.00}%. Left {1:00}m {2:00}s.  L={3:0.0000}         ",
                          pct,
                          left.Minutes,
                          left.Seconds,
                          alg.LossValue);
        }
Example #16
        private void AssertNetGradient(BackpropAlgorithm alg, double[] point, int lidx, int nidx, int widx)
        {
            var net  = alg.Net;
            var loss = alg.LossFunction;
            var prev = net[lidx][nidx][widx];
            var grad = alg.Gradient[lidx][nidx, widx];

            AssertDerivative(x =>
            {
                net[lidx][nidx][widx] = x;
                var res = net.Calculate(point)[0];
                net[lidx][nidx][widx] = prev;
                return loss.Value(new[] { res }, new[] { 1.0D });
            }, prev, grad);
        }
Example #17
        public void Gradient_MNISTSimple_1Iter()
        {
            // arrange

            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 14)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 4, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 8, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));

            net._Build();

            Randomize(net.Weights, -1.0D, 1.0D);

            var sample = new ClassifiedSample<double[][,]>();

            for (int i = 0; i < 10; i++)
            {
                var point = RandomPoint(1, 14, 14);
                sample[point] = new Class(i.ToString(), i);
            }

            var alg = new BackpropAlgorithm(net)
            {
                LearningRate = 0.005D,
                LossFunction = Loss.Euclidean
            };

            alg.Build();

            // act
            var data     = sample.First();
            var expected = new double[10] {
                1.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D
            };

            alg.RunIteration(data.Key, expected);

            // assert
            AssertNetGradient(alg, data.Key, expected);
        }
Example #18
        /// <summary>
        /// Error 21.65
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Trunc2ClassesDemo2_SEALED()
        {
            Console.WriteLine("init CreateCIFAR10Trunc2ClassesDemo2");

            var activation = Activation.ReLU;
            var net        = new ConvNet(3, 32)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 16, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 16, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new FlattenLayer(outputDim: 256, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new DenseLayer(outputDim: 2, activation: Activation.Exp));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.01D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Example #19
        // https://code.google.com/archive/p/cuda-convnet/   - CIFAR architectures + errors

        /// <summary>
        /// Creates CNN for CIFAR-10 training (from https://cs.stanford.edu/people/karpathy/convnetjs/demo/cifar10.html)
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Demo2()
        {
            Console.WriteLine("init CreateCIFAR10Demo2");
            var activation = Activation.LeakyReLU();
            var net        = new ConvNet(3, 32)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 5, padding: 2, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new ConvLayer(outputDepth: 40, windowSize: 5, padding: 2, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new ConvLayer(outputDepth: 60, windowSize: 5, padding: 2, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new FlattenLayer(outputDim: 1024, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new DenseLayer(outputDim: 1024, activation: activation));
            net.AddLayer(new DropoutLayer(0.25));
            net.AddLayer(new DenseLayer(outputDim: 10, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.05D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.Euclidean,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                LearningRateScheduler   = LearningRateScheduler.TimeBased(lrate, 0.005D)
            };

            alg.Build();

            return alg;
        }
Example #20
        /// <summary>
        /// Creates CNN for CIFAR-10 training (from keras)
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Demo1(ClassifiedSample<double[][,]> training)
        {
            Console.WriteLine("init CreateCIFAR10Demo1");
            var activation = Activation.ReLU;
            var net        = new ConvNet(3, 32)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new FlattenLayer(outputDim: 512, activation: Activation.ReLU));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new DenseLayer(outputDim: 10, activation: Activation.Logistic(1)));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.01D;
            var alg   = new BackpropAlgorithm(training, net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 50,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            return alg;
        }
Example #21
        public void SimpleNet_CrossEntropySoftMax_OneIter()
        {
            // arrange

            var net = Mocks.SimpleLinearNetwork2(Activation.ReLU);

            net[2].ActivationFunction = Activation.Logistic(1);

            var sample = new ClassifiedSample<double[][,]>();
            var point1 = new double[1][,] {
                new[,] {
                    { 1.0D }
                }
            };
            var point2 = new double[1][,] {
                new[,] {
                    { -1.0D }
                }
            };
            var cls1 = new Class("a", 0);
            var cls2 = new Class("b", 1);

            sample[point1] = cls1;
            sample[point2] = cls2;

            var alg = new BackpropAlgorithm(sample, net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.CrossEntropySoftMax;
            alg.Build();

            // act
            alg.RunIteration(point1, cls1);

            // assert
            AssertNetGradient(alg, point1, 2, 1);
            AssertNetGradient(alg, point1, 1, 0);
            AssertNetGradient(alg, point1, 1, 1);
            AssertNetGradient(alg, point1, 0, 0);
            AssertNetGradient(alg, point1, 0, 1);
        }
Example #22
        /// <summary>
        /// Error = 0.92
        /// </summary>
        public static BackpropAlgorithm CreateMNISTSimpleDemo_SEALED()
        {
            Console.WriteLine("init CreateMNISTSimpleDemo_SEALED");
            var activation = Activation.LeakyReLU();
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 12, windowSize: 5, padding: 2));
            net.AddLayer(new ConvLayer(outputDepth: 12, windowSize: 5, padding: 2));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 24, windowSize: 5, padding: 2));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 32, activation: activation));
            net.AddLayer(new DropoutLayer(rate: 0.5D));
            net.AddLayer(new DenseLayer(outputDim: 10, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.001D;
            var alg   = new BackpropAlgorithm(net)
            {
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 4,
                LossFunction            = Loss.Euclidean,
                Optimizer               = Optimizer.RMSProp,
                Regularizator           = Regularizator.L2(0.0001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Example #23
        public static BackpropAlgorithm CreateMNISTHardDemo()
        {
            Console.WriteLine("init CreateMNISTHardDemo");
            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));
            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: Activation.Exp));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.005D;
            var alg   = new BackpropAlgorithm(net)
            {
                EpochCount              = 50,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                LossFunction            = Loss.CrossEntropySoftMax,
                Optimizer               = Optimizer.RMSProp,
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Example #24
        public static void HandleEpochEnded(BackpropAlgorithm alg, ClassifiedSample<double[][,]> test, string outputPath)
        {
            Console.WriteLine("---------------- Epoch #: {0} ({1})", alg.Epoch, DateTime.Now);
            Console.WriteLine("L:\t{0}", alg.LossValue);
            Console.WriteLine("DW:\t{0}", alg.Step2);
            Console.WriteLine("LR:\t{0}", alg.LearningRate);
            Console.WriteLine("Errors:");

            var errors = alg.GetErrors(test);
            var ec     = errors.Count();
            var dc     = test.Count;
            var pct    = Math.Round(100.0F * ec / dc, 2);

            Console.WriteLine("{0} of {1} ({2}%)", ec, dc, pct);

            var ofileName = string.Format("cn_e{0}_p{1}.mld", alg.Epoch, Math.Round(pct, 2));
            var ofilePath = Path.Combine(outputPath, ofileName);

            using (var stream = File.Open(ofilePath, FileMode.Create))
            {
                alg.Net.Serialize(stream);
            }
        }
Example #25
        public void Gradient_SimpleDropout_1Iter_Euclidean()
        {
            // arrange

            var net = new ConvNet(3, 1)
            {
                IsTraining = true
            };

            net.AddLayer(new DenseLayer(outputDim: 10, activation: Activation.Atan));
            net.AddLayer(new DropoutLayer(rate: 0.5D));
            net.AddLayer(new DenseLayer(outputDim: 2, activation: Activation.Atan));
            net._Build();
            net.RandomizeParameters(seed: 0);

            var point1 = RandomPoint(3, 1, 1);
            var point2 = RandomPoint(3, 1, 1); // just for 2 dim output
            var sample = new ClassifiedSample<double[][,]>();

            sample[point1] = CLASSES[0];
            sample[point2] = CLASSES[1];

            var alg = new BackpropAlgorithm(net)
            {
                LearningRate = 0.1D,
                LossFunction = Loss.Euclidean
            };

            alg.Build();

            // act
            alg.RunIteration(point1, EXPECTED[0]);
            ((DropoutLayer)alg.Net[1]).ApplyCustomMask = true;

            // assert
            AssertNetGradient(alg, point1, EXPECTED[0]);
        }
Example #26
        public void SimpleNet_OneIter_Dropout()
        {
            // arrange

            var drate = 0.5D;
            var dseed = 1;
            var net   = Mocks.SimpleLinearNetworkWithDropout(drate, dseed);

            var sample = new ClassifiedSample<double[]>();
            var point  = new[] { 1.0D };

            sample[point] = new Class("a", 0);

            var alg = new BackpropAlgorithm(net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.Euclidean;

            // act
            alg.RunIteration(point, new double[] { 1.0D });

            // assert

            Assert.AreEqual(12, net[0][0].Value);
            Assert.AreEqual(66, net[1][0].Value);
            Assert.AreEqual(-128, net[2][0].Value);

            Assert.AreEqual(3, net[0][0].Derivative);
            Assert.AreEqual(3 / drate, net[1][0].Derivative);
            Assert.AreEqual(2, net[2][0].Derivative);

            Assert.AreEqual(-129 * 2, alg.Errors[2][0]);
            Assert.AreEqual(-1 * (-258) * 3 / drate, alg.Errors[1][0]);
            Assert.AreEqual(1548 * 3, alg.Errors[0][0]);

            Assert.AreEqual(-258 * 66, alg.Gradient[2][0, 0]);
            AssertNetGradient(alg, point, 2, 0, 0);

            Assert.AreEqual(-258, alg.Gradient[2][0, 1]);
            AssertNetGradient(alg, point, 2, 0, 1);

            Assert.AreEqual(1548 * 12, alg.Gradient[1][0, 0]);
            AssertNetGradient(alg, point, 1, 0, 0);

            Assert.AreEqual(1548, alg.Gradient[1][0, 1]);
            AssertNetGradient(alg, point, 1, 0, 1);

            Assert.AreEqual(4644 * 1, alg.Gradient[0][0, 0]);
            AssertNetGradient(alg, point, 0, 0, 0);

            Assert.AreEqual(4644, alg.Gradient[0][0, 1]);
            AssertNetGradient(alg, point, 0, 0, 1);


            // act
            alg.FlushGradient();

            // assert

            Assert.AreEqual(2 + 2 * 258, net[2][0].Bias);
            Assert.AreEqual(-1 + 2 * 258 * 66, net[2][0][0]);
            Assert.AreEqual(-1 + 2 * (-1548), net[1][0].Bias);
            Assert.AreEqual(1 + 2 * (-1548 * 12), net[1][0][0]);
            Assert.AreEqual(1 + 2 * (-4644), net[0][0].Bias);
            Assert.AreEqual(3 + 2 * (-4644 * 1), net[0][0][0]);
        }
Example #27
        public void SimpleNet_OneIter()
        {
            // arrange

            var net = Mocks.SimpleLinearNetwork();

            var sample = new ClassifiedSample<double[]>();
            var point  = new[] { 1.0D };

            sample[point] = new Class("a", 0);

            var alg = new BackpropAlgorithm(net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.Euclidean;

            // act
            alg.RunIteration(point, new double[] { 1.0D });

            // assert

            Assert.AreEqual(12, net[0][0].Value);
            Assert.AreEqual(33, net[1][0].Value);
            Assert.AreEqual(-62, net[2][0].Value);

            Assert.AreEqual(3, net[0][0].Derivative);
            Assert.AreEqual(3, net[1][0].Derivative);
            Assert.AreEqual(2, net[2][0].Derivative);

            Assert.AreEqual(-126, alg.Errors[2][0]);
            Assert.AreEqual(378, alg.Errors[1][0]);
            Assert.AreEqual(1134, alg.Errors[0][0]);

            Assert.AreEqual(-126 * 33, alg.Gradient[2][0, 0]);
            AssertNetGradient(alg, point, 2, 0, 0);

            Assert.AreEqual(-126, alg.Gradient[2][0, 1]);
            AssertNetGradient(alg, point, 2, 0, 1);

            Assert.AreEqual(378 * 12, alg.Gradient[1][0, 0]);
            AssertNetGradient(alg, point, 1, 0, 0);

            Assert.AreEqual(378, alg.Gradient[1][0, 1]);
            AssertNetGradient(alg, point, 1, 0, 1);

            Assert.AreEqual(1134 * 1, alg.Gradient[0][0, 0]);
            AssertNetGradient(alg, point, 0, 0, 0);

            Assert.AreEqual(1134, alg.Gradient[0][0, 1]);
            AssertNetGradient(alg, point, 0, 0, 1);


            // act
            alg.FlushGradient();

            // assert

            Assert.AreEqual(2 + 2 * 126, net[2][0].Bias);
            Assert.AreEqual(-1 + 2 * 126 * 33, net[2][0][0]);
            Assert.AreEqual(-1 + 2 * (-378), net[1][0].Bias);
            Assert.AreEqual(1 + 2 * (-378 * 12), net[1][0][0]);
            Assert.AreEqual(1 + 2 * (-1134), net[0][0].Bias);
            Assert.AreEqual(3 + 2 * (-1134 * 1), net[0][0][0]);
        }
Example #28
        private void onTestButtonClick(object sender, RoutedEventArgs e)
        {
            var path     = @"F:\Work\science\Machine learning\data\cat-dog\train\kaggle";
            var errors1  = 0;
            var errors1C = 0;
            var errors1D = 0;
            var errors2  = 0;
            var errors2C = 0;
            var errors2D = 0;
            var errorsC1 = 0;
            var errorsC2 = 0;
            var errorsR  = 0;
            var pct1     = 0;
            var pct2     = 0;
            var pctC     = 0;
            var pctC1    = 0;
            var pctC2    = 0;
            var pctR     = 0;
            var alp1     = 0.95D;
            var alp2     = 0.05D;
            var dir      = new DirectoryInfo(path);

            var sample = new MultiRegressionSample<double[][,]>();
            var cat    = new double[] { 1.0D, 0.0D };
            var dog    = new double[] { 0.0D, 1.0D };

            int cnt = 0;

            foreach (var file in dir.EnumerateFiles().Shuffle(0).Skip(10000).Take(500))
            {
                var fname    = Path.GetFileNameWithoutExtension(file.Name);
                var expected = fname.StartsWith("cat.") ? 0 : 1;
                var data1    = getNetData(file.FullName);
                double[][,] data2;
                using (var image = (Bitmap)System.Drawing.Image.FromFile(file.FullName))
                    data2 = getNetFData(image);

                sample.Add(data2, expected == 0 ? cat : dog);

                var result1 = m_Network.Calculate(data1).Select(d => d[0, 0]).ToArray();
                var actual1 = ML.Core.Mathematics.MathUtils.ArgMax(result1);
                if (expected != actual1)
                {
                    if (expected == 0)
                    {
                        errors1C++;
                    }
                    else
                    {
                        errors1D++;
                    }
                    errors1++;
                }

                var result2 = m_NetworkF.Calculate(data2).Select(d => d[0, 0]).ToArray();
                var actual2 = ML.Core.Mathematics.MathUtils.ArgMax(result2);
                if (expected != actual2)
                {
                    if (expected == 0)
                    {
                        errors2C++;
                    }
                    else
                    {
                        errors2D++;
                    }
                    errors2++;
                }

                var resultR = new double[result1.Length];
                resultR[0] = alp1 * result1[0] + (1 - alp1) * result2[0];
                resultR[1] = alp2 * result1[1] + (1 - alp2) * result2[1];
                var actualR = ML.Core.Mathematics.MathUtils.ArgMax(resultR);
                if (expected != actualR)
                {
                    errorsR++;
                }

                if ((expected != actual1) && (expected != actual2))
                {
                    if (expected == 0)
                    {
                        errorsC1++;
                    }
                    else
                    {
                        errorsC2++;
                    }
                }

                cnt++;
                pct1  = errors1 * 100 / cnt;
                pct2  = errors2 * 100 / cnt;
                pctC1 = errorsC1 * 100 / cnt;
                pctC2 = errorsC2 * 100 / cnt;
                pctC  = (errorsC1 + errorsC2) * 100 / cnt;
                pctR  = errorsR * 100 / cnt;
            }

            var alg = new BackpropAlgorithm(m_NetworkF);
            var err = alg.GetErrors(sample, 0, true); // error count over the collected sample; not used in the message below

            var message = "Errors1: {0}%, Errors2: {1}%, ErrorsC: {2}%, ErrorR: {3}%";

            MessageBox.Show(string.Format(message, pct1, pct2, pctC, pctR));
        }
Example #29
        public void SimpleNet_OneIter_Dropout()
        {
            // arrange

            var drate = 0.5D;
            var dseed = 1;
            var net   = Mocks.SimpleLinearNetworkWithDropout(drate, dseed);

            var sample = new ClassifiedSample<double[][,]>();
            var point  = new double[1][,] {
                new[,] {
                    { 1.0D }
                }
            };

            sample[point] = new Class("a", 0);

            var alg = new BackpropAlgorithm(net);

            alg.LearningRate = 2.0D;
            alg.LossFunction = Loss.Euclidean;
            alg.Build();

            // act
            alg.RunIteration(point, new double[] { 1.0D });

            // assert

            Assert.AreEqual(12, alg.Values[0][0][0, 0]);
            Assert.AreEqual(33, alg.Values[1][0][0, 0]);
            Assert.AreEqual(66, alg.Values[2][0][0, 0]);
            Assert.AreEqual(-128, alg.Values[3][0][0, 0]);

            Assert.AreEqual(3, net[0].ActivationFunction.DerivativeFromValue(alg.Values[0][0][0, 0]));
            Assert.AreEqual(3, net[1].ActivationFunction.DerivativeFromValue(alg.Values[1][0][0, 0]));
            Assert.AreEqual(2, net[3].ActivationFunction.DerivativeFromValue(alg.Values[3][0][0, 0]));

            Assert.AreEqual(-129 * 2, alg.Errors[3][0][0, 0]);
            Assert.AreEqual(-258 * (-1), alg.Errors[2][0][0, 0]);
            Assert.AreEqual(258 * 3 / drate, alg.Errors[1][0][0, 0]);
            Assert.AreEqual(1548 * 3, alg.Errors[0][0][0, 0]);

            Assert.AreEqual(-258 * 66, alg.Gradient[3][0]);
            Assert.AreEqual(-258, alg.Gradient[3][1]);
            Assert.AreEqual(0, alg.Gradient[2].Length);
            Assert.AreEqual(1548 * 12, alg.Gradient[1][0]);
            Assert.AreEqual(1548, alg.Gradient[1][1]);
            Assert.AreEqual(4644 * 1, alg.Gradient[0][0]);
            Assert.AreEqual(4644, alg.Gradient[0][1]);

            // act
            alg.FlushGradient();

            // assert

            Assert.AreEqual(2 + 2 * 258, net[3].Weights[1]);
            Assert.AreEqual(-1 + 2 * 258 * 66, net[3].Weights[0]);
            Assert.AreEqual(-1 + 2 * (-1548), net[1].Weights[1]);
            Assert.AreEqual(1 + 2 * (-1548 * 12), net[1].Weights[0]);
            Assert.AreEqual(1 + 2 * (-4644), net[0].Weights[1]);
            Assert.AreEqual(3 + 2 * (-4644 * 1), net[0].Weights[0]);
        }