Example #1
        public static BackpropAlgorithm CreateKaggleCatOrDogFiltersDemo1_Pretrained(string fpath)
        {
            Console.WriteLine("init CreateKaggleCatOrDogFiltersDemo1_Pretrained");

            ConvNet net;

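            // Restore a previously trained network from disk and flip it back
            // into training mode so its weights can be fine-tuned.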
            using (var stream = System.IO.File.Open(fpath, System.IO.FileMode.Open, System.IO.FileAccess.Read))
            {
                net            = ConvNet.Deserialize(stream);
                net.IsTraining = true;
            }

            var lrate = 0.001D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Example #2
        public static BackpropAlgorithm CreateMainColorsDemo1()
        {
            Console.WriteLine("init CreateMainColorsDemo1");
            var activation = Activation.ReLU;
            var net        = new ConvNet(3, 48)
            {
                IsTraining = true
            };

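            // Fully-connected body: two 128-unit FlattenLayers feeding a
            // 12-unit DenseLayer output.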
            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new DenseLayer(outputDim: 12, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 1.1D;
            var alg   = new BackpropAlgorithm(net)
            {
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                LossFunction            = Loss.Euclidean,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.0001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Example #3
        public static BackpropAlgorithm CreateMNISTHardDemo(ClassifiedSample<double[][,]> training)
        {
            Console.WriteLine("init CreateMNISTHardDemo");
            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

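            // Conv(32) -> Conv(64) -> max-pool -> dropout, then a 128-unit
            // hidden layer and a 10-way logistic output (one unit per digit).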
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));
            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: Activation.Logistic(1)));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.005D;
            var alg   = new BackpropAlgorithm(training, net)
            {
                LossFunction          = Loss.Euclidean,
                EpochCount            = 50,
                LearningRate          = lrate,
                BatchSize             = 1,
                LearningRateScheduler = LearningRateScheduler.Constant(lrate)
            };

            return alg;
        }
Example #4
        /// <summary>
        /// Error: 19.1
        /// </summary>
        public static BackpropAlgorithm CreateKaggleCatOrDogDemo_Pretrained()
        {
            Console.WriteLine("init CreateKaggleCatOrDogDemo_Pretrained");

            ConvNet net;
            var     assembly = Assembly.GetExecutingAssembly();

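            // The pretrained model is shipped as an embedded resource of this assembly.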
            using (var stream = assembly.GetManifestResourceStream("ML.DeepTests.Pretrained.cn_e16_p37.65.mld"))
            {
                net            = ConvNet.Deserialize(stream);
                net.IsTraining = true;
            }

            var lrate = 0.01D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Example #5
        /// <summary>
        /// Creates a simple CNN for MNIST training with minibatch parallelization.
        /// </summary>
        public static BackpropAlgorithm CreateMNISTSimpleDemoWithBatching(ClassifiedSample<double[][,]> training)
        {
            Console.WriteLine("init CreateMNISTSimpleDemoWithBatching");
            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

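            // LeNet-style stack: two conv/max-pool pairs, then a 10-way output.
            // Note the activations sit on the pooling layers, not the conv layers.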
            net.AddLayer(new ConvLayer(outputDepth: 8, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 18, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.0001D;
            var alg   = new BackpropAlgorithm(training, net)
            {
                EpochCount              = 50,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                LossFunction            = Loss.Euclidean,
                Optimizer               = Optimizer.RMSProp,
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            return alg;
        }
Example #6
        /// <summary>
        /// Error 21.65
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Trunc2ClassesDemo2_SEALED()
        {
            Console.WriteLine("init CreateCIFAR10Trunc2ClassesDemo2");

            var activation = Activation.ReLU;
            var net        = new ConvNet(3, 32)
            {
                IsTraining = true
            };

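            // Two conv blocks (16 then 32 filters, each conv-conv-pool-dropout),
            // a 256-unit hidden layer, and a 2-way exponential output for the
            // truncated two-class CIFAR-10 task.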
            net.AddLayer(new ConvLayer(outputDepth: 16, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 16, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new FlattenLayer(outputDim: 256, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new DenseLayer(outputDim: 2, activation: Activation.Exp));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.01D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Example #7
        // https://code.google.com/archive/p/cuda-convnet/ - CIFAR architectures + errors

        /// <summary>
        /// Creates a CNN for CIFAR-10 training (from https://cs.stanford.edu/people/karpathy/convnetjs/demo/cifar10.html)
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Demo2()
        {
            Console.WriteLine("init CreateCIFAR10Demo2");
            var activation = Activation.LeakyReLU();
            var net        = new ConvNet(3, 32)
            {
                IsTraining = true
            };

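            // ConvNetJS demo topology: three conv/max-pool stages, then two
            // 1024-unit dense layers with dropout and a 10-way output.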
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 5, padding: 2, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new ConvLayer(outputDepth: 40, windowSize: 5, padding: 2, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new ConvLayer(outputDepth: 60, windowSize: 5, padding: 2, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new FlattenLayer(outputDim: 1024, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new DenseLayer(outputDim: 1024, activation: activation));
            net.AddLayer(new DropoutLayer(0.25));
            net.AddLayer(new DenseLayer(outputDim: 10, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.05D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.Euclidean,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                LearningRateScheduler   = LearningRateScheduler.TimeBased(lrate, 0.005D)
            };

            alg.Build();

            return alg;
        }
Example #8
        /// <summary>
        /// Creates a CNN for CIFAR-10 training (from Keras)
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Demo1(ClassifiedSample<double[][,]> training)
        {
            Console.WriteLine("init CreateCIFAR10Demo1");
            var activation = Activation.ReLU;
            var net        = new ConvNet(3, 32)
            {
                IsTraining = true
            };

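            // Keras example topology: two conv-conv-pool-dropout blocks
            // (32 then 64 filters), a 512-unit hidden layer, and a 10-way
            // logistic output.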
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new FlattenLayer(outputDim: 512, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new DenseLayer(outputDim: 10, activation: Activation.Logistic(1)));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.01D;
            var alg   = new BackpropAlgorithm(training, net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 50,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            return alg;
        }
Example #9
        /// <summary>
        /// Error = 0.92
        /// </summary>
        public static BackpropAlgorithm CreateMNISTSimpleDemo_SEALED()
        {
            Console.WriteLine("init CreateMNISTSimpleDemo_SEALED");
            var activation = Activation.LeakyReLU();
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

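            // Padded 5x5 convolutions with LeakyReLU applied at the pooling
            // layers, a 32-unit hidden layer, dropout, and a 10-way output.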
            net.AddLayer(new ConvLayer(outputDepth: 12, windowSize: 5, padding: 2));
            net.AddLayer(new ConvLayer(outputDepth: 12, windowSize: 5, padding: 2));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 24, windowSize: 5, padding: 2));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 32, activation: activation));
            net.AddLayer(new DropoutLayer(rate: 0.5D));
            net.AddLayer(new DenseLayer(outputDim: 10, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.001D;
            var alg   = new BackpropAlgorithm(net)
            {
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 4,
                LossFunction            = Loss.Euclidean,
                Optimizer               = Optimizer.RMSProp,
                Regularizator           = Regularizator.L2(0.0001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
Example #10
        public static BackpropAlgorithm CreateMNISTHardDemo()
        {
            Console.WriteLine("init CreateMNISTHardDemo");
            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

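            // Variant of the CreateMNISTHardDemo overload above: padded
            // convolutions, an Exp output layer, and cross-entropy softmax
            // loss instead of a logistic output with Euclidean loss.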
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));
            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: Activation.Exp));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.005D;
            var alg   = new BackpropAlgorithm(net)
            {
                EpochCount              = 50,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                LossFunction            = Loss.CrossEntropySoftMax,
                Optimizer               = Optimizer.RMSProp,
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return alg;
        }
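
All of the factory methods above follow the same pattern: build (or deserialize) a ConvNet, set IsTraining, stack layers, finalize with _Build() and RandomizeParameters(), then configure a BackpropAlgorithm through its object initializer and call Build(). The sketch below condenses that pattern using only calls that appear in the examples above; how the returned algorithm is subsequently run is not shown in this listing and is left open here.

        public static BackpropAlgorithm CreateMinimalDemo()
        {
            // 1-channel 28x28 input, as in the MNIST examples.
            var net = new ConvNet(1, 28)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 8, windowSize: 3, activation: Activation.ReLU));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: Activation.ReLU));

            net._Build();                     // finalize layer geometry
            net.RandomizeParameters(seed: 0); // reproducible initial weights

            var lrate = 0.01D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction          = Loss.CrossEntropySoftMax,
                EpochCount            = 10,
                LearningRate          = lrate,
                BatchSize             = 8,
                LearningRateScheduler = LearningRateScheduler.Constant(lrate)
            };

            alg.Build();

            return alg;
        }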
Example #11
        public IEnumerable<TrainingSession> GetIterator(int maxIteration = int.MaxValue)
        {
            _stopwatch = Stopwatch.StartNew();
            _stop      = false;

            Epoch            = 1;
            EpochIncremented = false;

            if (LearningRateScheduler != null)
            {
                Learner.ResetLearningRate(new TrainingParameterScheduleDouble(LearningRateScheduler.LearningRate));
            }

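            // Main loop: one iteration per minibatch; exits when the sampler
            // is exhausted, a callback requests a stop, or maxIteration is hit.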
            for (Iteration = 1; Iteration <= maxIteration; ++Iteration)
            {
                var minibatch = Sampler.GetNextMinibatch(TrainingDevice);
                if (minibatch == null)
                {
                    break;
                }

                DataNameToInputMap.InitializeByMinibatch(minibatch);

                var arguments = DataNameToInputMap.GetVariableValueMap(minibatch);

                Trainer.TrainMinibatch(arguments, minibatch.SweepEnd, TrainingDevice);

                SampleCount = (int)Trainer.PreviousMinibatchSampleCount();
                Loss        = Trainer.PreviousMinibatchLossAverage();
                if (Trainer.EvaluationFunction() != null)
                {
                    Metric = Trainer.PreviousMinibatchEvaluationAverage();
                }

                foreach (var cb in Callbacks)
                {
                    cb.Run(this);
                }

                if (_stop)
                {
                    break;
                }

                yield return this;

                if (LearningRateScheduler != null)
                {
                    bool update = LearningRateScheduler.UpdateLearningRate(Epoch, Iteration, Loss);
                    if (update)
                    {
                        Learner.ResetLearningRate(new TrainingParameterScheduleDouble(LearningRateScheduler.LearningRate));
                    }
                }

                EpochIncremented = false;
                if (minibatch.SweepEnd)
                {
                    ++Epoch;
                    EpochIncremented = true;
                }
            }
        }