Example #1
        public static BackpropAlgorithm CreateMainColorsDemo1()
        {
            Console.WriteLine("init CreateMainColorsDemo1");
            var activation = Activation.ReLU;
            var net        = new ConvNet(3, 48)
            {
                IsTraining = true
            };

            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new DenseLayer(outputDim: 12, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 1.1D;
            var alg   = new BackpropAlgorithm(net)
            {
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                LossFunction            = Loss.Euclidean,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.0001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return(alg);
        }
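
A minimal usage sketch for the factory methods in this listing. RunMainColorsIterationSketch is hypothetical; RunIteration and LossValue appear in the gradient tests and in Example #8 below, and the double[][, ] input layout follows those tests. The library's actual training entry point is not shown in these snippets.

        public static void RunMainColorsIterationSketch()
        {
            // Hedged sketch: drive one manual training iteration of the demo algorithm.
            // Input shape follows ConvNet(3, 48): 3 channels of 48x48 values; the
            // 12-element one-hot target matches the final DenseLayer(outputDim: 12).
            var alg = CreateMainColorsDemo1();

            var input = new double[3][, ];
            for (int c = 0; c < 3; c++)
            {
                input[c] = new double[48, 48];
            }

            var expected = new double[12];
            expected[0] = 1.0D;

            alg.RunIteration(input, expected);
            Console.WriteLine("L:\t{0}", alg.LossValue);
        }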
Example #2
        public static ConvNet SimpleLinearNetworkWithDropout(double drate, int dseed)
        {
            var net = new ConvNet(1, 1, activation: new Mocks.LinearActivation());

            net.IsTraining = true;
            var layer1 = new DenseLayer(1);

            net.AddLayer(layer1);
            var layer2 = new DenseLayer(1);

            net.AddLayer(layer2);
            var layer3 = new DropoutLayer(drate, dseed);

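            // Pin the dropout mask by hand (a single 1x1 map set to true) so the
            // layer behaves deterministically in tests, regardless of drate/dseed.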
            layer3.Mask = new bool[1][, ] {
                new bool[, ] {
                    { true }
                }
            };
            net.AddLayer(layer3);
            var layer4 = new DenseLayer(1);

            net.AddLayer(layer4);
            net._Build();

            layer1.Weights[1] = 1;
            layer1.Weights[0] = 3;
            layer2.Weights[1] = -1;
            layer2.Weights[0] = 1;
            layer4.Weights[1] = 2;
            layer4.Weights[0] = -1;

            return(net);
        }
Example #3
        public static ConvNet SimpleLinearNetwork(IActivationFunction activation = null)
        {
            activation = activation ?? new Mocks.LinearActivation();

            var net = new ConvNet(1, 1, activation: activation);

            net.IsTraining = true;
            var layer1 = new DenseLayer(1);

            net.AddLayer(layer1);
            var layer2 = new DenseLayer(1);

            net.AddLayer(layer2);
            var layer3 = new DenseLayer(1);

            net.AddLayer(layer3);
            net._Build();

            layer1.Weights[1] = 1;
            layer1.Weights[0] = 3;
            layer2.Weights[1] = -1;
            layer2.Weights[0] = 1;
            layer3.Weights[1] = 2;
            layer3.Weights[0] = -1;

            return(net);
        }
Example #4
        public static BackpropAlgorithm CreateMNISTHardDemo(ClassifiedSample <double[][, ]> training)
        {
            Console.WriteLine("init CreateMNISTHardDemo");
            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));
            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: Activation.Logistic(1)));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.005D;
            var alg   = new BackpropAlgorithm(training, net)
            {
                LossFunction          = Loss.Euclidean,
                EpochCount            = 50,
                LearningRate          = lrate,
                BatchSize             = 1,
                LearningRateScheduler = LearningRateScheduler.Constant(lrate)
            };

            return(alg);
        }
Example #5
        /// <summary>
        /// Creates a simple CNN for MNIST training with mini-batch parallelization.
        /// </summary>
        public static BackpropAlgorithm CreateMNISTSimpleDemoWithBatching(ClassifiedSample <double[][, ]> training)
        {
            Console.WriteLine("init CreateMNISTSimpleDemoWithBatching");
            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 8, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 18, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.0001D;
            var alg   = new BackpropAlgorithm(training, net)
            {
                EpochCount              = 50,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                LossFunction            = Loss.Euclidean,
                Optimizer               = Optimizer.RMSProp,
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            return(alg);
        }
Example #6
        /// <summary>
        /// Error: 19.1
        /// </summary>
        public static BackpropAlgorithm CreateKaggleCatOrDogDemo_Pretrained()
        {
            Console.WriteLine("init CreateKaggleCatOrDogDemo_Pretrained");

            ConvNet net;
            var     assembly = Assembly.GetExecutingAssembly();

            using (var stream = assembly.GetManifestResourceStream("ML.DeepTests.Pretrained.cn_e16_p37.65.mld"))
            {
                net            = ConvNet.Deserialize(stream);
                net.IsTraining = true;
            }

            var lrate = 0.01D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return(alg);
        }
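
Several of the demos in this listing share LearningRateScheduler.DropBased(lrate, 5, 0.5D), pairing an initial rate with an epoch step and a drop factor. A hypothetical illustration of the usual drop-based rule those parameters suggest; the library's exact semantics are an assumption:

        // Hypothetical sketch of a drop-based schedule: with (initRate, 5, 0.5D),
        // the learning rate halves every 5 epochs (integer division by epochStep).
        public static double DropBasedRate(double initRate, int epochStep, double dropFactor, int epoch)
        {
            return initRate * Math.Pow(dropFactor, epoch / epochStep);
        }
        // DropBasedRate(0.01, 5, 0.5, 0)  == 0.01
        // DropBasedRate(0.01, 5, 0.5, 12) == 0.0025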
Example #7
        public void Gradient_1ConvLayer_1Iter_Euclidean()
        {
            // arrange

            var net = new ConvNet(3, 1, 1)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 1, activation: Activation.Atan));
            net._Build();
            net.RandomizeParameters(seed: 0);

            var point1 = RandomPoint(3, 1, 1);
            var point2 = RandomPoint(3, 1, 1); // just for 2 dim output
            var sample = new ClassifiedSample <double[][, ]>();

            sample[point1] = CLASSES[0];
            sample[point2] = CLASSES[1];

            var alg = new BackpropAlgorithm(net)
            {
                LearningRate = 0.1D,
                LossFunction = Loss.Euclidean
            };

            alg.Build();

            // act
            alg.RunIteration(point1, EXPECTED[0]);

            // assert
            AssertNetGradient(alg, point1, EXPECTED[0]);
        }
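
AssertNetGradient itself is not shown in this listing; the standard way to implement such a check is to compare the analytic gradient against a central finite difference. A self-contained sketch of the numeric side, with a hypothetical helper name and signature:

        // Hypothetical helper: numeric gradient of a scalar function f at w via
        // central differences, df/dw_i ~ (f(w + eps*e_i) - f(w - eps*e_i)) / (2*eps).
        public static double[] NumericGradient(Func<double[], double> f, double[] w, double eps = 1e-6)
        {
            var grad = new double[w.Length];
            for (int i = 0; i < w.Length; i++)
            {
                var saved = w[i];
                w[i] = saved + eps;
                var fPlus = f(w);
                w[i] = saved - eps;
                var fMinus = f(w);
                w[i] = saved;
                grad[i] = (fPlus - fMinus) / (2 * eps);
            }
            return grad;
        }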
Example #8
        private ML.DeepMethods.Algorithms.BackpropAlgorithm createCNNAlg_NN_ForTest()
        {
            var cnn = new ConvNet(2, 1)
            {
                IsTraining = true
            };

            cnn.AddLayer(new DenseLayer(15, activation: Activation.Logistic(1)));
            cnn.AddLayer(new MaxPoolingLayer(1, 1));
            //cnn.AddLayer(new _ActivationLayer(Activation.Logistic(1)));
            cnn.AddLayer(new DropoutLayer(0.1));
            cnn.AddLayer(new FlattenLayer(3, activation: Activation.Logistic(1)));
            //cnn.AddLayer(new _ActivationLayer(Activation.Logistic(1)));
            cnn.AddLayer(new MaxPoolingLayer(1, 1));

            cnn._Build();
            cnn.RandomizeParameters(0);

            var sample = new ClassifiedSample <double[][, ]>();

            foreach (var obj in Data.TrainingSample)
            {
                var data = obj.Key;

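                // Repack the flat double[] features into the double[length][1,1]
                // input layout that ConvNet consumes: one 1x1 map per feature.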
                var key = new double[data.Length][, ];
                for (int i = 0; i < data.Length; i++)
                {
                    key[i] = new double[1, 1];
                }

                for (int i = 0; i < data.Length; i++)
                {
                    key[i][0, 0] = data[i];
                }
                sample[key] = obj.Value;
            }

            var alg = new ML.DeepMethods.Algorithms.BackpropAlgorithm(sample, cnn);

            alg.EpochCount   = 6000;
            alg.LearningRate = 0.01D;
            alg.BatchSize    = 1;
            alg.LossFunction = Loss.Euclidean;

            int epoch = 0;

            alg.EpochEndedEvent += (o, e) =>
            {
                if (epoch++ % 300 != 0)
                {
                    return;
                }
                Console.WriteLine("----------------Epoch #: {0}", epoch);
                Console.WriteLine("L:\t{0}", alg.LossValue);
                Console.WriteLine("DL:\t{0}", alg.LossDelta);
                Console.WriteLine("DW:\t{0}", alg.Step2);
            };

            return(alg);
        }
Example #9
        private void testSerDeser()
        {
            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 8, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 18, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var path = @"F:\Work\AgniCore\_testftp\cn_e5_p53.88.mld";

            //using (var file = System.IO.File.Open(path, System.IO.FileMode.Create, System.IO.FileAccess.Write))
            //  net.Serialize(file);
            using (var file = System.IO.File.Open(path, System.IO.FileMode.Open, System.IO.FileAccess.Read))
            {
                var res = ConvNet.Deserialize(file);
            }
        }
Example #10
        protected override void Test()
        {
            ConvNet lenet1;
            var     fpath = Path.Combine(OutputPath, "cn_e26_p0.06.mld");

            //var fpath = @"F:\Work\git\ML\solution\ML.DigitsDemo\lenet1.mld";

            using (var stream = File.Open(fpath, FileMode.Open))
            {
                lenet1 = ConvNet.Deserialize(stream);
            }
            var alg = new BackpropAlgorithm(lenet1);

            var fout = Path.Combine(SrcPath, "result1.csv");

            using (var file = File.Open(fout, FileMode.Create, FileAccess.Write))
                using (var writer = new StreamWriter(file))
                {
                    writer.WriteLine("ImageId,Label");

                    int num = 1;
                    foreach (var data in m_Test)
                    {
                        var mark = alg.Predict(data);
                        var idx  = GeneralUtils.ArgMax(mark);
                        var cls  = m_Classes[idx];
                        writer.WriteLine("{0},{1}", num++, (int)cls.Value);
                    }

                    writer.Flush();
                }
        }
Example #11
        protected override void Test()
        {
            ConvNet lenet1;
            var     fpath = Path.Combine(ResultsFolder, "cn_e26_p0.06.mld");

            //var fpath = @"F:\Work\git\ML\solution\ML.DigitsDemo\lenet1.mld";

            using (var stream = File.Open(fpath, FileMode.Open))
            {
                lenet1 = ConvNet.Deserialize(stream);
            }
            var alg = new BackpropAlgorithm(m_Training, lenet1);

            var fout = Path.Combine(MnistSrc, "result1.csv");

            using (var file = File.Open(fout, FileMode.Create, FileAccess.Write))
                using (var writer = new StreamWriter(file))
                {
                    writer.WriteLine("ImageId,Label");

                    int num = 1;
                    foreach (var data in m_Test)
                    {
                        var cls = alg.Classify(data);
                        writer.WriteLine("{0},{1}", num++, (int)cls.Value);
                    }

                    writer.Flush();
                }
        }
Example #12
        public static BackpropAlgorithm CreateKaggleCatOrDogFiltersDemo1_Pretrained(string fpath)
        {
            Console.WriteLine("init CreateKaggleCatOrDogFiltersDemo1_Pretrained");

            ConvNet net;

            using (var stream = System.IO.File.Open(fpath, System.IO.FileMode.Open, System.IO.FileAccess.Read))
            {
                net            = ConvNet.Deserialize(stream);
                net.IsTraining = true;
            }

            var lrate = 0.001D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return(alg);
        }
Example #13
 public BackpropAlgorithm(ConvNet net) : base(net)
 {
     m_EpochCount          = DFT_EPOCH_COUNT;
     m_LearningRate        = DFT_LEARNING_RATE;
     m_Stop                = DTF_STOP_CRITERIA;
     m_BatchSize           = DFT_BATCH_SIZE;
     m_MaxBatchThreadCount = DFT_BATCH_THREAD_COUNT;
 }
Example #14
        private void initNet()
        {
            var assembly = Assembly.GetExecutingAssembly();

            using (var stream = assembly.GetManifestResourceStream("ML.DigitsDemo.mnist.mld"))
            {
                m_Network = ConvNet.Deserialize(stream);
            }
        }
Example #15
        protected ConvNetAlgorithmBase(ConvNet net)
        {
            if (net == null)
            {
                throw new MLException("Network can not be null");
            }

            m_Net = net;
        }
Example #16
        protected ConvNetAlgorithmBase(ClassifiedSample <double[][, ]> trainingSample, ConvNet net)
            : base(trainingSample)
        {
            if (net == null)
            {
                throw new MLException("Network can not be null");
            }

            m_Net = net;
        }
Example #17

(This snippet and Example #26 use TensorFlow.NET's Keras-style ConvNet model and eager API, not the ML.DeepMethods ConvNet used in the other examples.)

        void run_optimization(ConvNet conv_net, OptimizerV2 optimizer, Tensor x, Tensor y)
        {
            using var g = tf.GradientTape();
            var pred = conv_net.Apply(x, training: true);
            var loss = cross_entropy_loss(pred, y);

            // Compute gradients.
            var gradients = g.gradient(loss, conv_net.trainable_variables);

            // Update W and b following gradients.
            optimizer.apply_gradients(zip(gradients, conv_net.trainable_variables.Select(x => x as ResourceVariable)));
        }
Example #18
        public void Gradient_DifferentLayers_1Iter_CrossEntropy_Regularization()
        {
            // arrange

            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 5)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 3, padding: 1));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2, activation: Activation.Exp));
            net.AddLayer(new ActivationLayer(activation: Activation.Tanh));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));
            net.AddLayer(new DropoutLayer(rate: 0.5D));
            net.AddLayer(new DenseLayer(outputDim: 3, activation: Activation.Exp));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var sample = new ClassifiedSample <double[][, ]>();

            for (int i = 0; i < 3; i++)
            {
                var point = RandomPoint(1, 5, 5);
                sample[point] = new Class(i.ToString(), i);
            }

            var regularizator = Regularizator.Composite(Regularizator.L1(0.1D), Regularizator.L2(0.3D));
            var alg           = new BackpropAlgorithm(net)
            {
                LearningRate  = 0.1D,
                LossFunction  = Loss.CrossEntropySoftMax,
                Regularizator = regularizator
            };

            alg.Build();

            // act
            var data     = sample.First();
            var expected = new double[3] {
                1.0D, 0.0D, 0.0D
            };

            alg.RunIteration(data.Key, expected);
            regularizator.Apply(alg.Gradient, alg.Net.Weights);
            ((DropoutLayer)alg.Net[4]).ApplyCustomMask = true;

            // assert
            AssertNetGradient(alg, data.Key, expected);
        }
Example #19
 private void initNet()
 {
     try
     {
         var assembly = Assembly.GetExecutingAssembly();
         using (var stream = assembly.GetManifestResourceStream("ML.MainColorDemo.net.mld"))
         {
             m_Network            = ConvNet.Deserialize(stream);
             m_Network.IsTraining = false;
         }
     }
     catch (Exception error)
     {
         MessageBox.Show("Error while CNN deserialize: " + error.Message);
     }
 }
Example #20
        public void Gradient_MNISTSimple_1Iter()
        {
            // arrange

            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 14)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 4, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 8, windowSize: 5));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));

            net._Build();

            Randomize(net.Weights, -1.0D, 1.0D);

            var sample = new ClassifiedSample <double[][, ]>();

            for (int i = 0; i < 10; i++)
            {
                var point = RandomPoint(1, 14, 14);
                sample[point] = new Class(i.ToString(), i);
            }

            var alg = new BackpropAlgorithm(net)
            {
                LearningRate = 0.005D,
                LossFunction = Loss.Euclidean
            };

            alg.Build();

            // act
            var data     = sample.First();
            var expected = new double[10] {
                1.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D
            };

            alg.RunIteration(data.Key, expected);

            // assert
            AssertNetGradient(alg, data.Key, expected);
        }
Example #21
        /// <summary>
        /// Error 21.65
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Trunc2ClassesDemo2_SEALED()
        {
            Console.WriteLine("init CreateCIFAR10Trunc2ClassesDemo2");

            var activation = Activation.ReLU;
            var net        = new ConvNet(3, 32)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 16, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 16, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new FlattenLayer(outputDim: 256, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new DenseLayer(outputDim: 2, activation: Activation.Exp));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.01D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                Regularizator           = Regularizator.L2(0.001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return(alg);
        }
Example #22
        private ML.DeepMethods.Algorithms.BackpropAlgorithm createCNNAlg_NN_ForTest()
        {
            var cnn = new ConvNet(6, 1)
            {
                IsTraining = true
            };

            cnn.AddLayer(new DenseLayer(15, activation: Activation.Logistic(1)));
            cnn.AddLayer(new DenseLayer(15, activation: Activation.Logistic(1)));
            cnn.AddLayer(new DenseLayer(15, activation: Activation.Logistic(1)));
            //cnn.AddLayer(new MaxPoolingLayer(1, 1));
            //cnn.AddLayer(new _ActivationLayer(Activation.Logistic(1)));
            //cnn.AddLayer(new DropoutLayer(0.1));
            cnn.AddLayer(new FlattenLayer(2, activation: Activation.Logistic(1)));
            //cnn.AddLayer(new _ActivationLayer(Activation.Logistic(1)));
            //cnn.AddLayer(new MaxPoolingLayer(1, 1));

            cnn._Build();
            cnn.RandomizeParameters(0);

            var alg = new ML.DeepMethods.Algorithms.BackpropAlgorithm(cnn);

            alg.EpochCount   = 6000;
            alg.LearningRate = 0.01D;
            alg.BatchSize    = 1;
            alg.LossFunction = Loss.Euclidean;

            int epoch = 0;

            alg.EpochEndedEvent += (o, e) =>
            {
                if (epoch++ % 300 != 0)
                {
                    return;
                }
                Console.WriteLine("----------------Epoch #: {0}", epoch);
                Console.WriteLine("L:\t{0}", alg.LossValue);
                Console.WriteLine("DL:\t{0}", alg.LossDelta);
                Console.WriteLine("DW:\t{0}", alg.Step2);
            };

            return(alg);
        }
Example #23
        // https://code.google.com/archive/p/cuda-convnet/   - CIFAR architectures + errors

        /// <summary>
        /// Creates CNN for CIFAR-10 training (from https://cs.stanford.edu/people/karpathy/convnetjs/demo/cifar10.html)
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Demo2()
        {
            Console.WriteLine("init CreateCIFAR10Demo2");
            var activation = Activation.LeakyReLU();
            var net        = new ConvNet(3, 32)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 5, padding: 2, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new ConvLayer(outputDepth: 40, windowSize: 5, padding: 2, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new ConvLayer(outputDepth: 60, windowSize: 5, padding: 2, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new FlattenLayer(outputDim: 1024, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new DenseLayer(outputDim: 1024, activation: activation));
            net.AddLayer(new DropoutLayer(0.25));
            net.AddLayer(new DenseLayer(outputDim: 10, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.05D;
            var alg   = new BackpropAlgorithm(net)
            {
                LossFunction            = Loss.Euclidean,
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                LearningRateScheduler   = LearningRateScheduler.TimeBased(lrate, 0.005D)
            };

            alg.Build();

            return(alg);
        }
Example #24
        /// <summary>
        /// Creates CNN for CIFAR-10 training (from keras)
        /// </summary>
        public static BackpropAlgorithm CreateCIFAR10Demo1(ClassifiedSample <double[][, ]> training)
        {
            Console.WriteLine("init CreateCIFAR10Demo1");
            var activation = Activation.ReLU;
            var net        = new ConvNet(3, 32)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));

            net.AddLayer(new FlattenLayer(outputDim: 512, activation: Activation.ReLU));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new DenseLayer(outputDim: 10, activation: Activation.Logistic(1)));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.01D;
            var alg   = new BackpropAlgorithm(training, net)
            {
                LossFunction            = Loss.CrossEntropySoftMax,
                EpochCount              = 50,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                Optimizer               = Optimizer.Adadelta,
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            return(alg);
        }
Example #25
        public static ConvNet TestNetwork1()
        {
            var activation = Activation.Atan;
            var net        = new ConvNet(1, 2, 2)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 2, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 1, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 3, activation: activation));
            net.AddLayer(new DenseLayer(outputDim: 2, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            return(net);
        }
Example #26

        public bool Run()
        {
            tf.enable_eager_execution();

            PrepareData();

            // Build neural network model.
            var conv_net = new ConvNet(new ConvNetArgs
            {
                NumClasses = num_classes
            });

            // ADAM optimizer.
            var optimizer = keras.optimizers.Adam(learning_rate);

            // Run training for the given number of steps.
            foreach (var(step, (batch_x, batch_y)) in enumerate(train_data, 1))
            {
                // Run the optimization to update W and b values.
                run_optimization(conv_net, optimizer, batch_x, batch_y);

                if (step % display_step == 0)
                {
                    var pred = conv_net.Apply(batch_x);
                    var loss = cross_entropy_loss(pred, batch_y);
                    var acc  = accuracy(pred, batch_y);
                    print($"step: {step}, loss: {(float)loss}, accuracy: {(float)acc}");
                }
            }

            // Test model on validation set.
            {
                x_test = x_test["::100"];
                y_test = y_test["::100"];
                var pred = conv_net.Apply(x_test);
                accuracy_test = (float)accuracy(pred, y_test);
                print($"Test Accuracy: {accuracy_test}");
            }

            return(accuracy_test > 0.90);
        }
Example #27
        /// <summary>
        /// Error = 0.92
        /// </summary>
        public static BackpropAlgorithm CreateMNISTSimpleDemo_SEALED()
        {
            Console.WriteLine("init CreateMNISTSimpleDemo_SEALED");
            var activation = Activation.LeakyReLU();
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 12, windowSize: 5, padding: 2));
            net.AddLayer(new ConvLayer(outputDepth: 12, windowSize: 5, padding: 2));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 24, windowSize: 5, padding: 2));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
            net.AddLayer(new FlattenLayer(outputDim: 32, activation: activation));
            net.AddLayer(new DropoutLayer(rate: 0.5D));
            net.AddLayer(new DenseLayer(outputDim: 10, activation: activation));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.001D;
            var alg   = new BackpropAlgorithm(net)
            {
                EpochCount              = 500,
                LearningRate            = lrate,
                BatchSize               = 4,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 4,
                LossFunction            = Loss.Euclidean,
                Optimizer               = Optimizer.RMSProp,
                Regularizator           = Regularizator.L2(0.0001D),
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return(alg);
        }
Example #28
        public static BackpropAlgorithm CreateMNISTHardDemo()
        {
            Console.WriteLine("init CreateMNISTHardDemo");
            var activation = Activation.ReLU;
            var net        = new ConvNet(1, 28)
            {
                IsTraining = true
            };

            net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, padding: 1, activation: activation));
            net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
            net.AddLayer(new DropoutLayer(0.25));
            net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
            net.AddLayer(new DropoutLayer(0.5));
            net.AddLayer(new FlattenLayer(outputDim: 10, activation: Activation.Exp));

            net._Build();

            net.RandomizeParameters(seed: 0);

            var lrate = 0.005D;
            var alg   = new BackpropAlgorithm(net)
            {
                EpochCount              = 50,
                LearningRate            = lrate,
                BatchSize               = 8,
                UseBatchParallelization = true,
                MaxBatchThreadCount     = 8,
                LossFunction            = Loss.CrossEntropySoftMax,
                Optimizer               = Optimizer.RMSProp,
                LearningRateScheduler   = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
            };

            alg.Build();

            return(alg);
        }
Example #29
        public void Gradient_SimpleDropout_1Iter_Euclidean()
        {
            // arrange

            var net = new ConvNet(3, 1)
            {
                IsTraining = true
            };

            net.AddLayer(new DenseLayer(outputDim: 10, activation: Activation.Atan));
            net.AddLayer(new DropoutLayer(rate: 0.5D));
            net.AddLayer(new DenseLayer(outputDim: 2, activation: Activation.Atan));
            net._Build();
            net.RandomizeParameters(seed: 0);

            var point1 = RandomPoint(3, 1, 1);
            var point2 = RandomPoint(3, 1, 1); // just for 2 dim output
            var sample = new ClassifiedSample <double[][, ]>();

            sample[point1] = CLASSES[0];
            sample[point2] = CLASSES[1];

            var alg = new BackpropAlgorithm(net)
            {
                LearningRate = 0.1D,
                LossFunction = Loss.Euclidean
            };

            alg.Build();

            // act
            alg.RunIteration(point1, EXPECTED[0]);
            ((DropoutLayer)alg.Net[1]).ApplyCustomMask = true;

            // assert
            AssertNetGradient(alg, point1, EXPECTED[0]);
        }