Example #1
 protected SparseSETOptimizer(MomentumOptimizer<Real> optimizer, int beginStep, int endStep, int frequency, Real dropFraction = 0.1f, string dropFractionAnneal = "constant", string growInit = "zeros")
 {
     // Wrap the inner dense optimizer and record the window and frequency
     // that control when connections are dropped and regrown.
     this._optimizer = optimizer;
     this._dropFractionInitialValue = dropFraction;
     this._beginStep    = beginStep;
     this._endStep      = endStep;
     this._frequency    = frequency;
     this._frequencyVal = frequency;
     // Start negative so the very first step is already eligible for an update.
     lastUpdateStep     = -_frequencyVal;
 }
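
The dropFractionAnneal string suggests the drop fraction can decay over the [beginStep, endStep] window instead of staying constant. A minimal sketch of what such a schedule might look like, assuming a "cosine" option in the spirit of the RigL reference implementation (the option names and formula in this port may differ):

 // Hypothetical helper, for illustration only: anneal the drop fraction
 // from its initial value down to zero between beginStep and endStep.
 static float AnnealedDropFraction(string anneal, float initialValue, int step, int beginStep, int endStep)
 {
     if (anneal == "cosine")
     {
         double progress = (double)(step - beginStep) / (endStep - beginStep);
         return initialValue * 0.5f * (1f + (float)Math.Cos(Math.PI * progress));
     }
     return initialValue; // "constant": the same fraction at every update
 }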
Example #2
        public MNISTNetwork(int inputSize, int hiddenSize, int outputSize) : base()
        {
            this.inputSize  = inputSize;
            this.hiddenSize = hiddenSize;
            this.outputSize = outputSize;

            // Two-layer fully connected classifier: affine -> ReLU -> affine -> softmax.
            affine1 = new AffineLayer(inputSize, hiddenSize);
            relu    = new ReLULayer();
            affine2 = new AffineLayer(hiddenSize, outputSize);
            softmax = new SoftmaxLayer();

            // Plain SGD with a momentum coefficient of 0.9.
            optimizer = new MomentumOptimizer(0.9f);
        }
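
For MNIST the input is a flattened 28 x 28 image (784 pixels) and there are 10 digit classes, so a typical instantiation would look like this (the hidden size is an arbitrary pick for illustration):

            var network = new MNISTNetwork(inputSize: 784, hiddenSize: 100, outputSize: 10);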
Example #3
        public MNISTBatchNormalizationNetwork(int inputSize, int hiddenSize, int outputSize) : base()
        {
            this.inputSize  = inputSize;
            this.hiddenSize = hiddenSize;
            this.outputSize = outputSize;

            affine1 = new AffineLayer(inputSize, hiddenSize, Mathf.Sqrt(2.0f / inputSize));
            bn1     = new BatchNormalizationLayer(hiddenSize, hiddenSize);
            relu1   = new ReLULayer();

            affine2 = new AffineLayer(hiddenSize, hiddenSize, Mathf.Sqrt(2.0f / hiddenSize));
            bn2     = new BatchNormalizationLayer(hiddenSize, hiddenSize);
            relu2   = new ReLULayer();

            affine3 = new AffineLayer(hiddenSize, outputSize, Mathf.Sqrt(2.0f / hiddenSize));
            softmax = new SoftmaxLayer();

            optimizer = new MomentumOptimizer(0.9f);
        }
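
The Mathf.Sqrt(2.0f / fanIn) arguments set the He initialization scale recommended for ReLU activations: weights are drawn with standard deviation sqrt(2 / fan_in). For inputSize = 784, for example, that gives sqrt(2 / 784) ≈ 0.0505, so each layer starts with weights scaled down in proportion to its input width.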
Example #4
        public void MomentumOptimizer_SimpleMultivar()
        {
            // arrange
            var func      = new Mocks.SimpleMultivar();
            var lr        = 0.1D;
            var mu        = 0.9D;
            var w         = new double[2][] { new[] { 1.0D, 1.0D }, new[] { 1.0D } };
            var optimizer = new MomentumOptimizer(mu);

            // act & assert

            optimizer.Push(w, func.Gradient(w), lr);
            Assert.AreEqual(0, w[0][0]);
            Assert.AreEqual(0.8, w[0][1]);
            Assert.AreEqual(0, w[1][0]);
            Assert.AreEqual(2.04, optimizer.Step2, EPS);

            optimizer.Push(w, func.Gradient(w), lr);
            Assert.AreEqual(-0.74, w[0][0], EPS);
            Assert.AreEqual(0.14, w[0][1], EPS);
            Assert.AreEqual(-0.74, w[1][0], EPS);
            Assert.AreEqual(1.5308, optimizer.Step2, EPS);

            optimizer.Push(w, func.Gradient(w), lr);
            Assert.AreEqual(-0.490, w[0][0], EPS);
            Assert.AreEqual(-0.834, w[0][1], EPS);
            Assert.AreEqual(-0.490, w[1][0], EPS);
            Assert.AreEqual(1.073676, optimizer.Step2, EPS);

            optimizer.Push(w, func.Gradient(w), lr);
            Assert.AreEqual(0.1562, w[0][0], EPS);
            Assert.AreEqual(-1.4062, w[0][1], EPS);
            Assert.AreEqual(0.1562, w[1][0], EPS);
            Assert.AreEqual(1.16256172, optimizer.Step2, EPS);

            optimizer.Push(w, func.Gradient(w), lr);
            Assert.AreEqual(0.2691, w[0][0], EPS);
            Assert.AreEqual(-1.01498, w[0][1], EPS);
            Assert.AreEqual(0.2691, w[1][0], EPS);
            Assert.AreEqual(0.17854591, optimizer.Step2, EPS);
        }
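
The assertions are consistent with the classic momentum update, with Step2 reading back the squared L2 norm of the most recent step: on the first Push the weights move by (-1, -0.2, -1), and 1^2 + 0.2^2 + 1^2 = 2.04, which matches the first Step2 assertion. A minimal sketch of an update rule that reproduces those numbers (a hypothetical re-implementation, not the library's source):

        public sealed class MomentumSketch
        {
            private readonly double _mu;
            private double[][] _velocity;
            public double Step2 { get; private set; }

            public MomentumSketch(double mu) => _mu = mu;

            // v <- mu * v - lr * grad;  w <- w + v;  Step2 = ||v||^2
            public void Push(double[][] w, double[][] grad, double lr)
            {
                if (_velocity == null)
                {
                    _velocity = new double[w.Length][];
                    for (var i = 0; i < w.Length; i++)
                        _velocity[i] = new double[w[i].Length];
                }

                Step2 = 0.0;
                for (var i = 0; i < w.Length; i++)
                    for (var j = 0; j < w[i].Length; j++)
                    {
                        _velocity[i][j] = _mu * _velocity[i][j] - lr * grad[i][j];
                        w[i][j] += _velocity[i][j];
                        Step2   += _velocity[i][j] * _velocity[i][j];
                    }
            }
        }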
Example #5
        private static void Main()
        {
            Console.Title = "Digit Recognizer - Engine";

            var learningRate         = 0.00035;
            var epochs               = 10;
            var regularizationFactor = 15.0;

            LearningPipeline pipeline = new LearningPipeline()
                                        .UseGradientClipping()
                                        .UseL2Regularization(regularizationFactor)
                                        .UseDropout(0.5)
                                        .SetWeightsInitializer(InitializerType.RandomInitialization)
                                        .SetEpochCount(epochs);

            // 784 input pixels -> 200 -> 100 hidden units -> 10 output classes.
            var layers = new List<NnLayer>
            {
                new NnLayer(784, 200, new LeakyRelu()),
                new NnLayer(200, 100, new LeakyRelu()),
                new NnLayer(100, 10, new Softmax())
            };

            var nn = new NeuralNetwork(layers, learningRate);

            var optimizer = new MomentumOptimizer(nn, new CrossEntropy(), 0.93);

            var provider = new BatchDataProvider(DirectoryHelper.ExpandedTrainLabelsPath, DirectoryHelper.ExpandedTrainImagesPath, 100);

            pipeline.Add(optimizer);
            pipeline.Add(nn);
            pipeline.Add(provider);
            PredictionModel model = pipeline.Run();

            var provider1 = new BatchDataProvider(DirectoryHelper.TestLabelsPath, DirectoryHelper.TestImagesPath, 10000);
            var acc       = 0.0;

            MnistImageBatch data = provider1.GetData();

            List<double[]> predictions = data.Pixels.Select(pixels => model.Predict(pixels)).ToList();

            for (var i = 0; i < data.Labels.Length; i++)
            {
                if (data.Labels[i] == predictions[i].ArgMax())
                {
                    acc++;
                }
            }

            // Fraction of the 10,000 test images classified correctly.
            acc /= 10000.0;

            Console.WriteLine($"Accuracy on the test data is: {acc:P2}");

            string basePath = Path.GetFullPath(Path.GetDirectoryName(AppDomain.CurrentDomain.BaseDirectory) +
                                               DirectoryHelper.ModelsFolder);

            string modelName = $"{Guid.NewGuid()}-{acc:N4}.nn";

            string filename = $"{basePath}/{modelName}";

            model.Save(filename);

            Console.ReadKey();
        }
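
ArgMax() on each prediction vector is presumably an extension method returning the index of the largest output, i.e. the predicted digit. A minimal sketch of such a helper (illustrative, not the project's actual implementation):

        public static class ArrayExtensions
        {
            // Index of the largest element; for a softmax output,
            // this is the most probable class.
            public static int ArgMax(this double[] values)
            {
                var best = 0;
                for (var i = 1; i < values.Length; i++)
                    if (values[i] > values[best]) best = i;
                return best;
            }
        }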
Example #6
 public SparseRigLOptimizer(MomentumOptimizer<Real> optimizer, int beginStep, int endStep, int frequency, Real dropFraction = 0.1f, string dropFractionAnneal = "constant", string growInit = "zeros", Real initialAccScale = 0.0f) : base(optimizer, beginStep, endStep, frequency, dropFraction, dropFractionAnneal, growInit)
 {
     // Presumably scales the momentum accumulators of newly grown connections.
     _initialAccScale = initialAccScale;
 }
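
For context: SET-style sparse optimizers periodically drop low-magnitude connections and grow replacements at random, while RigL instead grows the connections with the largest gradient magnitude. The extra initialAccScale parameter here presumably seeds the wrapped momentum optimizer's accumulators for freshly grown connections; that reading is inferred from the parameter name, not from this snippet.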