Example #1
        public void Run()
        {
            var task    = _trainer.Train(_cts.Token);
            var running = true;

            while (running)
            {
                var c = Console.ReadKey().Key;
                switch (c)
                {
                case ConsoleKey.Q:
                    Console.WriteLine("Stopping training manually");
                    running = false;
                    break;

                case ConsoleKey.R:
                    Console.WriteLine("Reseting optimizer cache!");
                    _trainer.Pause();
                    _network.ResetOptimizer();
                    _trainer.Resume();
                    break;
                }
            }

            _cts.Cancel();
            Task.WaitAll(task);
        }
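
This Run loop only shows the keyboard handling; the _trainer, _cts and _network members it touches are declared elsewhere in the class. A minimal sketch of what those members would have to expose, inferred purely from the calls above (the interface names here are hypothetical, not a specific library API):

        // Hypothetical supporting members, inferred from the calls in Run();
        // requires System.Threading and System.Threading.Tasks.
        public interface IPausableTrainer
        {
            Task Train(CancellationToken token); // runs until the token is cancelled
            void Pause();
            void Resume();
        }

        public interface IResettableNetwork
        {
            void ResetOptimizer();               // drops optimizer state (e.g. momentum/Adam caches)
        }

        private readonly CancellationTokenSource _cts = new CancellationTokenSource();
        private readonly IPausableTrainer _trainer;
        private readonly IResettableNetwork _network;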
Example #2
        private static void RegressionUpdate(int n, double[] x, TrainerBase trainer, double[] y)
        {
            var netx   = new Volume(1, 1, 1);
            var avloss = 0.0;

            for (var iters = 0; iters < 50; iters++)
            {
                for (var ix = 0; ix < n; ix++)
                {
                    netx.Set(0, 0, 0, x[ix]);
                    trainer.Train(netx, y[ix]);
                    avloss += trainer.Loss;
                }
            }

            avloss /= n * 50.0;
            Console.WriteLine("Loss:" + avloss);
        }
Example #3
        private static void Classify2DUpdate(int n, List<double[]> data, TrainerBase trainer, List<int> labels)
        {
            var netx   = new Volume(1, 1, 2);
            var avloss = 0.0;

            for (var iters = 0; iters < 50; iters++)
            {
                for (var ix = 0; ix < n; ix++)
                {
                    netx.Set(0, 0, 0, data[ix][0]);
                    netx.Set(0, 0, 1, data[ix][1]);

                    trainer.Train(netx, labels[ix]);
                    avloss += trainer.Loss;
                }
            }

            avloss /= n * 50.0;
            Console.WriteLine("Loss:" + avloss);
        }
Example #4
        private static void RegressionUpdate(int n, double[] x, TrainerBase<double> trainer, double[] y)
        {
            var netx   = BuilderInstance.Volume.SameAs(new Shape(1, 1, 1, n));
            var nety   = BuilderInstance.Volume.SameAs(new Shape(1, 1, 1, n));
            var avloss = 0.0;

            for (var ix = 0; ix < n; ix++)
            {
                netx.Set(0, 0, 0, ix, x[ix]);
                nety.Set(0, 0, 0, ix, y[ix]);
            }

            for (var iters = 0; iters < 50; iters++)
            {
                trainer.Train(netx, nety);
                avloss += trainer.Loss;
            }

            avloss /= n * 50.0;
            Console.WriteLine("Loss:" + avloss);
        }
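
RegressionUpdate above packs all n samples into a single (1, 1, 1, n) volume and trains on the whole batch at once. The BuilderInstance, Shape and Volume types suggest these snippets target ConvNetSharp; under that assumption, here is a sketch of a setup that could drive this helper (the method name RunRegressionDemo and the layer sizes are illustrative, not taken from the original code):

using ConvNetSharp.Core;
using ConvNetSharp.Core.Layers.Double;
using ConvNetSharp.Core.Training.Double;

// ... inside the same class as RegressionUpdate ...

        private static void RunRegressionDemo(int n, double[] x, double[] y)
        {
            var net = new Net<double>();
            net.AddLayer(new InputLayer(1, 1, 1));   // one scalar input per sample
            net.AddLayer(new FullyConnLayer(20));
            net.AddLayer(new ReluLayer());
            net.AddLayer(new FullyConnLayer(1));
            net.AddLayer(new RegressionLayer());     // L2 regression loss

            var trainer = new SgdTrainer(net)
            {
                LearningRate = 0.01,
                BatchSize    = n                     // matches the (1, 1, 1, n) batch volumes
            };

            RegressionUpdate(n, x, trainer, y);
        }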
Example #5
        private static void Classify2DUpdate(int n, List<double[]> data, TrainerBase<double> trainer, List<int> labels)
        {
            var avloss = 0.0;

            //var netx = new Volume(new double[2], new Shape(1, 1, 2, 1));
            //for (var iters = 0; iters < 50; iters++)
            //{
            //    for (var ix = 0; ix < n; ix++)
            //    {
            //        var hotLabels = new Volume(new double[2], new Shape(1, 1, 2, 1));
            //        hotLabels.Set(0, 0, labels[ix], 0, 1.0);

            //        netx.Set(0, 0, 0, data[ix][0]);
            //        netx.Set(0, 0, 1, data[ix][1]);

            //        trainer.Train(netx, hotLabels);
            //        avloss += trainer.Loss;
            //    }
            //}

            var netx      = new Volume(new double[2 * n], new Shape(1, 1, 2, n));
            var hotLabels = new Volume(new double[2 * n], new Shape(1, 1, 2, n));

            for (var ix = 0; ix < n; ix++)
            {
                hotLabels.Set(0, 0, labels[ix], ix, 1.0);

                netx.Set(0, 0, 0, ix, data[ix][0]);
                netx.Set(0, 0, 1, ix, data[ix][1]);
            }

            for (var iters = 0; iters < 50; iters++)
            {
                trainer.Train(netx, hotLabels);
                avloss += trainer.Loss;
            }

            avloss /= 50.0;
            Console.WriteLine("Loss:" + avloss);
        }
Example #6
 public void BatchTrain(double[][] batchInputs, double[][] batchOutputs, int iterations, Action<double, int, string> progressCallback)
 {
     trainer.BatchSize = batchInputs.Length;
     foreach (int currentIteration in Enumerable.Range(1, iterations))
     {
         Randomizer.Shuffle(batchInputs, batchOutputs);
         (Volume<double> inputs, Volume<double> outputs) = GetVolumeDataSetsFromArrays(batchInputs, batchOutputs);
         trainer.Train(inputs, outputs);
         var error = network.GetCostLoss(inputs, outputs);
         if (progressCallback != null)
         {
             progressCallback(error, currentIteration, "Supervised");
         }
         inputs.Dispose();
         outputs.Dispose();
         if (this.ShouldStopTraning)
         {
             this.ShouldStopTraning = false;
             break;
         }
     }
 }
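
BatchTrain depends on two helpers that are not part of this example: Randomizer.Shuffle, which shuffles the two arrays in step, and GetVolumeDataSetsFromArrays, which packs the jagged arrays into volumes. A hypothetical sketch of the latter, assuming ConvNetSharp volumes and the same (1, 1, depth, batchSize) layout used in the other examples:

 // Hypothetical sketch; the real helper is not shown in this example.
 // Requires ConvNetSharp.Volume and ConvNetSharp.Volume.Double (for BuilderInstance).
 private static (Volume<double> inputs, Volume<double> outputs) GetVolumeDataSetsFromArrays(double[][] batchInputs, double[][] batchOutputs)
 {
     var batchSize = batchInputs.Length;
     var inputs    = BuilderInstance.Volume.SameAs(new Shape(1, 1, batchInputs[0].Length, batchSize));
     var outputs   = BuilderInstance.Volume.SameAs(new Shape(1, 1, batchOutputs[0].Length, batchSize));

     for (var i = 0; i < batchSize; i++)
     {
         for (var d = 0; d < batchInputs[i].Length; d++)
         {
             inputs.Set(0, 0, d, i, batchInputs[i][d]);    // feature d of sample i
         }

         for (var d = 0; d < batchOutputs[i].Length; d++)
         {
             outputs.Set(0, 0, d, i, batchOutputs[i][d]);  // target d of sample i
         }
     }

     return (inputs, outputs);
 }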
Example #7
        private static void Classify2DUpdate(int n, List<double[]> data, TrainerBase<double> trainer, List<int> labels)
        {
            var avloss    = 0.0;
            var netx      = BuilderInstance.Volume.SameAs(new Shape(1, 1, 2, n));
            var hotLabels = BuilderInstance.Volume.SameAs(new Shape(1, 1, 2, n));

            for (var ix = 0; ix < n; ix++)
            {
                hotLabels.Set(0, 0, labels[ix], ix, 1.0);

                netx.Set(0, 0, 0, ix, data[ix][0]);
                netx.Set(0, 0, 1, ix, data[ix][1]);
            }

            for (var iters = 0; iters < 50; iters++)
            {
                trainer.Train(netx, hotLabels);
                avloss += trainer.Loss;
            }

            avloss /= 50.0;
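            // k is assumed to be an int counter field declared elsewhere in the class (not shown in this snippet).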
            Console.WriteLine(k++ + " Loss:" + avloss);
        }
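
As with the regression sketch after Example #4, Classify2DUpdate needs a trainer around a network whose output matches the one-hot (1, 1, 2, n) label volume, i.e. a two-class softmax head. A hedged sketch, again assuming ConvNetSharp (RunClassifyDemo and the hidden-layer size are illustrative):

using System.Collections.Generic;
using ConvNetSharp.Core;
using ConvNetSharp.Core.Layers.Double;
using ConvNetSharp.Core.Training.Double;

// ... inside the same class as Classify2DUpdate ...

        private static void RunClassifyDemo(int n, List<double[]> data, List<int> labels)
        {
            var net = new Net<double>();
            net.AddLayer(new InputLayer(1, 1, 2));   // two features per sample
            net.AddLayer(new FullyConnLayer(6));
            net.AddLayer(new TanhLayer());
            net.AddLayer(new FullyConnLayer(2));
            net.AddLayer(new SoftmaxLayer(2));       // two classes, matching the one-hot labels

            var trainer = new SgdTrainer(net)
            {
                LearningRate = 0.01,
                BatchSize    = n
            };

            Classify2DUpdate(n, data, trainer, labels);
        }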