private static void ExecuteNeuralNet(
            string name,
            TestNet net,
            int batchSize,
            int totalSets,
            int iterations)
        {
            // Benchmark SGD training of <paramref name="net"/> over generated
            // sample sets, printing total / forward / backward / update timings.
            var sampleSets = CreateSampleSets(net, batchSize, totalSets);
            var timer      = new Stopwatch();

            Console.WriteLine($"- {name} ------");
            timer.Restart();

            var trainer = new SgdTrainer(net)
            {
                LearningRate = 0.01,
                Momentum     = 0.5,
                BatchSize    = batchSize
            };

            for (var iteration = 0; iteration < iterations; iteration++)
            {
                foreach (var sampleSet in sampleSets)
                {
                    trainer.Train(sampleSet.Inputs[0], sampleSet.Outputs);
                }
            }

            timer.Stop();

            Console.WriteLine("    total: {0:0.000}ms", timer.ElapsedMilliseconds);
            Console.WriteLine("  forward: {0:0.000}ms", trainer.ForwardTimeMs);
            Console.WriteLine(" backward: {0:0.000}ms", trainer.BackwardTimeMs);
            Console.WriteLine("   update: {0:0.000}ms", trainer.UpdateWeightsTimeMs);
        }
Exemple #2
0
        private void CreateNetwork()
        {
            // Two conv/pool stages followed by a small fully connected head
            // classifying windowSize x windowSize single-channel inputs into 2 classes.
            net = new Net<float>();

            net.AddLayer(new InputLayer(windowSize, windowSize, 1));
            net.AddLayer(new ConvLayer(5, 5, 3) { Stride = 1, Pad = 0 }); // 21 x 21
            net.AddLayer(new ReluLayer());                                // 21 x 21
            net.AddLayer(new PoolLayer(2, 2));                            // 10 x 10
            net.AddLayer(new ConvLayer(5, 5, 3) { Stride = 1, Pad = 0 }); // 6 x 6
            net.AddLayer(new ReluLayer());
            net.AddLayer(new PoolLayer(2, 2));
            net.AddLayer(new FullyConnLayer(16));
            net.AddLayer(new TanhLayer());
            net.AddLayer(new FullyConnLayer(2));
            net.AddLayer(new SoftmaxLayer(2));

            // Plain SGD with a small L2 penalty.
            trainer = new SgdTrainer(net) { LearningRate = 0.01f, L2Decay = 0.001f };
        }
 public TrainingServiceProvider()
 {
     // Default configuration: a Keras-backed model alongside a local CNN
     // wrapped in an SGD trainer.
     _framework = "keras";
     _model     = BuildKerasModel();
     _cnn       = BuildCNN();
     _trainer   = new SgdTrainer<double>(_cnn);
 }
        private void MnistDemo()
        {
            // MNIST demo: two conv/pool stages and a 10-way softmax classifier,
            // trained with momentum SGD until a key is pressed.
            var data = new DataSets();

            if (!data.Load(100))
            {
                return;
            }

            // Create network
            this._net = new Net<double>();
            this._net.AddLayer(new InputLayer(28, 28, 1));
            this._net.AddLayer(new ConvLayer(5, 5, 8) { Stride = 1, Pad = 2 });
            this._net.AddLayer(new ReluLayer());
            this._net.AddLayer(new PoolLayer(2, 2) { Stride = 2 });
            this._net.AddLayer(new ConvLayer(5, 5, 16) { Stride = 1, Pad = 2 });
            this._net.AddLayer(new ReluLayer());
            this._net.AddLayer(new PoolLayer(3, 3) { Stride = 3 });
            this._net.AddLayer(new FullyConnLayer(10));
            this._net.AddLayer(new SoftmaxLayer(10));

            this._trainer = new SgdTrainer<double>(this._net)
            {
                LearningRate = 0.01,
                BatchSize    = 20,
                L2Decay      = 0.001,
                Momentum     = 0.9
            };

            Console.WriteLine("Convolutional neural network learning...[Press any key to stop]");
            do
            {
                // One SGD step on a fresh training batch...
                var trainBatch = data.Train.NextBatch(this._trainer.BatchSize);
                Train(trainBatch.Item1, trainBatch.Item2, trainBatch.Item3);

                // ...then track accuracy on a test batch.
                var testBatch = data.Test.NextBatch(this._trainer.BatchSize);
                Test(testBatch.Item1, testBatch.Item3, this._testAccWindow);

                Console.WriteLine("Loss: {0} Train accuracy: {1}% Test accuracy: {2}%", this._trainer.Loss,
                                  Math.Round(this._trainAccWindow.Items.Average() * 100.0, 2),
                                  Math.Round(this._testAccWindow.Items.Average() * 100.0, 2));

                Console.WriteLine("Example seen: {0} Fwd: {1}ms Bckw: {2}ms", this._stepCount,
                                  Math.Round(this._trainer.ForwardTimeMs, 2),
                                  Math.Round(this._trainer.BackwardTimeMs, 2));
            } while (!Console.KeyAvailable);
        }
        public static void Classify2DDemo()
        {
            // 2-D point classification demo: two tanh hidden layers feeding a
            // 2-way softmax, trained until a key is pressed, then graphed.
            var net = new Net <double>();

            net.AddLayer(new InputLayer <double>());
            net.AddLayer(new FullyConnLayer <double>(6));
            net.AddLayer(new TanhLayer <double>());
            net.AddLayer(new FullyConnLayer <double>(2));
            net.AddLayer(new TanhLayer <double>());
            net.AddLayer(new FullyConnLayer <double>(2));
            net.AddLayer(new SoftmaxLayer <double>());

            // Hard-coded training points with their class labels.
            var samples = new[]
            {
                (Point: new[] { -0.4326, 1.1909 }, Label: 1),
                (Point: new[] { 3.0, 4.0 }, Label: 1),
                (Point: new[] { 0.1253, -0.0376 }, Label: 1),
                (Point: new[] { 0.2877, 0.3273 }, Label: 1),
                (Point: new[] { -1.1465, 0.1746 }, Label: 1),
                (Point: new[] { 1.8133, 1.0139 }, Label: 0),
                (Point: new[] { 2.7258, 1.0668 }, Label: 0),
                (Point: new[] { 1.4117, 0.5593 }, Label: 0),
                (Point: new[] { 4.1832, 0.3044 }, Label: 0),
                (Point: new[] { 1.8636, 0.1677 }, Label: 0),
                (Point: new[] { 0.5, 3.2 }, Label: 1),
                (Point: new[] { 0.8, 3.2 }, Label: 1),
                (Point: new[] { 1.0, -2.2 }, Label: 1)
            };

            var data   = new List <double[]>();
            var labels = new List <int>();

            foreach (var (point, label) in samples)
            {
                data.Add(point);
                labels.Add(label);
            }

            var n = labels.Count;

            var trainer = new SgdTrainer <double>(net, 0.01);

            // Keep updating the classifier until the user presses a key.
            do
            {
                Classify2DUpdate(n, data, trainer, labels);
            } while (!Console.KeyAvailable);

            // Display graph
            var vm  = new ViewModel <double>(net.Cost);
            var app = new Application();

            app.Run(new GraphControl { DataContext = vm });
        }
Exemple #6
0
        /// <summary>
        /// Runs one training step on the stored result volume against a fixed
        /// one-hot target (class 0 of 10), whose volume shape is built from the
        /// four dimension values.
        /// </summary>
        /// <param name="val1">First dimension of the target volume's shape.</param>
        /// <param name="val2">Second dimension of the target volume's shape.</param>
        /// <param name="val3">Third dimension of the target volume's shape.</param>
        /// <param name="val4">Fourth dimension of the target volume's shape.</param>
        internal void TrainNetwork(double val1, double val2, double val3, double val4)
        {
            // NOTE(review): L2Decay is negative here, which would reward larger
            // weights instead of decaying them — confirm this is intentional.
            var trainer = new SgdTrainer(_net)
            {
                LearningRate = 0.3, L2Decay = -0.5
            };

            // NOTE(review): the null-conditional on Volume means a missing volume
            // builder silently passes null as the expected output — verify Train
            // tolerates that, or that a builder is always registered.
            trainer.Train(_trainResult, BuilderInstance <double> .Volume?.From(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 },
                                                                               new Shape((int)val1, (int)val2, (int)val3, (int)val4)));
        }
Exemple #7
0
        public double TeachCNN(string pathL, string pathT, int acc, double learnRate, int size, double mom)
        {
            // Train the CNN until the averaged train/test accuracy reaches `acc`.
            // Returns the reached accuracy, -2 if the datasets fail to load, or
            // -1 if the network has no layers.

            // Lazily build the network on first use.
            if (net.Layers.Count == 0)
            {
                CreateCNN();
            }

            var datasets = new DataSets(pathL, pathT);

            Console.WriteLine("DataSets Created");
            if (!datasets.Load())
            {
                return(-2);
            }
            Console.WriteLine("DataSets Loaded");

            Aim     = acc;
            trainer = new SgdTrainer <double>(net)
            {
                LearningRate = learnRate,
                BatchSize    = size,
                Momentum     = mom
            };

            // Guard clause replaces the original if/else around the training loop.
            if (net.Layers.Count == 0)
            {
                return(-1);
            }

            do
            {
                var trainSample = datasets.Train.NextBatch(trainer.BatchSize, classes);
                Train(trainSample.Item1, trainSample.Item2, trainSample.Item3);

                var testSample = datasets.Test.NextBatch(trainer.BatchSize, classes);
                Test(testSample.Item1, testSample.Item3, testAccWindow);

                // Cache the rounded window averages; they are reused below.
                var trainAcc = Math.Round(trainAccWindow.Items.Average() * 100.0, 2);
                var testAcc  = Math.Round(testAccWindow.Items.Average() * 100.0, 2);

                Console.WriteLine("Loss: {0} Train accuracy: {1}% Test accuracy: {2}% Average: {3}%", trainer.Loss,
                                  trainAcc, testAcc, (trainAcc + testAcc) / 2);

                Console.WriteLine("Example seen: {0} Fwd: {1}ms Bckw: {2}ms", stepCount,
                                  Math.Round(trainer.ForwardTimeMs, 2),
                                  Math.Round(trainer.BackwardTimeMs, 2));

                Acc = (testAcc + trainAcc) / 2;
            } while (Acc < Aim);

            Console.WriteLine($"{stepCount}");
            isNetLearned = true;
            return(Acc);
        }
        private void SetTrainer()
        {
            // Build an SGD trainer from the learning rate / batch size entered
            // in the UI text boxes (same parse order as before).
            var learningRate = double.Parse(txt_LearingRate.Text);
            var batchSize    = int.Parse(txt_BatchSize.Text);

            this._trainer = new SgdTrainer <double>(this._net)
            {
                LearningRate = learningRate,
                BatchSize    = batchSize,
                L2Decay      = 0.001,
                Momentum     = 0.9
            };

            // Accuracy window sized to one batch.
            this._train_accuracy = new CircularBuffer <double>(this._trainer.BatchSize);
        }
Exemple #9
0
        public void Init()
        {
            // Build a 2-layer network (one hidden layer) with a 2-way softmax
            // output, then warm it up on a batch of generated training samples.
            Net        = new Net <double>();
            NetTrainer = new SgdTrainer(Net)
            {
                LearningRate = 0.02, L2Decay = 0.005
            };

            // Non-image input: width/height stay 1, depth carries the features
            // (one input node per 10px of the 300px ground, per TrainerConfig).
            Net.AddLayer(new InputLayer(1, 1, TrainerConfig.InputNodesCount));

            // Hidden layer followed by a ReLU non-linearity.
            Net.AddLayer(new FullyConnLayer(TrainerConfig.HiddenLayerNodesCount));
            Net.AddLayer(new ReluLayer());

            // Two-class head: fully connected layer feeding the softmax classifier.
            Net.AddLayer(new FullyConnLayer(2));
            Net.AddLayer(new SoftmaxLayer(2));

            // Warm-up: train on 50 generated samples.
            const int batch = 50;

            for (var j = 0; j < batch; j++)
            {
                Train(GenerateTrainData(), true);
            }
        }
Exemple #10
0
    // Unity initialization: reset the experience buffer and counters, build the
    // network + trainer, and sanity-check a forward pass on a fixed probe input.
    void Start()
    {
        // Reset experience-replay storage and episode counters.
        exp  = new Experience[experienceSize];
        expi = 0;
        expn = 0;
        t    = 0;
        r0   = -99f;

        // Feed-forward network with two hidden layers.
        net = new Net();

        // input layer declares size of input. here: 2-D data
        // ConvNetSharp works on 3-Dimensional volumes (width, height, depth), but if you're not dealing with images
        // then the first two dimensions (width, height) will always be kept at size 1
        net.AddLayer(new InputLayer(1, 1, numStates));

        // Two hidden layers with built-in ReLU activation.
        net.AddLayer(new FullyConnLayer(hiddenNeurons, Activation.Relu));

        net.AddLayer(new FullyConnLayer(hiddenNeurons, Activation.Relu));

        // Regression output layer: one output per action.
        net.AddLayer(new RegressionLayer(numActions));

        // Fixed probe values used to sanity-check the untrained network below.
        double[] weights = { 0.3, -0.5, 0.1, 0.9, 0.6 };



        // Forward the probe data point through the network.
        var x = new Volume(weights);

        var prob = net.Forward(x);

        // prob is a Volume. Volumes have a property Weights that stores the raw data, and WeightGradients that stores gradients
        Debug.Log("probability that x is class 0: " + prob.Weights[0]); // prints e.g. 0.50101

        trainer = new SgdTrainer(net)
        {
            LearningRate = 0.01, L2Decay = 0.001, Momentum = 0.0, BatchSize = 1
        };

        //trainer.Train(x, 0); // train the network, specifying that x is class zero

        Volume prob2 = net.Forward(x);

        Debug.Log("probability that x is class 0: " + prob2.Weights[0]);
        // NOTE(review): with the Train call above commented out, nothing changes
        // the weights between the two forward passes, so this second log should
        // print the same value as the first.
    }
Exemple #11
0
        /// <summary>
        /// Trains the MLP on MNIST for the requested number of epochs, logging
        /// loss and accuracy per batch, then serializes the network to a
        /// timestamped JSON file.
        /// </summary>
        /// <param name="epochs">Number of full passes over the training images.</param>
        private void Train(int epochs = 1)
        {
            var datasets = new DataSets();

            if (!datasets.Load())
            {
                return;
            }

            if (this._net == null)
            {
                // 28x28 single-channel input -> 200-unit hidden layer -> 10-way softmax.
                this._net = new Net <double>();
                this._net.AddLayer(new InputLayer(28, 28, 1));
                this._net.AddLayer(new LeakyReluLayer(0.05));
                this._net.AddLayer(new FullyConnLayer(200));
                this._net.AddLayer(new LeakyReluLayer(0.05));
                this._net.AddLayer(new FullyConnLayer(10));
                this._net.AddLayer(new SoftmaxLayer(10));
            }
            this._trainer = new SgdTrainer <double>(this._net)
            {
                LearningRate = 0.01, //start with higher learning rate and decrease gradually
                BatchSize    = 20,
            };

            int iterations = datasets.Train._trainImages.Count / this._trainer.BatchSize * epochs;

            // BUGFIX: the loop previously started at 1 with `iteration < iterations`,
            // so it processed one fewer batch than computed; use <= to run exactly
            // `iterations` batches.
            for (int iteration = 1; iteration <= iterations; iteration++)
            {
                var trainSample = datasets.Train.NextBatch(this._trainer.BatchSize);
                Train(trainSample.Item1, trainSample.Item2, trainSample.Item3);

                var testSample = datasets.Test.NextBatch(this._trainer.BatchSize);
                Test(testSample.Item1, testSample.Item3, this._testAccWindow);

                Console.WriteLine("Loss: {0} Train accuracy: {1}% Test accuracy: {2}%", this._trainer.Loss.ToString("0.00"),
                                  Math.Round(this._trainAccWindow.Items.Average() * 100.0, 2),
                                  Math.Round(this._testAccWindow.Items.Average() * 100.0, 2));

                Console.WriteLine("Iteration: {0} StepCount: {1} Fwd: {2}ms Bckw: {3}ms", iteration, this._stepCount,
                                  Math.Round(this._trainer.ForwardTimeMs, 2),
                                  Math.Round(this._trainer.BackwardTimeMs, 2));
            }

            // Serialize to json and save to file
            var fileName = @"ConvNetModel_" + DateTime.Now.ToString("yyyyMMdd_HHmmss") + ".json";

            File.WriteAllText(fileName, this._net.ToJson());
        }
Exemple #12
0
        private static void Main(string[] args)
        {
            // Train a sigmoid MLP on the data folder given in args and report
            // dev-set precision; pressing 'r' at the end repeats the run.
            var logger = new ConsoleLogger();

            try
            {
                var dataFolder = GetDataFolder(args);

                if (string.IsNullOrEmpty(dataFolder))
                {
                    return;
                }

                Control.UseNativeMKL();

                var(trainX, trainY, devX, devY) = LoadData(dataFolder, logger);

                var repeat = false;
                do
                {
                    // Fixed seed so runs are reproducible.
                    const int randomSeed = 13;

                    // 784 inputs -> one 30-unit sigmoid hidden layer -> 10 sigmoid outputs.
                    var network = NetworkBuilder.Build(28 * 28, new LayerOptions(10, new Sigmoid()), new[]
                    {
                        new LayerOptions(30, new Sigmoid()),
                    }, randomSeed);

                    var trainer = new SgdTrainer(30, 10, 3.0, new QuadraticCostFunction(), logger, randomSeed);

                    var(randomTrainX, randomTrainY) = Shuffler.Shuffle(randomSeed, trainX, trainY);

                    PrintDataHistograms(trainY, devY, logger);

                    trainer.Train(network, randomTrainX, randomTrainY, 0.95);

                    DisplayTestPrecision(devX, devY, network, logger);

                    logger.Log("Press key to exit. \"r\" to repeat...");
                    repeat = Console.ReadKey().KeyChar == 'r';
                } while (repeat);
            }
            catch (Exception e)
            {
                logger.Log(e.Message);
            }
        }
Exemple #13
0
        /// <summary>
        ///     Round-trips a ConvNetSharp.Core network through JSON:
        ///     1) build the network,
        ///     2) overfit a single data point,
        ///     3) serialize to JSON,
        ///     4) deserialize and verify both copies give the same forward pass.
        /// </summary>
        private static void Main()
        {
            // 1) Network creation
            var net = new Net <double>();

            net.AddLayer(new InputLayer(1, 1, 2));
            net.AddLayer(new FullyConnLayer(20));
            net.AddLayer(new ReluLayer());
            net.AddLayer(new FullyConnLayer(10));
            net.AddLayer(new SoftmaxLayer(10));

            // 2) Dummy training on one (input, one-hot class 0) pair.
            var input    = BuilderInstance.Volume.From(new[] { 0.3, -0.5 }, new Shape(2));
            var expected = BuilderInstance.Volume.From(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }, new Shape(10));

            var steps   = 0;
            var trainer = new SgdTrainer(net) { LearningRate = 0.01 };

            // Keep training until the loss drops below 1e-2.
            do
            {
                trainer.Train(input, expected);
                Console.WriteLine($"Loss: {trainer.Loss}");
                steps++;
            } while (trainer.Loss > 1e-2);

            Console.WriteLine($"{steps}");

            // Forward pass with the original network.
            var probBefore = net.Forward(input);

            Console.WriteLine("probability that x is class 0: " + probBefore.Get(0));

            // 3) Serialization
            var json = net.ToJson();

            // 4) Deserialization
            var deserialized = SerializationExtensions.FromJson <double>(json);

            // Forward pass with the deserialized network — should match exactly.
            var probAfter = deserialized.Forward(input);

            Console.WriteLine("probability that x is class 0: " + probAfter.Get(0));

            Console.ReadLine();
        }
Exemple #14
0
        // MNIST demo built with the fluent network API: two conv/pool stages
        // feeding a 10-way softmax, trained with momentum SGD until a key press.
        private void MnistDemo()
        {
            var datasets = new DataSets();

            if (!datasets.Load(100))
            {
                return;
            }

            // Create network
            // NOTE(review): the input here is 24 x 24 while the non-fluent demos
            // in this file use 28 x 28 MNIST inputs — confirm the dataset loader
            // crops/resizes to 24 x 24 for this variant.
            this._net = FluentNet <double> .Create(24, 24, 1)
                        .Conv(5, 5, 8).Stride(1).Pad(2)
                        .Relu()
                        .Pool(2, 2).Stride(2)
                        .Conv(5, 5, 16).Stride(1).Pad(2)
                        .Relu()
                        .Pool(3, 3).Stride(3)
                        .FullyConn(10)
                        .Softmax(10)
                        .Build();

            this._trainer = new SgdTrainer <double>(this._net)
            {
                LearningRate = 0.01,
                BatchSize    = 20,
                L2Decay      = 0.001,
                Momentum     = 0.9
            };

            Console.WriteLine("Convolutional neural network learning...[Press any key to stop]");
            do
            {
                // One SGD step per training batch, then track test accuracy.
                var trainSample = datasets.Train.NextBatch(this._trainer.BatchSize);
                Train(trainSample.Item1, trainSample.Item2, trainSample.Item3);

                var testSample = datasets.Test.NextBatch(this._trainer.BatchSize);
                Test(testSample.Item1, testSample.Item3, this._testAccWindow);

                Console.WriteLine("Loss: {0} Train accuracy: {1}% Test accuracy: {2}%", this._trainer.Loss,
                                  Math.Round(this._trainAccWindow.Items.Average() * 100.0, 2),
                                  Math.Round(this._testAccWindow.Items.Average() * 100.0, 2));

                Console.WriteLine("Example seen: {0} Fwd: {1}ms Bckw: {2}ms", this._stepCount,
                                  Math.Round(this._trainer.ForwardTimeMs, 2),
                                  Math.Round(this._trainer.BackwardTimeMs, 2));
            } while (!Console.KeyAvailable);
        }
        private static void Main()
        {
            // Minimal classification example: a 2-layer network (one hidden
            // layer of 20 neurons) over 2-D input, trained for a single step.
            var net = new Net <double>();

            // Non-image input: width/height stay 1, depth carries the 2 features.
            net.AddLayer(new InputLayer(1, 1, 2));

            // Hidden layer of 20 neurons followed by a ReLU non-linearity.
            net.AddLayer(new FullyConnLayer(20));
            net.AddLayer(new ReluLayer());

            // Fully connected layer feeding the 10-way softmax classifier.
            net.AddLayer(new FullyConnLayer(10));
            net.AddLayer(new SoftmaxLayer(10));

            // Forward a fixed data point through the untrained network.
            var sample = BuilderInstance.Volume.From(new[] { 0.3, -0.5 }, new Shape(2));
            var before = net.Forward(sample);

            // Volumes expose the raw data through Get / Weights.
            Console.WriteLine("probability that x is class 0: " + before.Get(0)); // prints e.g. 0.50101

            var trainer = new SgdTrainer(net)
            {
                LearningRate = 0.01, L2Decay = 0.001
            };

            // One training step: the one-hot target marks the sample as class zero.
            trainer.Train(sample, BuilderInstance.Volume.From(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }, new Shape(1, 1, 10, 1)));

            // After training, class 0 should get slightly higher probability
            // (e.g. 0.50374 vs the earlier 0.50101).
            var after = net.Forward(sample);

            Console.WriteLine("probability that x is class 0: " + after.Get(0));
        }
Exemple #16
0
 // Builds a CNN over (width x height x channel) inputs with a softmax head of
 // `actionNum` classes, plus a momentum-SGD trainer.
 // featureNum is expected as [channel, width, height] (see the assignments below).
 public CNN(int[] featureNum, int actionNum, int batchSize = 31)
 {
     //get channel
     _channel   = featureNum[0];
     _width     = featureNum[1];
     _height    = featureNum[2];
     _actionNum = actionNum;
     _batchSize = batchSize;
     //create cnn neural network
     _network = new Net <double>();
     _network.AddLayer(new InputLayer <double>(_width, _height, _channel));
     // NOTE(review): a 1x1 convolution with Pad = 2 is unusual (padding larger
     // than the kernel); confirm this first conv layer is configured as intended.
     _network.AddLayer(new ConvLayer <double>(1, 1, 2)
     {
         Stride = 1, Pad = 2, BiasPref = 0.1f
     });
     _network.AddLayer(new ReluLayer <double>());
     _network.AddLayer(new PoolLayer <double>(2, 2)
     {
         Stride = 2
     });
     _network.AddLayer(new ConvLayer <double>(5, 5, 16)
     {
         Stride = 1, Pad = 2, BiasPref = 0.1f
     });
     _network.AddLayer(new ReluLayer <double>());
     _network.AddLayer(new PoolLayer <double>(3, 3)
     {
         Stride = 3
     });
     // One output per action, normalized by softmax.
     _network.AddLayer(new FullyConnLayer <double>(_actionNum));
     _network.AddLayer(new SoftmaxLayer <double>(_actionNum));
     //create trainer
     _trainer = new SgdTrainer <double>(_network)
     {
         LearningRate = 0.001,
         BatchSize    = batchSize,
         L2Decay      = 0.001,
         Momentum     = 0.9
     };
 }
Exemple #17
0
        /// <summary>
        /// 1-D regression demo: trains a small fully connected network with a
        /// regression head on three sample points of y = 0.2 * x, until a key
        /// is pressed, then runs each sample through the trained network.
        /// </summary>
        private static void Regression1DDemo()
        {
            var net = new Net();

            net.AddLayer(new InputLayer(1, 1, 1));
            net.AddLayer(new FullyConnLayer(20));
            net.AddLayer(new ReluLayer());
            net.AddLayer(new FullyConnLayer(20));
            net.AddLayer(new SigmoidLayer());
            net.AddLayer(new FullyConnLayer(1));
            net.AddLayer(new RegressionLayer());

            var trainer = new SgdTrainer(net)
            {
                LearningRate = 0.01, Momentum = 0.0, BatchSize = 1, L2Decay = 0.001
            };

            // Function we want to learn
            double[] x = { 0.0, 0.5, 1.0 };
            double[] y = { 0.0, 0.1, 0.2 };
            var      n = x.Length;

            // Training
            do
            {
                RegressionUpdate(n, x, trainer, y);
            } while (!Console.KeyAvailable);

            // Testing: forward each sample through the trained network.
            // (The unused local that previously captured Forward's result has
            // been removed; the outputs were never displayed.)
            var netx = new Volume(1, 1, 1);

            for (var ix = 0; ix < n; ix++)
            {
                netx.Set(0, 0, 0, x[ix]);
                net.Forward(netx);
            }
        }
Exemple #18
0
        // Single-precision MNIST demo: installs this project's VolumeBuilder
        // (implementation-specific backend), builds a two-stage conv network,
        // and trains with large batches until a key is pressed.
        private void MnistDemo()
        {
            // Register the project's volume builder before any volume is created.
            BuilderInstance.Volume = new VolumeBuilder();

            var datasets = new DataSets();

            if (!datasets.Load(100))
            {
                return;
            }

            // Create network
            this._net = new Net <float>();
            this._net.AddLayer(new InputLayer(28, 28, 1));
            this._net.AddLayer(new ConvLayer(5, 5, 8)
            {
                Stride = 1, Pad = 2
            });
            this._net.AddLayer(new ReluLayer());
            this._net.AddLayer(new PoolLayer(2, 2)
            {
                Stride = 2
            });
            this._net.AddLayer(new ConvLayer(5, 5, 16)
            {
                Stride = 1, Pad = 2
            });
            this._net.AddLayer(new ReluLayer());
            this._net.AddLayer(new PoolLayer(3, 3)
            {
                Stride = 3
            });
            this._net.AddLayer(new FullyConnLayer(10));
            this._net.AddLayer(new SoftmaxLayer(10));

            // Fluent version (equivalent network built via the fluent API)
            //        this._net = FluentNet<float>.Create(24, 24, 1)
            //                   .Conv(5, 5, 8).Stride(1).Pad(2)
            //                   .Relu()
            //                   .Pool(2, 2).Stride(2)
            //                   .Conv(5, 5, 16).Stride(1).Pad(2)
            //                   .Relu()
            //                   .Pool(3, 3).Stride(3)
            //                   .FullyConn(10)
            //                   .Softmax(10)
            //                   .Build();

            // Momentum SGD with a notably larger batch size than the other demos.
            this._trainer = new SgdTrainer(this._net)
            {
                LearningRate = 0.01f,
                BatchSize    = 1024,
                L2Decay      = 0.001f,
                Momentum     = 0.9f
            };

            Console.WriteLine("Convolutional neural network learning...[Press any key to stop]");
            do
            {
                // One SGD step per training batch, then track test accuracy.
                var trainSample = datasets.Train.NextBatch(this._trainer.BatchSize);
                Train(trainSample.Item1, trainSample.Item2, trainSample.Item3);

                var testSample = datasets.Test.NextBatch(this._trainer.BatchSize);
                Test(testSample.Item1, testSample.Item3, this._testAccWindow);

                Console.WriteLine("Loss: {0} Train accuracy: {1}% Test accuracy: {2}%", this._trainer.Loss,
                                  Math.Round(this._trainAccWindow.Items.Average() * 100.0, 2),
                                  Math.Round(this._testAccWindow.Items.Average() * 100.0, 2));

                Console.WriteLine("Example seen: {0} Fwd: {1}ms Bckw: {2}ms Updt: {3}ms", this._stepCount,
                                  Math.Round(this._trainer.ForwardTimeMs, 2),
                                  Math.Round(this._trainer.BackwardTimeMs, 2),
                                  Math.Round(this._trainer.UpdateWeightsTimeMs, 2));
            } while (!Console.KeyAvailable);
        }
Exemple #19
0
        /// <summary>
        /// Builds and trains a small regression network on the XOR truth table,
        /// printing the loss while training (until a key is pressed) and the
        /// final predictions, then returns the trained network.
        /// </summary>
        /// <returns>The trained network.</returns>
        public Net <double> XOR()
        {
            var network = new Net <double>();

            network.AddLayer(new InputLayer(1, 1, 2));
            network.AddLayer(new FullyConnLayer(6));
            network.AddLayer(new ReluLayer());
            network.AddLayer(new FullyConnLayer(2));
            network.AddLayer(new ReluLayer());
            network.AddLayer(new RegressionLayer());

            List <int[]> data  = new List <int[]>();
            List <int>   label = new List <int>();

            data.Add(new int[] { 0, 0 });
            label.Add(0);

            data.Add(new[] { 0, 1 });
            label.Add(1);

            data.Add(new[] { 1, 0 });
            label.Add(1);

            data.Add(new[] { 1, 1 });
            label.Add(0);

            var n       = label.Count;
            var trainer = new SgdTrainer <double>(network)
            {
                LearningRate = 0.01, BatchSize = n
            };

            // Pack the truth table into batched volumes (one sample per batch slot).
            // NOTE(review): y is allocated with depth 2 but only channel 0 is ever
            // set — confirm the regression target really needs two channels.
            var x = BuilderInstance.Volume.SameAs(new Shape(1, 1, 2, n));
            var y = BuilderInstance.Volume.SameAs(new Shape(1, 1, 2, n));

            for (var i = 0; i < n; i++)
            {
                y.Set(0, 0, 0, i, label[i]);

                x.Set(0, 0, 0, i, data[i][0]);
                x.Set(0, 0, 1, i, data[i][1]);
            }

            // Train until a key is pressed, reporting the loss after each step.
            // (The dead `avloss = 0.0` initialization has been removed.)
            do
            {
                trainer.Train(x, y);
                var avloss = trainer.Loss;

                Console.WriteLine(" Loss:" + avloss);
            } while (!Console.KeyAvailable);

            // x already holds every input pattern, so reuse it for the final
            // forward pass instead of rebuilding an identical volume.
            var result = network.Forward(x);

            for (int i = 0; i < n; i++)
            {
                Console.WriteLine("{0} XOR {1} = {2}", data[i][0], data[i][1], result.Get(0, 0, 0, i));
            }
            return(network);
        }
Exemple #20
0
        /// <summary>
        /// Trains a small sentiment-analysis network on the reviews/labels corpus:
        /// builds a reduced vocabulary from word counts and positive/negative log
        /// ratios, maps each retained word to an input index, then trains a
        /// regression network for up to 3 epochs or until a key is pressed.
        /// </summary>
        private static void Main(string[] args)
        {
            // Load data

            var min_count       = 10;
            var polarity_cutoff = 0.1;

            var labels  = File.ReadAllLines("../../../../Data/labels.txt");
            var reviews = File.ReadAllLines("../../../../Data/reviews.txt");

            // Count words

            var vocab           = new Dictionary <string, int>();
            var positive_counts = new Dictionary <string, int>();
            var negative_counts = new Dictionary <string, int>();
            var pos_neg_ratios  = new Dictionary <string, double>();

            foreach (var pair in reviews.Zip(labels, (review, label) => new { review, label }))
            {
                var review = pair.review;
                var label  = pair.label;

                foreach (var word in review.ToLower().Split(' '))
                {
                    vocab.TryGetValue(word, out var count);
                    vocab[word] = count + 1;

                    var dico = label == "positive" ? positive_counts : negative_counts;
                    dico.TryGetValue(word, out count);
                    dico[word] = count + 1;

                    // Ensure the word also exists (possibly with count 0) in the
                    // opposite dictionary so the ratio loop below never misses a key.
                    var otherDico = label == "positive" ? negative_counts : positive_counts;
                    otherDico.TryGetValue(word, out count);
                    otherDico[word] = count; // This is used to set count to 0 words that appear only on one side
                }
            }

            // Compute log positive/negative ratios for frequent words

            foreach (var word in vocab.Keys)
            {
                if (vocab[word] > 50)
                {
                    var ratio = positive_counts[word] / (negative_counts[word] + 1.0);
                    if (ratio > 1.0)
                    {
                        pos_neg_ratios[word] = Math.Log(ratio);
                    }
                    else
                    {
                        pos_neg_ratios[word] = -Math.Log(1.0 / (ratio + 0.01));
                    }
                }
                else
                {
                    pos_neg_ratios[word] = 0.0;
                }
            }

            // Keep only words that are both frequent and polarized.
            var review_vocab = vocab.Where(o => o.Value > min_count && Math.Abs(pos_neg_ratios[o.Key]) > polarity_cutoff).Select(o => o.Key).ToList();

            // Create word to index map

            var wordToIndex = review_vocab.Select((word, index) => new { word, index }).ToDictionary(o => o.word, o => o.index);

            // Build network

            var network = new Net <double>();

            network.AddLayer(new InputLayer(1, 1, review_vocab.Count));
            network.AddLayer(new FullyConnLayer(10));
            network.AddLayer(new FullyConnLayer(1));
            network.AddLayer(new RegressionLayer());

            // Training

            var trainer = new SgdTrainer(network)
            {
                LearningRate = 0.005
            };

            var input  = BuilderInstance.Volume.SameAs(new Shape(1, 1, review_vocab.Count));
            var output = BuilderInstance.Volume.SameAs(new Shape(1, 1, 1));

            var i             = 0;
            var correct       = 0;
            var stopRequested = false;

            for (var epoch = 0; epoch < 3 && !stopRequested; epoch++)
            {
                Console.WriteLine($"Epoch #{epoch}");

                foreach (var pair in reviews.Zip(labels, (review, label) => new { review, label }))
                {
                    var review = pair.review;
                    var label  = pair.label;
                    FillVolume(input, review, wordToIndex);

                    output.Set(0, 0, 0, label == "positive" ? 1.0 : 0.0);

                    // Score the sample before training on it to track accuracy.
                    var test = network.Forward(input);
                    if (test > 0.5 && label == "positive" || test < 0.5 && label == "negative")
                    {
                        correct++;
                    }

                    trainer.Train(input, output);

                    // BUG FIX: require i > 0 — the first iteration used to print
                    // 0 / 0 (NaN) as the accuracy.
                    if (i > 0 && i % 100 == 0)
                    {
                        Console.WriteLine($"Accuracy: {Math.Round(correct / (double)i * 100.0, 2)}%");
                        Console.WriteLine($"{i}/{reviews.Length}");
                    }

                    i++;

                    // BUG FIX: a key press previously broke only the inner loop; the
                    // outer epoch loop kept going (and each later epoch immediately
                    // broke again). Stop the whole training instead.
                    if (Console.KeyAvailable)
                    {
                        stopRequested = true;
                        break;
                    }
                }
            }

            // Save Network

            File.WriteAllText(@"../../../../Model/sentiment.json", network.ToJson());
        }
Exemple #21
0
        /// <summary>
        /// Q-learning training loop with experience replay: plays random grid-world
        /// games, records (S, A, R, S') tuples into a fixed-size circular buffer,
        /// and, once the buffer is full, trains the network on randomly sampled
        /// mini-batches of past experiences.
        /// </summary>
        /// <param name="numGames">Number of games to play.</param>
        /// <param name="batchSize">Mini-batch size for each replay training step.</param>
        /// <param name="initialRandomChance">Starting epsilon for epsilon-greedy exploration.</param>
        /// <param name="degradeRandomChance">When true, epsilon decays linearly over the games.</param>
        /// <param name="saveToFile">Optional path; when set, the brain is saved every 10 games and at the end.</param>
        public void TrainWithExperienceReplay(int numGames, int batchSize, float initialRandomChance, bool degradeRandomChance = true, string saveToFile = null)
        {
            var gamma  = 0.975f;          // discount factor for future rewards
            var buffer = batchSize * 2;   // replay buffer capacity
            var h      = 0;               // circular write index into the replay buffer

            //# Stores tuples of (S, A, R, S')
            var replay = new List <object[]>();

            _trainer = new SgdTrainer(Net)
            {
                LearningRate = 0.01, Momentum = 0.0, BatchSize = batchSize, L2Decay = 0.001
            };

            var startTime = DateTime.Now;
            var batches   = 0;

            for (var i = 0; i < numGames; i++)
            {
                World = GridWorld.RandomPlayerState();
                var gameMoves = 0;

                double updatedReward;
                var    gameRunning = true;
                while (gameRunning)
                {
                    //# We are in state S
                    //# Let's run our Q function on S to get Q values for all possible actions
                    var state  = GetInputs();
                    var qVal   = Net.Forward(state);
                    var action = 0;

                    if (Util.Rnd.NextDouble() < initialRandomChance)
                    {
                        //# Choose random action
                        action = Util.Rnd.Next(NumActions);
                    }
                    else
                    {
                        //# Choose best action from Q(s,a) values
                        action = MaxValueIndex(qVal);
                    }

                    //# Take action, observe new state S'
                    World.MovePlayer(action);
                    gameMoves++;
                    TotalTrainingMoves++;
                    var newState = GetInputs();

                    //# Observe reward, limit turns
                    var reward = World.GetReward();
                    gameRunning = !World.GameOver();

                    //# Experience replay storage
                    if (replay.Count < buffer)
                    {
                        // Buffer not full yet: just append.
                        replay.Add(new[] { state, (object)action, (object)reward, newState });
                    }
                    else
                    {
                        // Buffer full: overwrite the oldest slot and train on a batch.
                        h         = (h < buffer - 1) ? h + 1 : 0;
                        replay[h] = new[] { state, (object)action, (object)reward, newState };
                        batches++;
                        var batchInputValues  = new Volume[batchSize];
                        var batchOutputValues = new List <double>();

                        //# Randomly sample our experience replay memory
                        for (var b = 0; b < batchSize; b++)
                        {
                            var memory      = replay[Util.Rnd.Next(buffer)];
                            var oldState    = (Volume)memory[0];
                            var oldAction   = (int)memory[1];
                            var oldReward   = (int)memory[2];
                            var oldNewState = (Volume)memory[3];

                            //# Get max_Q(S',a)
                            var newQ = Net.Forward(oldNewState);
                            var y    = GetValues(newQ);
                            var maxQ = MaxValue(newQ);

                            if (oldReward == GridWorld.ProgressScore)
                            {
                                //# Non-terminal state
                                updatedReward = (oldReward + (gamma * maxQ));
                            }
                            else
                            {
                                //# Terminal state
                                updatedReward = oldReward;
                            }

                            //# Target output
                            // BUG FIX: the target must be written for the action stored in
                            // the sampled memory (oldAction), not the action just taken in
                            // the current game; 'oldAction' was previously extracted but
                            // never used.
                            y[oldAction] = updatedReward;

                            //# Store batched states
                            batchInputValues[b] = oldState;
                            batchOutputValues.AddRange(y);
                        }
                        Console.Write(".");

                        //# Train in batches with multiple scores and actions
                        _trainer.Train(batchOutputValues.ToArray(), batchInputValues);
                        TotalLoss += _trainer.Loss;
                    }
                }
                Console.WriteLine($"{(World.GetReward() == GridWorld.WinScore ? " WON!" : string.Empty)}");
                Console.Write($"Game: {i + 1}");
                TotalTrainingGames++;

                // Save every 10 games...
                if (!string.IsNullOrEmpty(saveToFile) && (i % 10 == 0))
                {
                    Util.SaveBrainToFile(this, saveToFile);
                }

                //# Optionally: slowly reduce the chance of choosing a random action
                if (degradeRandomChance && initialRandomChance > 0.05f)
                {
                    initialRandomChance -= (1f / numGames);
                }
            }
            var duration = (DateTime.Now - startTime);

            LastLoss      = _trainer.Loss;
            TrainingTime += duration;

            if (!string.IsNullOrEmpty(saveToFile))
            {
                Util.SaveBrainToFile(this, saveToFile);
            }

            Console.WriteLine($"\nAvg loss: {TotalLoss / TotalTrainingMoves}. Last: {LastLoss}");
            Console.WriteLine($"Training duration: {duration}. Total: {TrainingTime}");
        }
Exemple #22
0
        /// <summary>
        /// Interactive DQN-style trading training loop: asks for a CSV history file,
        /// builds main/target networks, then endlessly iterates over the price data
        /// (BUY/SELL/HOLD), replaying experience, periodically syncing the target
        /// network, logging, saving models, and mailing a per-epoch summary.
        /// </summary>
        private void Main()
        {
            BuilderInstance <double> .Volume = new VolumeBuilder();

            modelPath = projectPath + "/model/";
            dataPath  = projectPath + "/History/";

            // Ask for a training data file until a readable CSV is provided.
            while (true)
            {
                Console.WriteLine("학습할 데이터를 입력.");
                var line = Console.ReadLine();

                if (File.Exists(dataPath + line))
                {
                    try
                    {
                        data  = CSVToList.Read(dataPath + line);
                        parms = line.Replace(".csv", "_");
                        break;
                    }
                    catch (Exception)
                    {
                        // Unreadable/invalid CSV: prompt again.
                        Console.WriteLine("올바른 파일을 입력.");
                    }
                }
                else
                {
                    Console.WriteLine("파일이 존재하지 않습니다.");
                }
            }

            dataSize    = data[0].Count;
            screenDepth = dataSize * range + 1;

            mainNet   = new Net <double>();
            targetNet = new Net <double>();

            BuildNetwork(mainNet);
            BuildNetwork(targetNet);


            trainer = new SgdTrainer(mainNet)
            {
                LearningRate = 0.01, BatchSize = batchSize
            };
            StringBuilder log = new StringBuilder();

            // Timestamp the run so log/model file names are unique.
            parms += DateTime.Now.ToString("yyyy_MM_dd_HH_mm_ss");
            do
            {
                epoch++;
                double totalProfit = 0.0, totalGain = 0.0, totalLoss = 0.0, totalSpent = 0.0;
                for (int fromTime = 0; data.Count > fromTime + range; fromTime++)
                {
                    episode++;

                    // Input = price window [fromTime, toTime) plus current inventory size.
                    var toTime = fromTime + range;
                    var input  = GetData(fromTime, toTime);
                    input.Add(inventory.Count);
                    var inputVol = ToVolume(input);
                    var result   = mainNet.Forward(inputVol);

                    var state = new State();
                    state.state  = input;
                    state.action = GetAction(result, false);
                    // NOTE(review): the loop condition guarantees toTime < data.Count,
                    // so 'done' is always false here — confirm whether 'toTime + 1'
                    // was intended.
                    state.done   = toTime < data.Count ? false : true;

                    if (state.action == BUY)
                    {
                        state.price = data[toTime][LOW];
                        inventory.Add(state);

                        totalSpent += state.price;
                    }
                    else if (state.action == SELL)
                    {
                        state.price = data[toTime][HIGH];

                        // Reward is the aggregate gain of everything held in inventory.
                        var reward = 0.0;
                        inventory.ForEach(i => reward += (i.price - state.price));
                        inventory.Clear();
                        state.reward = reward;

                        totalProfit += reward;

                        if (reward > 0)
                        {
                            totalGain += reward;
                        }
                        else
                        {
                            totalLoss += reward;
                        }
                    }
                    else if (state.action == HOLD)
                    {
                        state.price = data[toTime][LOW];
                    }

                    var nextInput = state.done ? input : GetData(fromTime + 1, toTime + 1);
                    nextInput.Add(inventory.Count);
                    state.next_state = nextInput;
                    stateMemory.Add(state);

                    if (stateMemory.Count > batchSize)
                    {
                        ExperienceReplay();
                    }

                    // Periodically sync the target network with the main network.
                    if (episode % 10 == 0 && age > 0)
                    {
                        targetNet = SerializationExtensions.FromJson <double>(mainNet.ToJson());
                    }

                    var _loss = averageLossMemory.Count > 0 ? averageLossMemory.Average() : 0;
                    log.AppendLine($"{epoch},{episode},{state.action},{state.price},{totalSpent},{totalGain},{totalLoss},{totalProfit},{totalProfit / fromTime},{age},{_loss},{_loss / batchSize}");

                    if (episode % 1000 == 0)
                    {
                        Console.WriteLine($"{parms}\n" +
                                          $"epoch: {epoch}\n" +
                                          $"episode: {episode}\n" +
                                          $"totalGain: {totalGain}\n" +
                                          $"totalLoss: {totalLoss}\n" +
                                          $"totalSpent: {totalSpent}\n" +
                                          $"totalProfit: {totalProfit}\n" +
                                          $"av.Profit: {totalProfit / fromTime}\n" +
                                          $"av.Loss: {(averageLossMemory.Count > 0 ? averageLossMemory.Average() : 0)}");

                        File.AppendAllText(projectPath + "/log/log" + parms + ".log", log.ToString());
                        log.Clear();
                        File.AppendAllText(projectPath + "/log/loss" + parms + ".log", lossWriter.ToString());
                        lossWriter.Clear();
                        File.WriteAllText(projectPath + "/model/main" + parms + ".model", mainNet.ToJson());
                        File.WriteAllText(projectPath + "/model/target" + parms + ".model", targetNet.ToJson());
                    }
                }
                // BUG FIX: capture the epoch's average loss BEFORE the memories are
                // cleared below; the summary mail used to recompute it after Clear()
                // and therefore always reported 0.
                var epochAverageLoss = averageLossMemory.Count > 0 ? averageLossMemory.Average() : 0;

                Console.WriteLine("------------------------------");
                Console.WriteLine($"epoch: {epoch}\n" +
                                  $"episode: {episode}\n" +
                                  $"totalGain: {totalGain}\n" +
                                  $"totalLoss: {totalLoss}\n" +
                                  $"totalSpent: {totalSpent}\n" +
                                  $"profit: {totalProfit}\n" +
                                  $"av.Profit: {totalProfit/dataSize}\n" +
                                  $"av.Loss: {epochAverageLoss}\n");
                Console.WriteLine("------------------------------");

                File.AppendAllText(projectPath + "/log/log" + parms + ".log", log.ToString());
                log.Clear();
                File.AppendAllText(projectPath + "/log/loss" + parms + ".log", lossWriter.ToString());
                lossWriter.Clear();

                inventory.Clear();
                stateMemory.Clear();
                averageLossMemory.Clear();

                qda.Net.SendMail.Send(subject: $"{parms}", body: $"epoch: {epoch}\n" +
                                      $"episode: {episode}\n" +
                                      $"totalGain: {totalGain}\n" +
                                      $"totalLoss: {totalLoss}\n" +
                                      $"totalSpent: {totalSpent}\n" +
                                      $"av.Profit: {totalProfit}\n" +
                                      $"av.Loss: {epochAverageLoss}\n");
            } while (true);
        }
Exemple #23
0
        /// <summary>
        /// MNIST demo: either builds a fresh convolutional network or downloads an
        /// existing one from the service, trains it with SGD until a key is pressed,
        /// then uploads the trained network back to the service.
        /// </summary>
        /// <param name="creatNew">True to build a new network; false to fetch one from the service.</param>
        /// <param name="trainId">Id of the stored network to fetch when <paramref name="creatNew"/> is false.</param>
        private void MnistDemo(bool creatNew = true, int trainId = 1)
        {
            var datasets = new DataSets();

            if (!datasets.Load(100))
            {
                return;
            }

            // Create network
            if (creatNew)
            {
                this._net = new Net <double>();
                this._net.AddLayer(new InputLayer(28, 28, 1));
                this._net.AddLayer(new ConvLayer(5, 5, 8)
                {
                    Stride = 1, Pad = 2
                });
                this._net.AddLayer(new ReluLayer());
                this._net.AddLayer(new PoolLayer(2, 2)
                {
                    Stride = 2
                });
                this._net.AddLayer(new ConvLayer(5, 5, 16)
                {
                    Stride = 1, Pad = 2
                });
                this._net.AddLayer(new ReluLayer());
                this._net.AddLayer(new PoolLayer(3, 3)
                {
                    Stride = 3
                });
                this._net.AddLayer(new FullyConnLayer(10));
                this._net.AddLayer(new SoftmaxLayer(10));
            }
            else
            {
                // Fetch a previously stored network from the service.
                using (var httpClient = new HttpClient())
                {
                    var res = httpClient.GetStringAsync($"{url}/api/nets/net/{trainId}").Result;
                    var net = JsonConvert.DeserializeObject <Net>(res);
                    this._net = SerializationExtensions.FromJson <double>(net.NetText);
                }
            }
            this._trainer = new SgdTrainer <double>(this._net)
            {
                LearningRate = 0.01,
                BatchSize    = 20,
                L2Decay      = 0.001,
                Momentum     = 0.9
            };

            Console.WriteLine("Convolutional neural network learning...[Press any key to stop]");
            do
            {
                var trainSample = datasets.Train.NextBatch(this._trainer.BatchSize);
                Train(trainSample.Item1, trainSample.Item2, trainSample.Item3);

                var testSample = datasets.Test.NextBatch(this._trainer.BatchSize);
                Test(testSample.Item1, testSample.Item3, this._testAccWindow);

                Console.WriteLine("Loss: {0} Train accuracy: {1}% Test accuracy: {2}%", this._trainer.Loss,
                                  Math.Round(this._trainAccWindow.Items.Average() * 100.0, 2),
                                  Math.Round(this._testAccWindow.Items.Average() * 100.0, 2));

                Console.WriteLine("Example seen: {0} Fwd: {1}ms Bckw: {2}ms", this._stepCount,
                                  Math.Round(this._trainer.ForwardTimeMs, 2),
                                  Math.Round(this._trainer.BackwardTimeMs, 2));
            } while (!Console.KeyAvailable);

            // Upload the training result to the service (up to 3 attempts).
            Task.Run(() =>
            {
                var step = 3;
                var json = _net.ToJson();
                // BUG FIX: reuse one HttpClient instead of allocating (and leaking)
                // a new one on every retry iteration.
                using (var client = new HttpClient())
                {
                    while (step > 0)
                    {
                        // BUG FIX: the URL was a verbatim literal (@"{url}/...") so the
                        // text "{url}" was posted as-is; it must be interpolated like
                        // the GET request above.
                        var x = client.PostAsync($"{url}/api/nets/AddNet",
                                                 new StringContent(JsonConvert.SerializeObject(new { NetText = json }), Encoding.UTF8, "application/json"));
                        x.Wait();
                        if (x.Result.StatusCode == System.Net.HttpStatusCode.OK)
                        {
                            break;
                        }
                        step--;
                    }
                }
            }).Wait();
        }
Exemple #24
0
        /// <summary>
        /// MNIST training demo on the single-precision GPU volume builder: assembles
        /// a small convolutional network, trains it with SGD until a key is pressed,
        /// and appends loss/accuracy statistics to loss.csv after every batch.
        /// </summary>
        private void MnistDemo()
        {
            // Route all volume math through the GPU (float) implementation.
            BuilderInstance <float> .Volume = new ConvNetSharp.Volume.GPU.Single.VolumeBuilder();

            var dataSets = new DataSets();
            if (!dataSets.Load(100))
            {
                return;
            }

            // Assemble the network: two conv/relu/pool stages followed by a
            // 10-way fully-connected softmax classifier head.
            var network = new Net <float>();
            network.AddLayer(new InputLayer <float>());
            network.AddLayer(new ConvLayer <float>(5, 5, 8) { Stride = 1, Pad = 2, BiasPref = 0.1f });
            network.AddLayer(new ReluLayer <float>());
            network.AddLayer(new PoolLayer <float>(2, 2) { Stride = 2 });
            network.AddLayer(new ConvLayer <float>(5, 5, 16) { Stride = 1, Pad = 2, BiasPref = 0.1f });
            network.AddLayer(new ReluLayer <float>());
            network.AddLayer(new PoolLayer <float>(3, 3) { Stride = 3 });
            network.AddLayer(new FullyConnLayer <float>(10));
            network.AddLayer(new SoftmaxLayer <float>());
            this._net = network;

            this._trainer = new SgdTrainer <float>(this._net, 0.01f)
            {
                BatchSize = 1024
            };

            // Start each run from a clean statistics file.
            if (File.Exists("loss.csv"))
            {
                File.Delete("loss.csv");
            }

            Console.WriteLine("Convolutional neural network learning...[Press any key to stop]");
            do
            {
                // One SGD step on a fresh training batch...
                var trainingBatch = dataSets.Train.NextBatch(this._trainer.BatchSize);
                Train(trainingBatch.Item1, trainingBatch.Item2, trainingBatch.Item3);

                // ...then measure accuracy on a held-out test batch.
                var evaluationBatch = dataSets.Test.NextBatch(this._trainer.BatchSize);
                Test(evaluationBatch.Item1, evaluationBatch.Item3, this._testAccWindow);

                Console.WriteLine("Loss: {0} Train accuracy: {1}% Test accuracy: {2}%", this._trainer.Loss,
                                  Math.Round(this._trainAccWindow.Items.Average() * 100.0, 2),
                                  Math.Round(this._testAccWindow.Items.Average() * 100.0, 2));

                Console.WriteLine("Example seen: {0} Fwd: {1}ms Bckw: {2}ms Updt: {3}ms", this._stepCount,
                                  Math.Round(this._trainer.ForwardTimeMs, 2),
                                  Math.Round(this._trainer.BackwardTimeMs, 2),
                                  Math.Round(this._trainer.UpdateWeightsTimeMs, 2));

                File.AppendAllLines("loss.csv", new[] { $"{this._stepCount}, {this._trainer.Loss}, { Math.Round(this._trainAccWindow.Items.Average() * 100.0, 2)}, {Math.Round(this._testAccWindow.Items.Average() * 100.0, 2)}" });
            } while (!Console.KeyAvailable);

            // Release GPU-backed resources.
            this._net.Dispose();
            this._trainer.Dispose();
        }
Exemple #25
0
        /// <summary>
        /// Online Q-learning (no experience replay): plays <paramref name="numGames"/>
        /// games on the standard grid world, training the network after every single
        /// move with an epsilon-greedy policy whose epsilon decays over the run.
        /// </summary>
        /// <param name="numGames">Number of games to play.</param>
        /// <param name="initialRandomChance">Starting epsilon for epsilon-greedy exploration.</param>
        public void Train(int numGames, float initialRandomChance)
        {
            var gamma = 0.9f; // discount factor for future rewards

            _trainer = new SgdTrainer(Net)
            {
                LearningRate = 0.01, Momentum = 0.0, BatchSize = 1, L2Decay = 0.001
            };
            var startTime = DateTime.Now;

            for (var i = 0; i < numGames; i++)
            {
                World = GridWorld.StandardState();

                double updatedReward;
                var    gameRunning = true;
                var    gameMoves   = 0;
                while (gameRunning)
                {
                    //# We are in state S
                    //# Let's run our Q function on S to get Q values for all possible actions
                    var state  = GetInputs();
                    var qVal   = Net.Forward(state);
                    var action = 0;

                    if (Util.Rnd.NextDouble() < initialRandomChance)
                    {
                        //# Choose random action
                        action = Util.Rnd.Next(NumActions);
                    }
                    else
                    {
                        //# Choose best action from Q(s,a) values
                        action = MaxValueIndex(qVal);
                    }

                    //# Take action, observe new state S'
                    World.MovePlayer(action);
                    gameMoves++;
                    TotalTrainingMoves++;
                    var newState = GetInputs();

                    //# Observe reward
                    var reward = World.GetReward();
                    gameRunning = !World.GameOver();

                    //# Get max_Q(S',a)
                    var newQ = Net.Forward(newState);
                    var y    = GetValues(newQ);
                    var maxQ = MaxValue(newQ);

                    if (gameRunning)
                    {
                        //# Non-terminal state
                        updatedReward = (reward + (gamma * maxQ));
                    }
                    else
                    {
                        //# Terminal state
                        updatedReward = reward;
                        TotalTrainingGames++;
                        // CONSISTENCY FIX: compare against GridWorld.WinScore instead of
                        // the magic number 10, matching TrainWithExperienceReplay.
                        Console.WriteLine($"Game: {TotalTrainingGames}. Moves: {gameMoves}. {(reward == GridWorld.WinScore ? "WIN!" : "")}");
                    }

                    //# Target output
                    y[action] = updatedReward;

                    //# Feedback what the score would be for this action
                    _trainer.Train(state, y);
                    TotalLoss += _trainer.Loss;
                }

                //# Slowly reduce the chance of choosing a random action
                if (initialRandomChance > 0.05f)
                {
                    initialRandomChance -= (1f / numGames);
                }
            }
            var duration = (DateTime.Now - startTime);

            LastLoss      = _trainer.Loss;
            TrainingTime += duration;

            Console.WriteLine($"Avg loss: {TotalLoss / TotalTrainingMoves}. Last: {LastLoss}");
            Console.WriteLine($"Training duration: {duration}. Total: {TrainingTime}");
        }
Exemple #26
0
        /// <summary>
        /// Initializes the Q-learning agent: allocates the experience buffer,
        /// builds the value network and the classifier network, configures the SGD
        /// trainer, and sets up the entropy/quartile statistics helpers.
        /// </summary>
        public QLearning()
        {
            // Experience replay bookkeeping starts empty.
            exp  = new Experience[experienceSize];
            expi = 0;
            expn = 0;
            t    = 0;
            r0   = -99f;

            // Value network. ConvNetSharp works on 3-D volumes (width, height,
            // depth); for non-image data the first two dimensions stay at 1, so
            // the input is a 1 x 1 x numStates volume.
            net = new Net();
            net.AddLayer(new InputLayer(1, 1, numStates));
            net.AddLayer(new FullyConnLayer(hiddenNeurons - 10, Activation.Relu));
            net.AddLayer(new RegressionLayer(numActions));

            Debug.Log("Network initialized");

            // Classifier network: 2 inputs -> 4 hidden ReLU units -> 2-way softmax.
            netClassify = new Net();
            netClassify.AddLayer(new InputLayer(1, 1, 2));
            netClassify.AddLayer(new FullyConnLayer(4, Activation.Relu));
            netClassify.AddLayer(new SoftmaxLayer(2));

            Debug.Log("Network Classify initialized");

            // Trainer for the value network.
            trainer = new SgdTrainer(net)
            {
                LearningRate = 0.01, L2Decay = 0.001, Momentum = 0.0, BatchSize = 5
            };

            e = new Entropy();
            q = new Quartiles();

            // Quick sanity check of the quartile helpers on a small sorted sample.
            double[] sample       = { 5, 6, 7, 2, 1, 8, 4, 3 };
            double[] sortedSample = sample.OrderBy(v => v).ToArray();

            Debug.Log(q.umidmean(sortedSample));

            Debug.Log(q.lmidmean(sortedSample));
        }
Exemple #27
0
        /// <summary>
        /// Facial-expression recognition demo: trains a small convolutional network
        /// (48x48x1 input, two conv/relu/pool stages, FC(5) + softmax(5)) on batches
        /// pulled from <c>Datasets</c> until a key is pressed, then saves the trained
        /// network as JSON.
        /// </summary>
        public void MnistDemo()
        {
            // Load the dataset up front; bail out early when it is unavailable.
            var data = new Datasets();
            if (!data.Load(10))
            {
                return;
            }

            // Network topology: 48x48x1 -> conv(3x3x8)/relu/pool(2) -> conv(3x3x16)/relu/pool(3) -> FC(5) -> softmax(5).
            this._net = new Net <double>();
            this._net.AddLayer(new InputLayer(48, 48, 1));
            this._net.AddLayer(new ConvLayer(3, 3, 8) { Stride = 1, Pad = 2 });
            this._net.AddLayer(new ReluLayer());
            this._net.AddLayer(new PoolLayer(2, 2) { Stride = 2 });
            this._net.AddLayer(new ConvLayer(3, 3, 16) { Stride = 1, Pad = 2 });
            this._net.AddLayer(new ReluLayer());
            this._net.AddLayer(new PoolLayer(3, 3) { Stride = 3 });
            this._net.AddLayer(new FullyConnLayer(5));
            this._net.AddLayer(new SoftmaxLayer(5));

            // SGD with momentum and a small L2 penalty.
            this._trainer = new SgdTrainer <double>(_net)
            {
                LearningRate = 0.01,
                BatchSize    = 20,
                L2Decay      = 0.001,
                Momentum     = 0.9
            };

            Console.WriteLine("Convolutional neural network learning...[Press any key to stop]");
            for (;;)
            {
                // One training step and one evaluation step per iteration, each on a fresh batch.
                var trainBatch = data.Train.NextBatch(this._trainer.BatchSize);
                Train(trainBatch.Item1, trainBatch.Item2, trainBatch.Item3);

                var testBatch = data.Test.NextBatch(this._trainer.BatchSize);
                Test(testBatch.Item1, testBatch.Item3, this._testAccWindow);

                // Progress report: loss plus windowed train/test accuracy, then timing.
                Console.WriteLine("Loss: {0} Train accuracy: {1}% Test accuracy: {2}%", this._trainer.Loss,
                                  Math.Round(this._trainAccWindow.Items.Average() * 100.0, 2),
                                  Math.Round(this._testAccWindow.Items.Average() * 100.0, 2));

                Console.WriteLine("Example seen: {0} Fwd: {1}ms Bckw: {2}ms", this._stepCount,
                                  Math.Round(this._trainer.ForwardTimeMs, 2),
                                  Math.Round(this._trainer.BackwardTimeMs, 2));

                if (Console.KeyAvailable)
                {
                    break;
                }
            }

            // Persist the trained network for later reloading.
            var json = _net.ToJson();
            System.IO.File.WriteAllText(@"..\..\..\Network\frnetwork.json", json);

            Console.WriteLine("-------------------------------------------------------");

            Console.ReadLine();
        }
Exemple #28
0
        /// <summary>
        /// Small 2-class classification demo: trains a 3-layer MLP (two tanh hidden
        /// layers, softmax output) on a fixed set of thirteen 2-D points, then
        /// forwards the whole set once and inspects the prediction for the first
        /// sample.
        /// </summary>
        private static void Classify2DDemo()
        {
            var net = new Net <double>();

            net.AddLayer(new InputLayer(1, 1, 2));
            net.AddLayer(new FullyConnLayer(6));
            net.AddLayer(new TanhLayer());
            net.AddLayer(new FullyConnLayer(2));
            net.AddLayer(new TanhLayer());
            net.AddLayer(new FullyConnLayer(2));
            net.AddLayer(new SoftmaxLayer(2));

            // Training data: 2-D points paired index-for-index with binary labels.
            var data = new List <double[]>
            {
                new[] { -0.4326, 1.1909 },
                new[] { 3.0, 4.0 },
                new[] { 0.1253, -0.0376 },
                new[] { 0.2877, 0.3273 },
                new[] { -1.1465, 0.1746 },
                new[] { 1.8133, 1.0139 },
                new[] { 2.7258, 1.0668 },
                new[] { 1.4117, 0.5593 },
                new[] { 4.1832, 0.3044 },
                new[] { 1.8636, 0.1677 },
                new[] { 0.5, 3.2 },
                new[] { 0.8, 3.2 },
                new[] { 1.0, -2.2 }
            };
            var labels = new List <int> { 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1 };
            var n = labels.Count;

            // Full-batch SGD: the entire dataset forms a single batch.
            var trainer = new SgdTrainer <double>(net)
            {
                LearningRate = 0.01, L2Decay = 0.001, BatchSize = n
            };

            // Training: iterate until a key is pressed.
            do
            {
                Classify2DUpdate(n, data, trainer, labels);
            } while (!Console.KeyAvailable);

            // Testing: pack every point into one volume of shape (1, 1, 2, n).
            var netx = new Volume(new double[2 * n], new Shape(1, 1, 2, n));

            for (var ix = 0; ix < n; ix++)
            {
                netx.Set(0, 0, 0, ix, data[ix][0]);
                netx.Set(0, 0, 1, ix, data[ix][1]);
            }

            // Forward is called for its side effect of populating the network's
            // output before GetPrediction; the returned volume itself is unused.
            net.Forward(netx);
            var c = net.GetPrediction();

            // NOTE(review): only the first sample's prediction is compared, and the
            // flag is never reported — presumably kept for debugger inspection.
            var accurate = c[0] == labels[0];
        }
Exemple #29
0
        /// <summary>
        /// Builds the same small convolutional network twice — once with the Flow
        /// implementation and once with the Core implementation — copies the Core
        /// network's initial weights into the Flow network, then forwards identical
        /// batches through both, asserts the outputs match element-wise, and trains
        /// each one step per iteration so they stay in lockstep.
        /// </summary>
        public void CompareCoreVsFlow()
        {
            var inputWidth  = 28;
            var inputHeigth = 28; // NOTE(review): "Heigth" is a typo for "Height"; local-only, left as-is
            var inputDepth  = 3;
            var batchSize   = 20;

            #region Flow network

            // Flow variant: input -> conv(5x5x8, pad 2) -> relu -> pool(2, stride 2) -> FC(10) -> softmax.
            var netFlow = new Net <T>();
            netFlow.AddLayer(new InputLayer <T>());
            var convLayerFlow1 = new ConvLayer <T>(5, 5, 8)
            {
                BiasPref = (T)Convert.ChangeType(0.1, typeof(T)), Stride = 1, Pad = 2
            };
            netFlow.AddLayer(convLayerFlow1);
            netFlow.AddLayer(new ReluLayer <T>());
            netFlow.AddLayer(new PoolLayer <T>(2, 2)
            {
                Stride = 2
            });
            var fullyConnLayerFlow = new FullyConnLayer <T>(10);
            netFlow.AddLayer(fullyConnLayerFlow);
            netFlow.AddLayer(new SoftmaxLayer <T>());

            var trainerFlow = new SgdTrainer <T>(netFlow, (T)Convert.ChangeType(0.01f, typeof(T)))
            {
                BatchSize = batchSize
            };

            #endregion

            #region Core network

            // Core variant: identical topology and hyper-parameters to the Flow network above.
            var netCore = new Core.Net <T>();
            netCore.AddLayer(new Core.Layers.InputLayer <T>(inputWidth, inputHeigth, inputDepth));
            var convLayerCore1 = new Core.Layers.ConvLayer <T>(5, 5, 8)
            {
                BiasPref = (T)Convert.ChangeType(0.1, typeof(T)), Stride = 1, Pad = 2
            };
            netCore.AddLayer(convLayerCore1);
            netCore.AddLayer(new Core.Layers.ReluLayer <T>());
            netCore.AddLayer(new Core.Layers.PoolLayer <T>(2, 2)
            {
                Stride = 2
            });
            var fullyConnLayerCore = new Core.Layers.FullyConnLayer <T>(10);
            netCore.AddLayer(fullyConnLayerCore);
            netCore.AddLayer(new Core.Layers.SoftmaxLayer <T>(10));

            var trainerCore = new Core.Training.SgdTrainer <T>(netCore)
            {
                LearningRate = (T)Convert.ChangeType(0.01f, typeof(T)),
                BatchSize    = batchSize
            };

            #endregion

            // Same weights: overwrite the Flow network's conv and FC filter variables
            // with the Core network's (randomly initialised) filters so both networks
            // start from identical parameters.
            var convfilterCore1 = netFlow.Session.GetVariableByName(netFlow.Op, (convLayerFlow1.Filter as IPersistable <T>).Name);
            convfilterCore1.Result = BuilderInstance <T> .Volume.SameAs(convLayerCore1.Filters.ToArray(), convLayerCore1.Filters.Shape);

            var fullyfilterCore = netFlow.Session.GetVariableByName(netFlow.Op, (fullyConnLayerFlow.Filter as IPersistable <T>).Name);
            fullyfilterCore.Result = BuilderInstance <T> .Volume.SameAs(fullyConnLayerCore.Filters.ToArray(), fullyConnLayerCore.Filters.Shape);

            // Create input: constant-filled batch; refilled with 1.0 + k each iteration below.
            var xStorage = new double[inputWidth * inputHeigth * inputDepth * batchSize].Populate(1.0);
            var x        = NewVolume(xStorage, Volume.Shape.From(inputWidth, inputHeigth, inputDepth, batchSize));

            // Create output: one-hot targets where sample i belongs to class i % 10.
            var yStorage = new double[10 * batchSize];
            var y        = NewVolume(yStorage, Volume.Shape.From(1, 1, 10, batchSize));
            for (var i = 0; i < batchSize; i++)
            {
                y.Set(0, 0, i % 10, i, Ops <T> .One);
            }

            for (var k = 0; k < 10; k++)
            {
                // Fresh input for this iteration: every element is 1.0 + k.
                xStorage = new double[inputWidth * inputHeigth * inputDepth * batchSize].Populate(1.0 + k);
                x        = NewVolume(xStorage, Volume.Shape.From(inputWidth, inputHeigth, inputDepth, batchSize));

                // Forward the same batch through both implementations.
                var flowResult = netFlow.Forward(x);
                var coreResult = netCore.Forward(x);

                // Print the difference of the summed outputs for quick inspection...
                var sum1 = BuilderInstance <T> .Volume.SameAs(new Shape(1));

                flowResult.DoSum(sum1);
                var sum2 = BuilderInstance <T> .Volume.SameAs(new Shape(1));

                coreResult.DoSum(sum2);
                var diff = Ops <T> .Subtract(sum1.Get(0), sum2.Get(0));

                Console.WriteLine(diff);

                // ...then assert the full output tensors agree element-wise within tolerance.
                AssertNumber.AreSequenceEqual(flowResult.ToArray(), coreResult.ToArray(), 1e-6);

                // Train both one step on the same (x, y) so they remain synchronised
                // for the next iteration's comparison.
                trainerCore.Train(x, y);
                trainerFlow.Train(x, y);
            }
        }