Example no. 1
0
        /// <summary>
        /// Loads the train/test datasets, restores a trained network from
        /// <c>parameter.Model</c>, switches it to evaluation mode and runs a
        /// one-off validation pass over both sets.
        /// </summary>
        /// <param name="parameter">Run settings; <c>Dataset</c> and <c>Model</c> paths are read.</param>
        private void Test(Parameter parameter)
        {
            try
            {
                IList <Matrix <C> > trainingImages;
                IList <T>           trainingLabels;
                IList <Matrix <C> > testingImages;
                IList <T>           testingLabels;

                Logger.Info("Start load train images");
                Load(parameter.Dataset, "train", out trainingImages, out trainingLabels);
                Logger.Info($"Load train images: {trainingImages.Count}");

                Logger.Info("Start load test images");
                Load(parameter.Dataset, "test", out testingImages, out testingLabels);
                Logger.Info($"Load test images: {testingImages.Count}");
                Logger.Info("");

                // So with that out of the way, we can make a network instance.
                var networkId = SetupNetwork();

                using (var net = LossMulticlassLog.Deserialize(parameter.Model, networkId))
                {
                    this.SetEvalMode(networkId, net);
                    var validationParameter = new ValidationParameter <T, C>
                    {
                        BaseName       = Path.GetFileNameWithoutExtension(parameter.Model),
                        Trainer        = net,
                        TrainingImages = trainingImages,
                        TrainingLabels = trainingLabels,
                        TestingImages  = testingImages,
                        TestingLabels  = testingLabels,
                        UseConsole     = true,
                        SaveToXml      = false,
                        OutputDiffLog  = true,
                        Output         = Path.GetDirectoryName(parameter.Model)
                    };

                    // Accuracies are logged inside Validation; the out values are not needed here.
                    Validation(validationParameter, out _, out _);
                }
            }
            catch (Exception e)
            {
                // Log the full exception (type + message + stack trace), not just
                // e.Message, so failures stay diagnosable.
                Logger.Error(e.ToString());
            }
        }
Example no. 2
0
        /// <summary>
        /// Runs the network over the training and testing sets and reports the
        /// classification accuracy of each. Optionally dumps the network
        /// parameters to an XML file for use with external tools.
        /// </summary>
        /// <param name="parameter">Network plus the image/label lists to evaluate.</param>
        /// <param name="trainAccuracy">Fraction of training images classified correctly (0 for an empty set).</param>
        /// <param name="testAccuracy">Fraction of testing images classified correctly (0 for an empty set).</param>
        private void Validation(ValidationParameter <T, C> parameter,
                                out double trainAccuracy,
                                out double testAccuracy)
        {
            trainAccuracy = 0;
            testAccuracy  = 0;

            var net        = parameter.Trainer;
            var saveToXml  = parameter.SaveToXml;
            var baseName   = parameter.BaseName;
            var useConsole = parameter.UseConsole;

            // Classifies every image, compares the prediction against the expected
            // label and returns the ratio of correct answers. 'phase' is only used
            // as the log prefix ("training"/"testing").
            double Evaluate(IList <Matrix <C> > images, IList <T> labels, string phase)
            {
                using (var predictedLabels = net.Operator(images))
                {
                    var numRight = 0;
                    var numWrong = 0;

                    // And then let's see if it classified them correctly.
                    for (var i = 0; i < images.Count; ++i)
                    {
                        if (this.Compare(predictedLabels[i], labels[i]))
                        {
                            ++numRight;
                        }
                        else
                        {
                            ++numWrong;
                        }
                    }

                    // Guard the empty-set case: 0 / 0.0 would yield NaN.
                    var total    = numRight + numWrong;
                    var accuracy = total == 0 ? 0d : numRight / (double)total;

                    if (useConsole)
                    {
                        Logger.Info($"{phase} num_right: {numRight}");
                        Logger.Info($"{phase} num_wrong: {numWrong}");
                        Logger.Info($"{phase} accuracy:  {accuracy}");
                    }

                    return accuracy;
                }
            }

            trainAccuracy = Evaluate(parameter.TrainingImages, parameter.TrainingLabels, "training");
            testAccuracy  = Evaluate(parameter.TestingImages,  parameter.TestingLabels,  "testing");

            // Finally, you can also save network parameters to XML files if you want to do
            // something with the network in another tool.  For example, you could use dlib's
            // tools/convert_dlib_nets_to_caffe to convert the network to a caffe model.
            if (saveToXml)
            {
                Dlib.NetToXml(net, $"{baseName}.xml");
            }
        }
Example no. 3
0
        /// <summary>
        /// Trains a multiclass-log-loss network with SGD-style mini-batches:
        /// loads the datasets, shuffles and partitions the training data each
        /// epoch, periodically validates, keeps the best model files and finally
        /// serializes the cleaned network.
        /// </summary>
        /// <param name="parameter">
        /// Training settings (dataset path, learning rates, mini-batch size,
        /// epoch count, validation interval, output directory).
        /// NOTE(review): assumes <c>parameter.Validation</c> &gt; 0 — a value of 0
        /// would throw <see cref="DivideByZeroException"/> at the modulo below; confirm with callers.
        /// </param>
        private void Train(Parameter parameter)
        {
            try
            {
                IList <Matrix <C> > trainingImages;
                IList <T>           trainingLabels;
                IList <Matrix <C> > testingImages;
                IList <T>           testingLabels;

                Logger.Info("Start load train images");
                Load(parameter.Dataset, "train", out trainingImages, out trainingLabels);
                Logger.Info($"Load train images: {trainingImages.Count}");

                Logger.Info("Start load test images");
                Load(parameter.Dataset, "test", out testingImages, out testingLabels);
                Logger.Info($"Load test images: {testingImages.Count}");
                Logger.Info("");

                // So with that out of the way, we can make a network instance.
                var networkId = SetupNetwork();

                using (var net = new LossMulticlassLog(networkId))
                    using (var solver = new Adam())
                        using (var trainer = new DnnTrainer <LossMulticlassLog>(net, solver))
                        {
                            var learningRate    = parameter.LearningRate;
                            var minLearningRate = parameter.MinLearningRate;
                            var miniBatchSize   = parameter.MiniBatchSize;
                            var baseName        = parameter.BaseName;
                            var epoch           = parameter.Epoch;
                            var validation      = parameter.Validation;

                            trainer.SetLearningRate(learningRate);
                            trainer.SetMinLearningRate(minLearningRate);
                            trainer.SetMiniBatchSize(miniBatchSize);
                            trainer.BeVerbose();
                            // Checkpoint the trainer state to disk every 180 seconds.
                            trainer.SetSynchronizationFile(baseName, 180);

                            // Pre-allocate one image/label array pair per mini-batch;
                            // the last batch may be smaller than miniBatchSize.
                            var trainingImagesCount = trainingImages.Count;

                            var maxIteration = (int)Math.Ceiling(trainingImagesCount / (float)miniBatchSize);
                            var imageBatches = new Matrix <C> [maxIteration][];
                            var labelBatches = new uint[maxIteration][];
                            for (var i = 0; i < maxIteration; i++)
                            {
                                if (miniBatchSize <= trainingImagesCount - i * miniBatchSize)
                                {
                                    imageBatches[i] = new Matrix <C> [miniBatchSize];
                                    labelBatches[i] = new uint[miniBatchSize];
                                }
                                else
                                {
                                    // Size both arrays from the image count so the image and
                                    // label batches always stay the same length, even if the
                                    // loaded label list were ever shorter/longer than the images.
                                    imageBatches[i] = new Matrix <C> [trainingImagesCount % miniBatchSize];
                                    labelBatches[i] = new uint[trainingImagesCount % miniBatchSize];
                                }
                            }

                            using (var fs = new FileStream($"{baseName}.log", FileMode.Create, FileAccess.Write, FileShare.Write))
                                using (var sw = new StreamWriter(fs, Encoding.UTF8))
                                    for (var e = 0; e < epoch; e++)
                                    {
                                        // Cheap random shuffle of the sample indices for this epoch.
                                        var randomArray = Enumerable.Range(0, trainingImagesCount).OrderBy(i => Guid.NewGuid()).ToArray();
                                        var index       = 0;
                                        for (var i = 0; i < imageBatches.Length; i++)
                                        {
                                            var currentImages = imageBatches[i];
                                            var currentLabels = labelBatches[i];
                                            for (var j = 0; j < imageBatches[i].Length; j++)
                                            {
                                                var rIndex = randomArray[index];
                                                currentImages[j] = trainingImages[rIndex];
                                                currentLabels[j] = this.Cast(trainingLabels[rIndex]);
                                                index++;
                                            }
                                        }

                                        for (var i = 0; i < maxIteration; i++)
                                        {
                                            LossMulticlassLog.TrainOneStep(trainer, imageBatches[i], labelBatches[i]);
                                        }

                                        var lr   = trainer.GetLearningRate();
                                        var loss = trainer.GetAverageLoss();

                                        var trainLog = $"Epoch: {e}, learning Rate: {lr}, average loss: {loss}";
                                        Logger.Info(trainLog);
                                        sw.WriteLine(trainLog);

                                        // Validate every 'validation' epochs (including epoch 0).
                                        // The former 'e >= 0 &&' guard was always true and has been dropped.
                                        if (e % validation == 0)
                                        {
                                            var validationParameter = new ValidationParameter <T, C>
                                            {
                                                BaseName       = parameter.BaseName,
                                                Output         = parameter.Output,
                                                Trainer        = net,
                                                TrainingImages = trainingImages,
                                                TrainingLabels = trainingLabels,
                                                TestingImages  = testingImages,
                                                TestingLabels  = testingLabels,
                                                UseConsole     = true,
                                                SaveToXml      = true,
                                                OutputDiffLog  = true
                                            };

                                            Validation(validationParameter, out var trainAccuracy, out var testAccuracy);

                                            var validationLog = $"Epoch: {e}, train accuracy: {trainAccuracy}, test accuracy: {testAccuracy}";
                                            Logger.Info(validationLog);
                                            sw.WriteLine(validationLog);

                                            var name = this.GetBaseName(parameter.Epoch,
                                                                        parameter.LearningRate,
                                                                        parameter.MinLearningRate,
                                                                        parameter.MiniBatchSize);

                                            // Keep whichever snapshot scores best on each split.
                                            UpdateBestModelFile(net, testAccuracy, parameter.Output, name, "test");
                                            UpdateBestModelFile(net, trainAccuracy, parameter.Output, name, "train");
                                        }

                                        // Stop early once the trainer has decayed the learning
                                        // rate below the configured floor.
                                        if (lr < minLearningRate)
                                        {
                                            Logger.Info($"Stop training: {lr} < {minLearningRate}");
                                            break;
                                        }
                                    }

                            // wait for training threads to stop
                            trainer.GetNet();
                            Logger.Info("done training");

                            net.Clean();
                            LossMulticlassLog.Serialize(net, $"{baseName}.tmp");

                            // Now let's run the training images through the network.  This statement runs all the
                            // images through it and asks the loss layer to convert the network's raw output into
                            // labels.  In our case, these labels are the numbers between 0 and 9.
                            var validationParameter2 = new ValidationParameter <T, C>
                            {
                                BaseName       = parameter.BaseName,
                                Output         = parameter.Output,
                                Trainer        = net,
                                TrainingImages = trainingImages,
                                TrainingLabels = trainingLabels,
                                TestingImages  = testingImages,
                                TestingLabels  = testingLabels,
                                UseConsole     = true,
                                SaveToXml      = true,
                                OutputDiffLog  = true
                            };

                            Validation(validationParameter2, out _, out _);

                            // clean up tmp files
                            Clean(parameter.Output);
                        }
            }
            catch (Exception e)
            {
                // Log the full exception (type + message + stack trace), not just
                // e.Message, so failures stay diagnosable.
                Logger.Error(e.ToString());
            }
        }