Example #1
        private static void Test(string dataset, string model)
        {
            try
            {
                IList<Matrix<RgbPixel>> trainingImages;
                IList<uint> trainingLabels;
                IList<Matrix<RgbPixel>> testingImages;
                IList<uint> testingLabels;

                Console.WriteLine("Start load train images");
                Load("train", dataset, null, out trainingImages, out trainingLabels);
                Console.WriteLine($"Load train images: {trainingImages.Count}");

                Console.WriteLine("Start load test images");
                Load("test", dataset, null, out testingImages, out testingLabels);
                Console.WriteLine($"Load test images: {testingImages.Count}");

                // Create the native train-type handle and register it so that
                // Deserialize can resolve the network by its registry id.
                var trainNet  = NativeMethods.LossMulticlassLog_age_train_type_create();
                var networkId = LossMulticlassLogRegistry.GetId(trainNet);
                LossMulticlassLogRegistry.Add(trainNet);

                using (var net = LossMulticlassLog.Deserialize(model, networkId))
                {
                    Validation("", net, trainingImages, trainingLabels, testingImages, testingLabels, true, false, out _, out _);
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
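
A minimal call sketch for the method above; the dataset directory and model file name are placeholders, not values taken from the original project:

        // Hypothetical invocation: "data/adience" and "age-network.dat" are stand-ins
        // for a real dataset directory and a serialized LossMulticlassLog model file.
        Test(dataset: "data/adience", model: "age-network.dat");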
Example #2
        protected override int SetupNetwork()
        {
            var trainNet  = NativeMethods.LossMulticlassLog_emotion_train_type_create();
            var networkId = LossMulticlassLogRegistry.GetId(trainNet);

            LossMulticlassLogRegistry.Add(trainNet);
            return networkId;
        }
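
For context, a hedged sketch of how the id returned above is typically consumed, mirroring the Deserialize pattern in the other examples; the model file name is a placeholder:

        // Hypothetical caller inside the same (derived) class: the id returned by
        // SetupNetwork() identifies the registered native train type, and
        // LossMulticlassLog.Deserialize uses it to construct the managed network.
        var networkId = SetupNetwork();
        using (var net = LossMulticlassLog.Deserialize("emotion-network.dat", networkId))
        {
            // run inference with net here ...
        }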
Example #3
        /// <summary>
        /// Initializes a new instance of the <see cref="SimpleAgeEstimator"/> class with the model file path that this estimator uses.
        /// </summary>
        /// <param name="modelPath">The model file path that this estimator uses.</param>
        /// <exception cref="FileNotFoundException">The model file is not found.</exception>
        public SimpleAgeEstimator(string modelPath)
        {
            if (!File.Exists(modelPath))
            {
                throw new FileNotFoundException("The model file is not found.", modelPath);
            }

            var ret       = NativeMethods.LossMulticlassLog_age_train_type_create();
            var networkId = LossMulticlassLogRegistry.GetId(ret);

            if (LossMulticlassLogRegistry.Contains(networkId))
            {
                NativeMethods.LossMulticlassLog_age_train_type_delete(ret);
            }
            else
            {
                LossMulticlassLogRegistry.Add(ret);
            }

            this._Network = LossMulticlassLog.Deserialize(modelPath, networkId);
        }
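
A short usage sketch for the constructor above; the model path is a placeholder and the estimator's prediction members are omitted because they are not shown here:

        // Hypothetical usage; "age-network.dat" stands in for a real model file.
        try
        {
            var estimator = new SimpleAgeEstimator("age-network.dat");
            // call the estimator's prediction members here ...
        }
        catch (FileNotFoundException e)
        {
            Console.WriteLine(e.Message);
        }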
Example #4
        private static void Train(string baseName, string dataset, uint epoch, double learningRate, double minLearningRate, uint miniBatchSize, uint validation, bool useMean)
        {
            try
            {
                IList<Matrix<RgbPixel>> trainingImages;
                IList<uint> trainingLabels;
                IList<Matrix<RgbPixel>> testingImages;
                IList<uint> testingLabels;

                var mean = useMean ? Path.Combine(dataset, "train.mean.bmp") : null;

                Console.WriteLine("Start load train images");
                Load("train", dataset, mean, out trainingImages, out trainingLabels);
                Console.WriteLine($"Load train images: {trainingImages.Count}");

                Console.WriteLine("Start load test images");
                Load("test", dataset, mean, out testingImages, out testingLabels);
                Console.WriteLine($"Load test images: {testingImages.Count}");

                // So with that out of the way, we can make a network instance.
                var trainNet  = NativeMethods.LossMulticlassLog_age_train_type_create();
                var networkId = LossMulticlassLogRegistry.GetId(trainNet);
                LossMulticlassLogRegistry.Add(trainNet);

                using (var net = new LossMulticlassLog(networkId))
                    using (var trainer = new DnnTrainer<LossMulticlassLog>(net))
                    {
                        trainer.SetLearningRate(learningRate);
                        trainer.SetMinLearningRate(minLearningRate);
                        trainer.SetMiniBatchSize(miniBatchSize);
                        trainer.BeVerbose();
                        trainer.SetSynchronizationFile(baseName, 180);

                        // Pre-allocate the mini-batch buffers; the final batch holds the remainder.
                        var trainingImagesCount = trainingImages.Count;
                        var trainingLabelsCount = trainingLabels.Count;

                        var maxIteration = (int)Math.Ceiling(trainingImagesCount / (float)miniBatchSize);
                        var imageBatches = new Matrix<RgbPixel>[maxIteration][];
                        var labelBatches = new uint[maxIteration][];
                        for (var i = 0; i < maxIteration; i++)
                        {
                            if (miniBatchSize <= trainingImagesCount - i * miniBatchSize)
                            {
                                imageBatches[i] = new Matrix<RgbPixel>[miniBatchSize];
                                labelBatches[i] = new uint[miniBatchSize];
                            }
                            else
                            {
                                imageBatches[i] = new Matrix<RgbPixel>[trainingImagesCount % miniBatchSize];
                                labelBatches[i] = new uint[trainingLabelsCount % miniBatchSize];
                            }
                        }

                        using (var fs = new FileStream($"{baseName}.log", FileMode.Create, FileAccess.Write, FileShare.Write))
                            using (var sw = new StreamWriter(fs, Encoding.UTF8))
                                for (var e = 0; e < epoch; e++)
                                {
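                                    // Shuffle the training indices once per epoch, then fill the
                                    // pre-allocated mini-batch arrays from the shuffled order.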
                                    var randomArray = Enumerable.Range(0, trainingImagesCount).OrderBy(i => Guid.NewGuid()).ToArray();
                                    var index       = 0;
                                    for (var i = 0; i < imageBatches.Length; i++)
                                    {
                                        var currentImages = imageBatches[i];
                                        var currentLabels = labelBatches[i];
                                        for (var j = 0; j < imageBatches[i].Length; j++)
                                        {
                                            var rIndex = randomArray[index];
                                            currentImages[j] = trainingImages[rIndex];
                                            currentLabels[j] = trainingLabels[rIndex];
                                            index++;
                                        }
                                    }

                                    for (var i = 0; i < maxIteration; i++)
                                    {
                                        LossMulticlassLog.TrainOneStep(trainer, imageBatches[i], labelBatches[i]);
                                    }

                                    var lr   = trainer.GetLearningRate();
                                    var loss = trainer.GetAverageLoss();

                                    var trainLog = $"Epoch: {e}, learning Rate: {lr}, average loss: {loss}";
                                    Console.WriteLine(trainLog);
                                    sw.WriteLine(trainLog);

                                    if (e > 0 && e % validation == 0)
                                    {
                                        Validation(baseName, net, trainingImages, trainingLabels, testingImages, testingLabels, false, false, out var trainAccuracy, out var testAccuracy);

                                        var validationLog = $"Epoch: {e}, train accuracy: {trainAccuracy}, test accuracy: {testAccuracy}";
                                        Console.WriteLine(validationLog);
                                        sw.WriteLine(validationLog);
                                    }

                                    if (lr < minLearningRate)
                                    {
                                        break;
                                    }
                                }

                        // wait for training threads to stop
                        trainer.GetNet();
                        Console.WriteLine("done training");

                        net.Clean();
                        LossMulticlassLog.Serialize(net, $"{baseName}.dat");

                        // Now let's run the training and testing images through the network.  This statement
                        // runs all the images through it and asks the loss layer to convert the network's raw
                        // output into labels.  In our case, these labels are the age-class indices of the dataset.
                        Validation(baseName, net, trainingImages, trainingLabels, testingImages, testingLabels, true, true, out _, out _);
                    }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
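
A hedged call sketch for the trainer above; every argument value is illustrative only:

        // Hypothetical invocation: synchronization, log and model files are written
        // under the "age-network" base name, accuracy is validated every 30 epochs,
        // and no mean image is subtracted.
        Train(baseName: "age-network",
              dataset: "data/adience",
              epoch: 300,
              learningRate: 0.001,
              minLearningRate: 0.00001,
              miniBatchSize: 96,
              validation: 30,
              useMean: false);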