Exemplo n.º 1
0
        /// <summary>
        /// Initializes the frame from the feature/label variable shapes:
        /// caches the full feature dimensions and the label dimension so batch
        /// loading knows the expected sample layout.
        /// </summary>
        public ImageDataFrame(SiaVariable feature, SiaVariable label)
        {
            // Start reading from the first sample.
            counter = 0;

            // Feature layout and number of label classes, taken from the shapes.
            features = feature.Shape.Dimensions.ToArray();
            labels   = label.Shape.Dimensions[0];
        }
Exemplo n.º 2
0
 /// <summary>
 /// Wires together everything a training/prediction run needs: the model,
 /// its loss and metric functions (with display names), the learners, and
 /// the input variables bound to the data frame columns.
 /// </summary>
 public DataFrameTrainPredict(Function model, Function lossFunc, string lossName, Function metricFunc, string metricName, IList <Learner> learners, SiaVariable featureVariable, SiaVariable labelVariable)
 {
     // Model and its optimizers.
     this.Model    = model;
     this.learners = learners;

     // Loss / metric functions together with the names used in reported history.
     this.lossFunc   = lossFunc;
     this.lossName   = lossName;
     this.metricFunc = metricFunc;
     this.metricName = metricName;

     // Input variables the minibatches are bound to.
     this.featureVariable = featureVariable;
     this.labelVariable   = labelVariable;
 }
Exemplo n.º 3
0
        /// <summary>
        /// Opens <c>FileName</c> as a CNTK text-format minibatch source with two
        /// streams (features and labels) sized from the given variables, and caches
        /// the stream handles for later batch reads.
        /// </summary>
        internal void LoadTextData(CNTK.Variable feature, CNTK.Variable label)
        {
            // Flattened sample size: rank-1 features are plain vectors; otherwise
            // assume a 3-D image shape and multiply its dimensions out.
            var featureShape = feature.Shape;
            int imageSize = featureShape.Rank == 1
                ? featureShape[0]
                : featureShape[0] * featureShape[1] * featureShape[2];
            int numClasses = label.Shape[0];

            IList <StreamConfiguration> streamConfigurations = new StreamConfiguration[]
            {
                new StreamConfiguration(featureStreamName, imageSize),
                new StreamConfiguration(labelsStreamName, numClasses)
            };

            // Infinitely repeating source: the training loop decides when to stop.
            miniBatchSource = MinibatchSource.TextFormatMinibatchSource(FileName, streamConfigurations, MinibatchSource.InfinitelyRepeat);

            featureVariable   = feature;
            labelVariable     = label;
            featureStreamInfo = miniBatchSource.StreamInfo(featureStreamName);
            labelStreamInfo   = miniBatchSource.StreamInfo(labelsStreamName);
        }
Exemplo n.º 4
0
        /// <summary>
        /// Builds an image data frame from a folder whose sub-directories are the
        /// classes: every file in sub-directory N is mapped to label N (1-based),
        /// optionally augmented with randomly rotated copies, then shuffled.
        /// </summary>
        /// <param name="feature">Feature variable defining the sample shape.</param>
        /// <param name="label">Label variable defining the number of classes.</param>
        /// <param name="folder">Root folder; one sub-directory per class.</param>
        /// <param name="resize">Target size for resizing, 0 = keep original.</param>
        /// <param name="numberOfRandomRotation">Extra randomly-rotated copies per image.</param>
        /// <param name="horizontalFlip">Currently unused (flip support disabled).</param>
        /// <param name="verticalFlip">Currently unused (flip support disabled).</param>
        public ImageDataFrame(SiaVariable feature, SiaVariable label, string folder, int resize = 0, int numberOfRandomRotation = 0, bool horizontalFlip = false, bool verticalFlip = false)
            : this(feature, label)
        {
            this.folder   = folder;
            fromFolder    = true;
            folderMapData = new List <ImageMapInfo>();

            // One shared RNG for all augmentations. The original constructed
            // `new Random(30)` inside the loop, which re-seeds on every call and
            // therefore produced the SAME "random" angle for every image.
            Random rng = new Random();

            DirectoryInfo dir        = new DirectoryInfo(folder);
            var           subfolders = dir.GetDirectories();
            int           labelIndex = 1; // 1-based class label per sub-directory

            foreach (var item in subfolders)
            {
                var files = item.GetFiles().Select(x => (x.FullName)).ToList();
                foreach (var file in files)
                {
                    // The original, un-rotated image.
                    folderMapData.Add(new ImageMapInfo()
                    {
                        Filepath = file, Label = labelIndex, RotationAngle = 0, Resize = resize
                    });

                    // Augmented copies with a fresh random angle each time.
                    for (int i = 0; i < numberOfRandomRotation; i++)
                    {
                        folderMapData.Add(new ImageMapInfo()
                        {
                            Filepath = file, Label = labelIndex, RotationAngle = rng.Next(10, 360), Resize = resize
                        });
                    }

                    // NOTE(review): horizontalFlip / verticalFlip are accepted but
                    // intentionally ignored — the Emgu.CV flip augmentation was
                    // disabled; re-enable here if flip support returns.
                }

                labelIndex++;
            }

            Shuffle();
        }
Exemplo n.º 5
0
        /// <summary>
        /// Trains the model against an in-memory <c>XYFrame</c>, optionally
        /// evaluating a validation frame after every epoch.
        /// </summary>
        /// <param name="trainData">Training data; must be an <c>XYFrame</c>.</param>
        /// <param name="validationData">Optional validation <c>XYFrame</c>, or null.</param>
        /// <param name="epoches">Number of passes over the training data.</param>
        /// <param name="batchSize">Minibatch size.</param>
        /// <param name="OnEpochStart">Invoked before each epoch.</param>
        /// <param name="OnEpochEnd">Invoked after each epoch with loss and metrics.</param>
        /// <param name="onBatchStart">Invoked before each minibatch.</param>
        /// <param name="OnBatchEnd">Invoked after each minibatch.</param>
        /// <param name="shuffle">When true, reshuffles the training frame each epoch.</param>
        /// <returns>
        /// Per-epoch history keyed by "loss", the metric name and — when validation
        /// data is supplied — "val_loss" / "val_&lt;metric&gt;".
        /// </returns>
        public Dictionary <string, List <double> > Train(object trainData, object validationData, int epoches, int batchSize, On_Epoch_Start OnEpochStart, On_Epoch_End OnEpochEnd, On_Batch_Start onBatchStart, On_Batch_End OnBatchEnd, bool shuffle = false)
        {
            XYFrame train      = (XYFrame)trainData;
            XYFrame validation = validationData != null ? (XYFrame)validationData : null;
            Dictionary <string, List <double> > result = new Dictionary <string, List <double> >();
            var trainer      = Trainer.CreateTrainer(Model, lossFunc, metricFunc, learners);
            int currentEpoch = 1;
            Dictionary <string, double> metricsList = new Dictionary <string, double>();

            // Loop-invariant evaluation graph. The original rebuilt the input
            // variable and the loss/metric functions for EVERY validation
            // minibatch; they depend only on the label shape, so build them once.
            SiaVariable actualVariable = null;
            Function    evalLossFunc   = null;
            Function    evalMetricFunc = null;
            if (validation != null)
            {
                actualVariable = CNTKLib.InputVariable(labelVariable.Shape, DataType.Float);
                evalLossFunc   = Losses.Get(lossName, labelVariable, actualVariable);
                evalMetricFunc = Metrics.Get(metricName, labelVariable, actualVariable);
            }

            while (currentEpoch <= epoches)
            {
                if (shuffle)
                {
                    train.Shuffle();
                }

                metricsList = new Dictionary <string, double>();
                OnEpochStart(currentEpoch);
                int miniBatchCount = 1;
                while (train.NextBatch(miniBatchCount, batchSize))
                {
                    onBatchStart(currentEpoch, miniBatchCount);
                    Value features = DataFrameUtil.GetValueBatch(train.CurrentBatch.XFrame);
                    Value labels   = DataFrameUtil.GetValueBatch(train.CurrentBatch.YFrame);

                    trainer.TrainMinibatch(new Dictionary <SiaVariable, Value>()
                    {
                        { featureVariable, features }, { labelVariable, labels }
                    }, GlobalParameters.Device);
                    OnBatchEnd(currentEpoch, miniBatchCount, trainer.TotalNumberOfSamplesSeen(), trainer.PreviousMinibatchLossAverage(), new Dictionary <string, double>()
                    {
                        { metricName, trainer.PreviousMinibatchEvaluationAverage() }
                    });
                    miniBatchCount++;
                }

                if (!result.ContainsKey("loss"))
                {
                    result.Add("loss", new List <double>());
                }

                if (!result.ContainsKey(metricName))
                {
                    result.Add(metricName, new List <double>());
                }

                // NOTE(review): per-epoch loss/metric come from the LAST minibatch
                // only (original behavior preserved); the generator-based overload
                // averages the loss over the whole epoch instead.
                double lossValue   = trainer.PreviousMinibatchLossAverage();
                double metricValue = trainer.PreviousMinibatchEvaluationAverage();
                result["loss"].Add(lossValue);
                result[metricName].Add(metricValue);
                metricsList.Add(metricName, metricValue);

                if (validation != null)
                {
                    if (!result.ContainsKey("val_loss"))
                    {
                        result.Add("val_loss", new List <double>());
                    }

                    if (!result.ContainsKey("val_" + metricName))
                    {
                        result.Add("val_" + metricName, new List <double>());
                    }

                    int           evalMiniBatchCount       = 1;
                    List <double> totalEvalBatchLossList   = new List <double>();
                    List <double> totalEvalMetricValueList = new List <double>();
                    while (validation.NextBatch(evalMiniBatchCount, batchSize))
                    {
                        Value actual   = EvaluateInternal(validation.CurrentBatch.XFrame);
                        Value expected = DataFrameUtil.GetValueBatch(validation.CurrentBatch.YFrame);

                        totalEvalBatchLossList.Add(EvaluateScalarAverage(evalLossFunc, actualVariable, actual, expected));
                        totalEvalMetricValueList.Add(EvaluateScalarAverage(evalMetricFunc, actualVariable, actual, expected));

                        evalMiniBatchCount++;
                    }

                    // Compute each average once (original recomputed them) and skip
                    // recording when the validation set produced no batches, which
                    // would otherwise throw on Average().
                    if (totalEvalBatchLossList.Count > 0)
                    {
                        double valLoss   = totalEvalBatchLossList.Average();
                        double valMetric = totalEvalMetricValueList.Average();
                        result["val_loss"].Add(valLoss);
                        metricsList.Add("val_loss", valLoss);
                        result["val_" + metricName].Add(valMetric);
                        metricsList.Add("val_" + metricName, valMetric);
                    }
                }

                OnEpochEnd(currentEpoch, trainer.TotalNumberOfSamplesSeen(), lossValue, metricsList);
                currentEpoch++;
            }

            return(result);
        }

        /// <summary>
        /// Evaluates <paramref name="func"/> for one (expected, actual) pair and
        /// returns the mean of its scalar outputs over the batch.
        /// </summary>
        private double EvaluateScalarAverage(Function func, SiaVariable actualVariable, Value actual, Value expected)
        {
            var inputDataMap = new Dictionary <SiaVariable, Value>()
            {
                { labelVariable, expected }, { actualVariable, actual }
            };
            var outputDataMap = new Dictionary <SiaVariable, Value>()
            {
                { func.Output, null }
            };

            func.Evaluate(inputDataMap, outputDataMap, GlobalParameters.Device);
            // One scalar per sample; widen the float average to double for history.
            return outputDataMap[func.Output].GetDenseData <float>(func.Output).Select(x => x.First()).Average();
        }
        /// <summary>
        /// Trains the model against an <c>ImageDataGenerator</c> (in-memory or
        /// text-file backed), optionally evaluating a validation generator after
        /// every epoch.
        /// </summary>
        /// <param name="trainData">Training data; must be an <c>ImageDataGenerator</c>.</param>
        /// <param name="validationData">Optional validation generator, or null.</param>
        /// <param name="epoches">Number of passes over the training data.</param>
        /// <param name="batchSize">Minibatch size.</param>
        /// <param name="OnEpochStart">Invoked before each epoch.</param>
        /// <param name="OnEpochEnd">Invoked after each epoch with loss and metrics.</param>
        /// <param name="onBatchStart">Invoked before each minibatch.</param>
        /// <param name="OnBatchEnd">Invoked after each minibatch.</param>
        /// <param name="shuffle">Unused in this overload (generator controls order).</param>
        /// <returns>
        /// Per-epoch history keyed by "loss", the metric name and — when validation
        /// data is supplied — "val_loss" / "val_&lt;metric&gt;".
        /// </returns>
        public Dictionary <string, List <double> > Train(object trainData, object validationData, int epoches, int batchSize, On_Epoch_Start OnEpochStart, On_Epoch_End OnEpochEnd, On_Batch_Start onBatchStart, On_Batch_End OnBatchEnd, bool shuffle = false)
        {
            ImageDataGenerator train      = (ImageDataGenerator)trainData;
            ImageDataGenerator validation = validationData != null ? (ImageDataGenerator)validationData : null;
            Dictionary <string, List <double> > result = new Dictionary <string, List <double> >();
            var trainer      = Trainer.CreateTrainer(Model, lossFunc, metricFunc, learners);
            int currentEpoch = 1;
            Dictionary <string, double> metricsList = new Dictionary <string, double>();
            // (Removed dead locals: imageSize / numClasses / streamConfigurations
            // were computed here but never used — LoadTextData builds its own
            // stream configuration.)

            if (train.GenType == ImageGenType.FromTextFile)
            {
                train.LoadTextData(featureVariable, labelVariable);
                if (validation != null)
                {
                    validation.LoadTextData(featureVariable, labelVariable);
                }
            }

            // Loop-invariant evaluation graph. The original rebuilt the input
            // variable and loss/metric functions for every validation minibatch.
            SiaVariable actualVariable = null;
            Function    evalLossFunc   = null;
            Function    evalMetricFunc = null;
            if (validation != null)
            {
                actualVariable = CNTKLib.InputVariable(labelVariable.Shape, DataType.Float);
                evalLossFunc   = Losses.Get(lossName, labelVariable, actualVariable);
                evalMetricFunc = Metrics.Get(metricName, labelVariable, actualVariable);
            }

            while (currentEpoch <= epoches)
            {
                metricsList.Clear();
                OnEpochStart(currentEpoch);
                int           miniBatchCount  = 1;
                List <double> miniBatchLosses = new List <double>();
                // NOTE(review): the training loop negates NextBatch while the
                // validation loop below does not — presumably NextBatch reports
                // sweep-end for the training source; confirm against
                // ImageDataGenerator before changing either condition.
                while (!train.NextBatch(batchSize))
                {
                    onBatchStart(currentEpoch, miniBatchCount);
                    trainer.TrainMinibatch(new Dictionary <SiaVariable, Value> {
                        { featureVariable, train.CurrentBatchX }, { labelVariable, train.CurrentBatchY }
                    }, true, GlobalParameters.Device);
                    OnBatchEnd(currentEpoch, miniBatchCount, trainer.TotalNumberOfSamplesSeen(), trainer.PreviousMinibatchLossAverage(), new Dictionary <string, double>()
                    {
                        { metricName, trainer.PreviousMinibatchEvaluationAverage() }
                    });
                    miniBatchLosses.Add(trainer.PreviousMinibatchLossAverage());
                    miniBatchCount++;
                }

                if (!result.ContainsKey("loss"))
                {
                    result.Add("loss", new List <double>());
                }

                if (!result.ContainsKey(metricName))
                {
                    result.Add(metricName, new List <double>());
                }

                // Epoch loss is averaged over all minibatches; the metric is from
                // the last minibatch only (original behavior preserved).
                // NOTE(review): Average() throws if the epoch produced no batches.
                double lossValue   = miniBatchLosses.Average();
                double metricValue = trainer.PreviousMinibatchEvaluationAverage();
                result["loss"].Add(lossValue);
                result[metricName].Add(metricValue);
                metricsList.Add(metricName, metricValue);

                if (validation != null)
                {
                    if (!result.ContainsKey("val_loss"))
                    {
                        result.Add("val_loss", new List <double>());
                    }

                    if (!result.ContainsKey("val_" + metricName))
                    {
                        result.Add("val_" + metricName, new List <double>());
                    }

                    List <double> totalEvalBatchLossList   = new List <double>();
                    List <double> totalEvalMetricValueList = new List <double>();
                    while (validation.NextBatch(batchSize))
                    {
                        Value actual   = EvaluateInternal(validation.CurrentBatchX);
                        Value expected = validation.CurrentBatchY;

                        // Evaluate the loss on this validation batch.
                        var inputDataMap = new Dictionary <SiaVariable, Value>()
                        {
                            { labelVariable, expected }, { actualVariable, actual }
                        };
                        var outputDataMap = new Dictionary <SiaVariable, Value>()
                        {
                            { evalLossFunc.Output, null }
                        };
                        evalLossFunc.Evaluate(inputDataMap, outputDataMap, GlobalParameters.Device);
                        var evalLoss = outputDataMap[evalLossFunc.Output].GetDenseData <float>(evalLossFunc.Output).Select(x => x.First()).ToList();
                        totalEvalBatchLossList.Add(evalLoss.Average());

                        // Evaluate the metric on the same batch (fresh maps: Evaluate
                        // fills the output slots in place).
                        inputDataMap = new Dictionary <SiaVariable, Value>()
                        {
                            { labelVariable, expected }, { actualVariable, actual }
                        };
                        outputDataMap = new Dictionary <SiaVariable, Value>()
                        {
                            { evalMetricFunc.Output, null }
                        };
                        evalMetricFunc.Evaluate(inputDataMap, outputDataMap, GlobalParameters.Device);
                        var evalMetric = outputDataMap[evalMetricFunc.Output].GetDenseData <float>(evalMetricFunc.Output).Select(x => x.First()).ToList();
                        totalEvalMetricValueList.Add(evalMetric.Average());
                    }

                    // Compute each average once (original recomputed them) and skip
                    // recording when the validation set produced no batches, which
                    // would otherwise throw on Average().
                    if (totalEvalBatchLossList.Count > 0)
                    {
                        double valLoss   = totalEvalBatchLossList.Average();
                        double valMetric = totalEvalMetricValueList.Average();
                        result["val_loss"].Add(valLoss);
                        metricsList.Add("val_loss", valLoss);
                        result["val_" + metricName].Add(valMetric);
                        metricsList.Add("val_" + metricName, valMetric);
                    }
                }

                OnEpochEnd(currentEpoch, trainer.TotalNumberOfSamplesSeen(), lossValue, metricsList);
                currentEpoch++;
            }

            return(result);
        }