Example #1
        public static void LoadData()
        {
            DataFrame frame = new DataFrame();

            // filepath, train and lookback are fields defined elsewhere in the class.
            frame.LoadFromCsv(filepath);
            frame = frame.GetFrame(5);
            frame.Normalize();
            // Turn the normalized series into supervised samples using a sliding lookback window.
            train = frame.ConvertTimeSeries(lookback);
        }
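A continuation sketch, not taken from the original source: it assumes the 'train' field filled above is an XYFrame that exposes SplitTrainTest and XFrame as in the later examples, and the layer sizes, activation, epoch and batch counts are purely illustrative. The constructor calls mirror the forms used in Example #3.

        public static void BuildAndTrain()
        {
            TrainTestFrame split    = train.SplitTrainTest(0.3);
            int            inputDim = train.XFrame.Shape[1];
            int            seed     = 2;

            Sequential model = new Sequential();
            model.Add(new Reshape(Shape.Create(1, inputDim), Shape.Create(inputDim)));
            model.Add(new LSTM(32, returnSequence: false, cellDim: 4,
                               weightInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed),
                               recurrentInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed),
                               biasInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed)));
            model.Add(new Dense(1, act: "sigmoid", useBias: true,
                                weightInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed)));
            model.Compile(OptOptimizers.Adam, OptLosses.MeanSquaredError, OptMetrics.Accuracy);
            model.Train(split.Train, 100, 16, split.Test);
        }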
Example #2
        public void Extract()
        {
            // 'folder' is a field holding the base directory used below.
            string tempFolder = folder + "\\tmp";

            TrainFrame = new XYFrame();
            TestFrame  = new XYFrame();
            ExtractTrain(tempFolder);
            ExtractTest(tempFolder);

            TrainFrame.SaveStream(folder + "\\train.sia");
            TestFrame.SaveStream(folder + "\\test.sia");

            // Clean up the temporary extraction folder and its contents.
            Directory.Delete(tempFolder, true);
        }
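A variant sketch, not from the original source, assuming the same 'folder' field and ExtractTrain/ExtractTest helpers: it swaps the manual "\\" concatenation for System.IO's Path.Combine plus an existence check, which behaves the same on Windows but is a little more robust.

        public void ExtractSafely()
        {
            string tempFolder = Path.Combine(folder, "tmp");
            Directory.CreateDirectory(tempFolder);

            TrainFrame = new XYFrame();
            TestFrame  = new XYFrame();
            ExtractTrain(tempFolder);
            ExtractTest(tempFolder);

            TrainFrame.SaveStream(Path.Combine(folder, "train.sia"));
            TestFrame.SaveStream(Path.Combine(folder, "test.sia"));

            if (Directory.Exists(tempFolder))
            {
                Directory.Delete(tempFolder, recursive: true);
            }
        }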
Example #3
        private static void RunTest()
        {
            Random    Rnd      = new Random();
            DataFrame trnX_fin = new DataFrame();
            DataFrame trnY_fin = new DataFrame();

            // Build 100 feature rows of 100 random values each
            for (int cc = 0; cc < 100; cc++)
            {
                float[] sngLst = new float[100];
                for (int indx = 0; indx < 100; indx++)
                {
                    sngLst[indx] = (float)Rnd.NextDouble();
                }

                trnX_fin.Add(sngLst);
            }

            // Build 100 matching label rows: a fake one-hot vector (always class 1), just to check the pipeline
            for (int cc = 0; cc < 100; cc++)
            {
                float[] sngLst = new float[3];
                sngLst[0] = 0;
                sngLst[1] = 1;
                sngLst[2] = 0;
                trnY_fin.Add(sngLst);
            }

            XYFrame XYfrm = new XYFrame();

            XYfrm.XFrame = trnX_fin;
            XYfrm.YFrame = trnY_fin;
            // Split into training and test sets
            TrainTestFrame trainTestFrame = XYfrm.SplitTrainTest(0.3);
            // Initialize a few model hyper-parameters
            int        shape_of_input = XYfrm.XFrame.Shape[1];
            int        embval         = 100;
            int        seed           = 2;
            Sequential model          = new Sequential();

            model.Add(new Reshape(Shape.Create(1, embval), Shape.Create(shape_of_input)));
            model.Add(new LSTM(64, returnSequence: false, cellDim: 4,
                               weightInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed),
                               recurrentInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed),
                               biasInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed)));
            model.Add(new Dense(3, act: "sigmoid", useBias: true,
                                weightInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed)));
            model.Compile(OptOptimizers.Adam, OptLosses.MeanSquaredError, OptMetrics.Accuracy);
            model.Train(trainTestFrame.Train, 200, 8, trainTestFrame.Test);
        }
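A small helper sketch that is not part of the original example: it produces genuinely random one-hot labels instead of the fixed {0, 1, 0} rows, relying only on DataFrame.Add(float[]) as already used above. With it, the second loop collapses to a single call such as trnY_fin = BuildRandomOneHotLabels(100, 3, Rnd);.

        private static DataFrame BuildRandomOneHotLabels(int rows, int classes, Random rnd)
        {
            DataFrame labels = new DataFrame();

            for (int i = 0; i < rows; i++)
            {
                float[] row = new float[classes];
                row[rnd.Next(classes)] = 1f;   // exactly one active class per row
                labels.Add(row);
            }

            return labels;
        }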
Example #4
        public static void LoadData()
        {
            trainData = new XYFrame();

            /*
             * // One approach to building the dataset:
             * trainData.Add(new List<float>() { 0, 0 }, 0);
             * trainData.Add(new List<float>() { 0, 1 }, 1);
             * trainData.Add(new List<float>() { 1, 0 }, 1);
             * trainData.Add(new List<float>() { 1, 1 }, 0);
             * trainData.YFrame.OneHotEncode();
             */

            // Second approach: add feature rows and labels directly, then one-hot encode the labels
            trainData.XFrame.Add(0, 0); trainData.YFrame.Add(0);
            trainData.XFrame.Add(0, 1); trainData.YFrame.Add(1);
            trainData.XFrame.Add(1, 0); trainData.YFrame.Add(1);
            trainData.XFrame.Add(1, 1); trainData.YFrame.Add(0);
            trainData.YFrame.OneHotEncode();
        }
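For completeness, a compilable sketch of the first, commented-out approach; the method name is illustrative and the calls are taken verbatim from the comment. OneHotEncode presumably expands the scalar labels into one-hot rows (0 into {1, 0}, 1 into {0, 1}), which is what the second approach relies on as well; List<float> needs using System.Collections.Generic.

        public static XYFrame BuildXorFrameAlternative()
        {
            XYFrame data = new XYFrame();

            // Each Add call takes a feature row together with its scalar label.
            data.Add(new List<float>() { 0, 0 }, 0);
            data.Add(new List<float>() { 0, 1 }, 1);
            data.Add(new List<float>() { 1, 0 }, 1);
            data.Add(new List<float>() { 1, 1 }, 0);

            // Expand the scalar labels into one-hot rows.
            data.YFrame.OneHotEncode();

            return data;
        }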
Example #5
        public Dictionary<string, List<double>> Train(object trainData, object validationData, int epoches, int batchSize,
                                                      On_Epoch_Start OnEpochStart, On_Epoch_End OnEpochEnd,
                                                      On_Batch_Start onBatchStart, On_Batch_End OnBatchEnd,
                                                      bool shuffle = false)
        {
            XYFrame train      = (XYFrame)trainData;
            XYFrame validation = validationData != null ? (XYFrame)validationData : null;
            Dictionary<string, List<double>> result = new Dictionary<string, List<double>>();
            var trainer      = Trainer.CreateTrainer(Model, lossFunc, metricFunc, learners);
            int currentEpoch = 1;
            Dictionary<string, double> metricsList = new Dictionary<string, double>();

            while (currentEpoch <= epoches)
            {
                if (shuffle)
                {
                    train.Shuffle();
                }

                metricsList = new Dictionary<string, double>();
                OnEpochStart(currentEpoch);
                int miniBatchCount = 1;
                while (train.NextBatch(miniBatchCount, batchSize))
                {
                    onBatchStart(currentEpoch, miniBatchCount);
                    Value features = DataFrameUtil.GetValueBatch(train.CurrentBatch.XFrame);
                    Value labels   = DataFrameUtil.GetValueBatch(train.CurrentBatch.YFrame);

                    trainer.TrainMinibatch(new Dictionary<Variable, Value>()
                    {
                        { featureVariable, features }, { labelVariable, labels }
                    }, GlobalParameters.Device);
                    OnBatchEnd(currentEpoch, miniBatchCount, trainer.TotalNumberOfSamplesSeen(),
                               trainer.PreviousMinibatchLossAverage(), new Dictionary<string, double>()
                    {
                        { metricName, trainer.PreviousMinibatchEvaluationAverage() }
                    });
                    miniBatchCount++;
                }

                if (!result.ContainsKey("loss"))
                {
                    result.Add("loss", new List<double>());
                }

                if (!result.ContainsKey(metricName))
                {
                    result.Add(metricName, new List<double>());
                }

                double lossValue   = trainer.PreviousMinibatchLossAverage();
                double metricValue = trainer.PreviousMinibatchEvaluationAverage();
                result["loss"].Add(lossValue);
                result[metricName].Add(metricValue);
                metricsList.Add(metricName, metricValue);
                if (validation != null)
                {
                    if (!result.ContainsKey("val_loss"))
                    {
                        result.Add("val_loss", new List<double>());
                    }

                    if (!result.ContainsKey("val_" + metricName))
                    {
                        result.Add("val_" + metricName, new List<double>());
                    }

                    int          evalMiniBatchCount       = 1;
                    List<double> totalEvalBatchLossList   = new List<double>();
                    List<double> totalEvalMetricValueList = new List<double>();
                    while (validation.NextBatch(evalMiniBatchCount, batchSize))
                    {
                        Variable actualVariable = CNTKLib.InputVariable(labelVariable.Shape, DataType.Float);
                        var      evalLossFunc   = Losses.Get(lossName, labelVariable, actualVariable);
                        var      evalMetricFunc = Metrics.Get(metricName, labelVariable, actualVariable);
                        Value    actual         = EvaluateInternal(validation.CurrentBatch.XFrame);
                        Value    expected       = DataFrameUtil.GetValueBatch(validation.CurrentBatch.YFrame);
                        var inputDataMap = new Dictionary<Variable, Value>()
                        {
                            { labelVariable, expected }, { actualVariable, actual }
                        };
                        var outputDataMap = new Dictionary<Variable, Value>()
                        {
                            { evalLossFunc.Output, null }
                        };

                        evalLossFunc.Evaluate(inputDataMap, outputDataMap, GlobalParameters.Device);
                        var evalLoss = outputDataMap[evalLossFunc.Output]
                                       .GetDenseData<float>(evalLossFunc.Output)
                                       .Select(x => x.First())
                                       .ToList();
                        totalEvalBatchLossList.Add(evalLoss.Average());

                        inputDataMap = new Dictionary<Variable, Value>()
                        {
                            { labelVariable, expected }, { actualVariable, actual }
                        };
                        outputDataMap = new Dictionary<Variable, Value>()
                        {
                            { evalMetricFunc.Output, null }
                        };
                        evalMetricFunc.Evaluate(inputDataMap, outputDataMap, GlobalParameters.Device);
                        var evalMetric = outputDataMap[evalMetricFunc.Output]
                                         .GetDenseData<float>(evalMetricFunc.Output)
                                         .Select(x => x.First())
                                         .ToList();
                        totalEvalMetricValueList.Add(evalMetric.Average());

                        evalMiniBatchCount++;
                    }

                    result["val_loss"].Add(totalEvalBatchLossList.Average());
                    metricsList.Add("val_loss", totalEvalBatchLossList.Average());
                    result["val_" + metricName].Add(totalEvalMetricValueList.Average());
                    metricsList.Add("val_" + metricName, totalEvalMetricValueList.Average());
                }

                OnEpochEnd(currentEpoch, trainer.TotalNumberOfSamplesSeen(), lossValue, metricsList);
                currentEpoch++;
            }

            return result;
        }
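A hedged usage sketch for this Train overload, not from the original source. It assumes the callbacks are void-returning delegates whose parameter lists match how they are invoked inside the method, that the sketch lives in the same class as Train, and that trainSet and testSet are XYFrames such as the ones built in the earlier examples.

        public void RunTraining(XYFrame trainSet, XYFrame testSet)
        {
            Dictionary<string, List<double>> history = Train(
                trainSet,
                testSet,
                10,     // epoches
                32,     // batchSize
                epoch => Console.WriteLine("Epoch " + epoch + " started"),
                (epoch, samplesSeen, loss, metrics) => Console.WriteLine("Epoch " + epoch + " loss: " + loss),
                (epoch, batch) => { },                               // batch start: nothing to do
                (epoch, batch, samplesSeen, loss, metrics) => { },   // batch end: nothing to do
                shuffle: true);

            // history holds per-epoch "loss" and metric series, plus "val_" entries when validation data is supplied.
            Console.WriteLine("Final training loss: " + history["loss"][history["loss"].Count - 1]);
        }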