Example No. 1
        public void ScreenGD(AA1_MLP.Entities.DataSet wholeSet, int k, List<double> momentums, List<double> learningRates, List<double> regularizationRates, List<int> numberOfHiddenNeurons, GradientDescentParams passedParams, int numOfEpochs)
        {
            // Grid-searches the passed hyperparameter values, running k-fold cross-validation for every combination.
            // For reference, a typical call of the trainer's Train method looks like:
            // n, ds, learningRate: .3, numberOfEpochs: 200, shuffle: false, debug: n.Debug, nestrov: false, momentum: 0.9, resilient: false,
            // resilientUpdateAccelerationRate: 0.3, resilientUpdateSlowDownRate: 0.1, regularization: AA1_MLP.Enums.RegularizationRates.L2,
            // regularizationRate: 0.001, validationSet: dt, batchSize: 7

            string reportsDirectory = "80SGDKFoldsReportsnonestrov";

            // Start from a clean reports directory.
            if (Directory.Exists(reportsDirectory))
            {
                Directory.Delete(reportsDirectory, true);
            }
            Directory.CreateDirectory(reportsDirectory);

            /* Example hyperparameter grids (now passed in as arguments):
             * momentums:             { 0, 0.5 }
             * learningRates:         { 0.005, 0.01 }
             * regularizationRates:   { 0, 0.001 }
             * numberOfHiddenNeurons: { 100, 90, 80, 70, 60, 50, 40, 30, 20, 10 }
             */
            IOptimizer trainer = new Gradientdescent();

            // To screen Adam instead of plain gradient descent:
            //   AdamParams passedParams = new AdamParams();
            //   IOptimizer trainer = new Adam();
            passedParams.numberOfEpochs = numOfEpochs;
            passedParams.batchSize      = 10;
            for (int idxnh = 0; idxnh < numberOfHiddenNeurons.Count; idxnh++)
            {
                for (int idxmo = 0; idxmo < momentums.Count; idxmo++)
                {
                    for (int idxLR = 0; idxLR < learningRates.Count; idxLR++)
                    {
                        for (int idxReg = 0; idxReg < regularizationRates.Count; idxReg++)
                        {
                            int nh = numberOfHiddenNeurons[idxnh];
                            passedParams.learningRate        = learningRates[idxLR];
                            passedParams.regularization      = Regularizations.L2;
                            passedParams.regularizationRate  = regularizationRates[idxReg];
                            passedParams.momentum            = momentums[idxmo];
                            passedParams.NumberOfHiddenUnits = nh;

                            RunKFoldWithSetOfParams(wholeSet, k, passedParams, trainer, reportsDirectory);
                        }
                    }
                }
            }
        }
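
A hypothetical call of ScreenGD, for orientation only: the `screener` instance, the loaded `wholeSet`, and the choice of 5 folds and 200 epochs are assumptions, not part of the original example; the grid values mirror the commented-out lists above.

using System.Collections.Generic;

// Hypothetical usage sketch (not from the original repository).
// Assumes `screener` is an instance of the class defining ScreenGD and
// `wholeSet` is an AA1_MLP.Entities.DataSet loaded elsewhere.
var momentums             = new List<double> { 0, 0.5 };
var learningRates         = new List<double> { 0.005, 0.01 };
var regularizationRates   = new List<double> { 0, 0.001 };
var numberOfHiddenNeurons = new List<int> { 100, 90, 80, 70, 60, 50, 40, 30, 20, 10 };

var gdParams = new GradientDescentParams();

// k = 5 folds and 200 epochs are illustrative values only.
screener.ScreenGD(wholeSet, 5, momentums, learningRates, regularizationRates,
                  numberOfHiddenNeurons, gdParams, 200);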
Example No. 2
        private void RunKFoldWithSetOfParams(AA1_MLP.Entities.DataSet wholeSet, int k, INeuralTrainerParams passedParams, IOptimizer trainer, string reportsPath)
        {
            // The folder name encodes the hyperparameters of this k-fold run.
            string kRunFolderName = string.Format("hdn{0}_k{1}_lr{2}_reg{3}", passedParams.NumberOfHiddenUnits, k, passedParams.learningRate, passedParams.regularizationRate);
            string KRunfolderPath = Path.Combine(reportsPath, kRunFolderName);

            if (passedParams is GradientDescentParams)
            {
                KRunfolderPath = string.Format("{0}_mo{1}", KRunfolderPath, ((GradientDescentParams)passedParams).momentum);
            }

            if (Directory.Exists(KRunfolderPath))
            {
                // Recursive delete: the folder may still hold learning-curve files from a previous run.
                Directory.Delete(KRunfolderPath, true);
            }
            Directory.CreateDirectory(KRunfolderPath);

            double avgMSE = 0;
            double MEE = 0, MSE = 0;

            // Each fold holds RowCount / k rows; any remainder rows only ever appear in the training splits.
            int sizeOfDataFold = wholeSet.Labels.RowCount / k;

            // Fold 0: validate on the first block of rows, train on everything after it.
            TrainDataset = new DataSet(
                inputs: wholeSet.Inputs.SubMatrix(sizeOfDataFold,
                                                  wholeSet.Inputs.RowCount - sizeOfDataFold, 0, wholeSet.Inputs.ColumnCount),
                labels: wholeSet.Labels.SubMatrix(sizeOfDataFold,
                                                  wholeSet.Labels.RowCount - sizeOfDataFold, 0, wholeSet.Labels.ColumnCount));
            ValidationSplit = new DataSet(
                inputs: wholeSet.Inputs.SubMatrix(0, sizeOfDataFold, 0, wholeSet.Inputs.ColumnCount),
                labels: wholeSet.Labels.SubMatrix(0, sizeOfDataFold, 0, wholeSet.Labels.ColumnCount));

            Console.WriteLine("Run number:{0}", 0);
            passedParams.trainingSet   = TrainDataset;
            passedParams.validationSet = ValidationSplit;

            var lc = RunExperiment(trainer, passedParams, out MEE, out MSE);
            File.WriteAllText(Path.Combine(KRunfolderPath, "0_learningCurve.txt"), string.Join("\n", lc.Select(s => string.Join(",", s))));
            avgMSE += MSE;

            // Folds 1..k-1: validate on fold idxdataFold, train on the rows before and after it
            // (fold 0 is handled above because it has no preceding block to stack).
            for (int idxdataFold = 1; idxdataFold < k; idxdataFold++)
            {
                Console.WriteLine("Run number:{0}", idxdataFold);

                // The training split: rows before the fold stacked on the rows after it.
                TrainDataset = new DataSet(
                    inputs: wholeSet.Inputs.SubMatrix(0, idxdataFold * sizeOfDataFold, 0, wholeSet.Inputs.ColumnCount
                                                      ).Stack(wholeSet.Inputs.SubMatrix(idxdataFold * sizeOfDataFold + sizeOfDataFold,
                                                                                        wholeSet.Inputs.RowCount - idxdataFold * sizeOfDataFold - sizeOfDataFold, 0, wholeSet.Inputs.ColumnCount)),
                    labels: wholeSet.Labels.SubMatrix(0, idxdataFold * sizeOfDataFold, 0, wholeSet.Labels.ColumnCount
                                                      ).Stack(wholeSet.Labels.SubMatrix(idxdataFold * sizeOfDataFold + sizeOfDataFold,
                                                                                        wholeSet.Labels.RowCount - idxdataFold * sizeOfDataFold - sizeOfDataFold, 0, wholeSet.Labels.ColumnCount)));
                //the validation set
                ValidationSplit = new DataSet(
                    inputs: wholeSet.Inputs.SubMatrix(idxdataFold * sizeOfDataFold, sizeOfDataFold, 0, wholeSet.Inputs.ColumnCount),
                    labels: wholeSet.Labels.SubMatrix(idxdataFold * sizeOfDataFold, sizeOfDataFold, 0, wholeSet.Labels.ColumnCount));
                passedParams.trainingSet   = TrainDataset;
                passedParams.validationSet = ValidationSplit;
                lc = RunExperiment(trainer, passedParams, out MEE, out MSE);
                File.WriteAllText(Path.Combine(KRunfolderPath, idxdataFold + "_learningCurve.txt"), string.Join("\n", lc.Select(s => string.Join(",", s))));

                avgMSE += MSE;
            }

            // Average validation MSE over the k folds.
            avgMSE /= k;

            Console.WriteLine("Average MSE:{0}", avgMSE);
            File.AppendAllLines(Path.Combine(reportsPath, "avgMSEs"), new string[] { string.Format("{0},{1}", kRunFolderName, avgMSE) });
        }
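
The SubMatrix/Stack calls above carry the whole k-fold split. Below is a minimal standalone sketch of that row arithmetic, assuming the matrices are MathNet.Numerics matrices (as the SubMatrix/Stack/RowCount usage suggests); the class and method names are illustrative and do not appear in the original code, and a small guard is added for the case where the row count divides evenly by k.

using MathNet.Numerics.LinearAlgebra;

// Illustrative helper (not part of AA1_MLP): returns the training and validation
// row blocks for 0-based fold `fold` out of `k` folds, mirroring the logic above.
static class KFoldSplitSketch
{
    public static (Matrix<double> Train, Matrix<double> Validation) Split(Matrix<double> data, int k, int fold)
    {
        int foldSize = data.RowCount / k;   // remainder rows only ever land in the training block
        int start    = fold * foldSize;

        // Validation block: rows [start, start + foldSize).
        var validation = data.SubMatrix(start, foldSize, 0, data.ColumnCount);

        // Training block: rows before the fold stacked on the rows after it.
        Matrix<double> train;
        if (fold == 0)
        {
            // First fold: nothing to stack in front.
            train = data.SubMatrix(foldSize, data.RowCount - foldSize, 0, data.ColumnCount);
        }
        else if (start + foldSize >= data.RowCount)
        {
            // Last fold of an evenly divisible set: nothing left to stack behind.
            train = data.SubMatrix(0, start, 0, data.ColumnCount);
        }
        else
        {
            train = data.SubMatrix(0, start, 0, data.ColumnCount)
                        .Stack(data.SubMatrix(start + foldSize, data.RowCount - start - foldSize, 0, data.ColumnCount));
        }

        return (train, validation);
    }
}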