Example #1
        public void HyperbandOptimizer_Optimize()
        {
            var parameters = new IParameterSpec[]
            {
                new MinMaxParameterSpec(min: 80, max: 300, transform: Transform.Linear),
                new MinMaxParameterSpec(min: 0.02, max:  0.2, transform: Transform.Log10),
                new MinMaxParameterSpec(min: 8, max: 15, transform: Transform.Linear),
            };

            var random = new Random(343);

            OptimizerResult minimize(double[] p, double r)
            {
                var error = random.NextDouble();

                return new OptimizerResult(p, error);
            }

            var sut = new HyperbandOptimizer(
                parameters,
                maximumBudget: 81,
                eta: 5,
                skipLastIterationOfEachRound: false,
                seed: 34);

            var actual = sut.Optimize(minimize);

            AssertOptimizerResults(Expected, actual);
        }
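
The bracket schedule implied by maximumBudget: 81 and eta: 5 is easy to miss when reading the test. The sketch below works through the standard Hyperband bracket arithmetic from Li et al. (2017) for those settings; SharpLearning's internal rounding may differ in detail, so read it as illustrative arithmetic rather than the library's exact plan.

        // Illustrative only: standard Hyperband bracket arithmetic (Li et al., 2017).
        var maximumBudget = 81.0;
        var eta = 5.0;

        // Number of brackets is sMax + 1, where sMax = floor(log_eta(maximumBudget)).
        var sMax = (int)Math.Floor(Math.Log(maximumBudget) / Math.Log(eta)); // 2
        var budgetPerBracket = (sMax + 1) * maximumBudget;                   // 243

        for (var s = sMax; s >= 0; s--)
        {
            // Configurations to start with, and budget given to each, in bracket s.
            var n = (int)Math.Ceiling(budgetPerBracket / maximumBudget * Math.Pow(eta, s) / (s + 1));
            var r = maximumBudget * Math.Pow(eta, -s);
            Console.WriteLine($"bracket s={s}: {n} configurations at initial budget {r:0.##}");
        }
        // Output: s=2: 25 configs at 3.24; s=1: 8 configs at 16.2; s=0: 3 configs at 81.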
Example #2
        public void HyperbandOptimizer_OptimizeBest()
        {
            var parameters = new IParameterSpec[]
            {
                new MinMaxParameterSpec(min: 80, max: 300, transform: Transform.Linear),   // iterations
                new MinMaxParameterSpec(min: 0.02, max:  0.2, transform: Transform.Log10), // learning rate
                new MinMaxParameterSpec(min: 8, max: 15, transform: Transform.Linear),     // maximumTreeDepth
            };

            var random = new Random(343);
            HyperbandObjectiveFunction minimize = (p, r) =>
            {
                var error = random.NextDouble();
                return new OptimizerResult(p, error);
            };

            var sut = new HyperbandOptimizer(
                parameters,
                maximumUnitsOfCompute: 81,
                eta: 5,
                skipLastIterationOfEachRound: false,
                seed: 34);

            var actual   = sut.OptimizeBest(minimize);
            var expected = new OptimizerResult(new[] { 278.337940, 0.098931, 13.177449 }, 0.009549);

            AssertOptimizerResult(expected, actual);
        }
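
Note that both examples above ignore the second argument of the objective function, which is the budget Hyperband allocates to the candidate on that round. Hyperband only pays off when the objective actually spends that budget, typically by mapping it to a training resource. The sketch below shows the pattern under stated assumptions: a two-element parameter spec (learning rate, tree depth) and pre-split trainSet/validationSet plus a metric as in the following example; none of this is from the original tests.

        // Sketch (assumed setup, not from the original tests): let the budget drive
        // the number of boosting iterations so low-budget rounds are cheap.
        HyperbandObjectiveFunction minimize = (p, budget) =>
        {
            // Assumes a 2-element parameter spec: p[0] = learning rate, p[1] = maximumTreeDepth.
            var learner = new RegressionSquareLossGradientBoostLearner(
                iterations: (int)budget, // spend exactly what Hyperband allocated
                learningRate: p[0],
                maximumTreeDepth: (int)p[1],
                runParallel: false);

            // trainSet, validationSet, and metric are assumed to exist, as in Example #3.
            var model = learner.Learn(trainSet.Observations, trainSet.Targets);
            var error = metric.Error(validationSet.Targets, model.Predict(validationSet.Observations));

            return new OptimizerResult(p, error);
        };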
Example #3
        public void GradientBoost_Optimize_Hyperparameters()
        {
            #region read and split data
            // Use StreamReader(filepath) when running from filesystem
            var parser     = new CsvParser(() => new StringReader(Resources.winequality_white));
            var targetName = "quality";

            // read feature matrix
            var observations = parser.EnumerateRows(c => c != targetName)
                               .ToF64Matrix();

            // read regression targets
            var targets = parser.EnumerateRows(targetName)
                          .ToF64Vector();

            // Create the training/test splitter.
            // Since this is a regression problem, we use the random training/test set splitter.
            // 30 % of the data is used for the test set.
            var splitter = new RandomTrainingTestIndexSplitter<double>(trainingPercentage: 0.7, seed: 24);

            var trainingTestSplit = splitter.SplitSet(observations, targets);
            var trainSet          = trainingTestSplit.TrainingSet;
            var testSet           = trainingTestSplit.TestSet;
            #endregion

            // Since this is a regression problem, we use mean squared error as the
            // metric for evaluating how well the model performs.
            var metric = new MeanSquaredErrorRegressionMetric();

            // Usually, better results can be achieved by tuning a gradient boost learner
            // than by relying on the default hyperparameters.

            var numberOfFeatures = trainSet.Observations.ColumnCount;

            // Parameter specs for the optimizer.
            var parameters = new IParameterSpec[]
            {
                new MinMaxParameterSpec(min: 80, max: 300,
                                        transform: Transform.Linear, parameterType: ParameterType.Discrete), // iterations

                new MinMaxParameterSpec(min: 0.02, max:  0.2,
                                        transform: Transform.Logarithmic, parameterType: ParameterType.Continuous), // learning rate

                new MinMaxParameterSpec(min: 8, max: 15,
                                        transform: Transform.Linear, parameterType: ParameterType.Discrete), // maximumTreeDepth

                new MinMaxParameterSpec(min: 0.5, max: 0.9,
                                        transform: Transform.Linear, parameterType: ParameterType.Continuous), // subSampleRatio

                new MinMaxParameterSpec(min: 1, max: numberOfFeatures,
                                        transform: Transform.Linear, parameterType: ParameterType.Discrete), // featuresPrSplit
            };

            // Further split the training data to have a validation set to measure
            // how well the model generalizes to unseen data during the optimization.
            var validationSplit = new RandomTrainingTestIndexSplitter<double>(trainingPercentage: 0.7, seed: 24)
                                  .SplitSet(trainSet.Observations, trainSet.Targets);

            // Define optimizer objective (function to minimize)
            Func<double[], OptimizerResult> minimize = p =>
            {
                // create the candidate learner using the current optimization parameters.
                var candidateLearner = new RegressionSquareLossGradientBoostLearner(
                    iterations: (int)p[0],
                    learningRate: p[1],
                    maximumTreeDepth: (int)p[2],
                    subSampleRatio: p[3],
                    featuresPrSplit: (int)p[4],
                    runParallel: false);

                var candidateModel = candidateLearner.Learn(validationSplit.TrainingSet.Observations,
                                                            validationSplit.TrainingSet.Targets);

                var validationPredictions = candidateModel.Predict(validationSplit.TestSet.Observations);
                var candidateError        = metric.Error(validationSplit.TestSet.Targets, validationPredictions);

                // trace current error
                Trace.WriteLine(string.Format("Candidate Error: {0:0.0000}, Candidate Parameters: {1}",
                                              candidateError, string.Join(", ", p)));

                return new OptimizerResult(p, candidateError);
            };

            // create random search optimizer
            var optimizer = new RandomSearchOptimizer(parameters, iterations: 30, runParallel: true);

            // find best hyperparameters
            var result = optimizer.OptimizeBest(minimize);
            var best   = result.ParameterSet;

            // create the final learner using the best hyperparameters.
            var learner = new RegressionSquareLossGradientBoostLearner(
                iterations: (int)best[0],
                learningRate: best[1],
                maximumTreeDepth: (int)best[2],
                subSampleRatio: best[3],
                featuresPrSplit: (int)best[4],
                runParallel: false);

            // learn model with found parameters
            var model = learner.Learn(trainSet.Observations, trainSet.Targets);

            // predict the training and test set.
            var trainPredictions = model.Predict(trainSet.Observations);
            var testPredictions  = model.Predict(testSet.Observations);

            // measure the error on training and test set.
            var trainError = metric.Error(trainSet.Targets, trainPredictions);
            var testError  = metric.Error(testSet.Targets, testPredictions);

            // Hyperparameters found by the optimizer.
            Trace.WriteLine(string.Format("Found parameters, iterations: {0}, learning rate: {1:0.000}, maximumTreeDepth: {2}, subSampleRatio: {3:0.000}, featuresPrSplit: {4}",
                                          (int)best[0], best[1], (int)best[2], best[3], (int)best[4]));
            TraceTrainingAndTestError(trainError, testError);
        }
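
The learning-rate spec above uses a logarithmic transform (Transform.Log10 in the Hyperband examples, Transform.Logarithmic here) rather than Transform.Linear. Sampling uniformly on [0.02, 0.2] would put half of all draws above 0.11, while sampling uniformly in log-space spreads draws evenly over relative changes, which suits learning rates. A stand-alone sketch of the idea, not SharpLearning's internal code:

        // Stand-alone illustration of linear vs. log-space sampling.
        var random = new Random(42);
        double min = 0.02, max = 0.2;

        // Linear: uniform in [min, max]. The endpoints differ by a factor of 10,
        // yet the top half of the interval receives half of all samples.
        var linearSample = min + random.NextDouble() * (max - min);

        // Logarithmic: uniform in [log10(min), log10(max)], then transformed back.
        // Relative changes are sampled evenly across the range.
        var logMin = Math.Log10(min);
        var logMax = Math.Log10(max);
        var logSample = Math.Pow(10, logMin + random.NextDouble() * (logMax - logMin));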
Example #4
        public void Hyper_Parameter_Tuning()
        {
            #region Read data

            // Use StreamReader(filepath) when running from filesystem
            var parser     = new CsvParser(() => new StringReader(Resources.winequality_white));
            var targetName = "quality";

            // read feature matrix
            var observations = parser.EnumerateRows(c => c != targetName)
                               .ToF64Matrix();

            // read regression targets
            var targets = parser.EnumerateRows(targetName)
                          .ToF64Vector();

            #endregion

            // metric to minimize
            var metric = new MeanSquaredErrorRegressionMetric();

            // Parameter ranges for the optimizer
            var parameters = new IParameterSpec[]
            {
                new MinMaxParameterSpec(min: 1, max: 100,
                                        transform: Transform.Linear, parameterType: ParameterType.Discrete), // maximumTreeDepth
                new MinMaxParameterSpec(min: 1, max: 16,
                                        transform: Transform.Linear, parameterType: ParameterType.Discrete), // minimumSplitSize
            };

            // create random search optimizer
            var optimizer = new RandomSearchOptimizer(parameters, iterations: 30, runParallel: true);

            // other available optimizers:
            // GridSearchOptimizer
            // GlobalizedBoundedNelderMeadOptimizer
            // ParticleSwarmOptimizer
            // BayesianOptimizer

            // function to minimize
            Func<double[], OptimizerResult> minimize = p =>
            {
                var cv          = new RandomCrossValidation<double>(crossValidationFolds: 5, seed: 42);
                var optlearner  = new RegressionDecisionTreeLearner(maximumTreeDepth: (int)p[0], minimumSplitSize: (int)p[1]);
                var predictions = cv.CrossValidate(optlearner, observations, targets);
                var error       = metric.Error(targets, predictions);

                Trace.WriteLine(string.Format("Candidate Error: {0:0.0000}, Candidate Parameters: {1}",
                                              error, string.Join(", ", p)));

                return new OptimizerResult(p, error);
            };

            // run optimizer
            var result         = optimizer.OptimizeBest(minimize);
            var bestParameters = result.ParameterSet;

            Trace.WriteLine("Result: " + result.Error);

            // create learner with found parameters
            var learner = new RegressionDecisionTreeLearner(maximumTreeDepth: (int)bestParameters[0], minimumSplitSize: (int)bestParameters[1]);

            // learn model with found parameters
            var model = learner.Learn(observations, targets);
        }
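
The example stops after fitting the final model; the cross-validation error in result.Error is already its generalization estimate. If a direct check of the fitted model is wanted, the same calls used in Example #3 apply, though predicting on the full training data only confirms the fit rather than generalization:

        // Score the final model with the same metric; these calls mirror Example #3.
        // This evaluates on the training data, so it measures fit, not generalization.
        var predictions = model.Predict(observations);
        var trainError  = metric.Error(targets, predictions);
        Trace.WriteLine(string.Format("Training error: {0:0.0000}", trainError));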