Example #1
        /// <summary>
        /// Bayesian optimization (BO) for global black box optimization problems. BO learns a model based on the initial parameter sets and scores.
        /// This model is used to sample new promising parameter candidates, which are evaluated and added to the existing parameter sets.
        /// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems
        /// where each evaluation of the function to minimize takes a long time, such as hyperparameter tuning of a machine learning method.
        /// In that case it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
        /// Implementation loosely based on:
        /// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
        /// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
        /// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
        /// </summary>
        /// <param name="parameters">A list of parameter bounds for each optimization parameter</param>
        /// <param name="maxIterations">Maximum number of iterations. MaxIteration * numberOfCandidatesEvaluatedPrIteration = totalFunctionEvaluations</param>
        /// <param name="numberOfStartingPoints">Number of randomly created starting points to use for the initial model in the first iteration (default is 5)</param>
        /// <param name="numberOfCandidatesEvaluatedPrIteration">How many candiate parameter set should by sampled from the model in each iteration.
        /// The parameter sets are inlcuded in order of most promissing outcome (default is 1)</param>
        /// <param name="seed">Seed for the random initialization</param>
        public BayesianOptimizer(ParameterBounds[] parameters, int maxIterations, int numberOfStartingPoints = 5, int numberOfCandidatesEvaluatedPrIteration = 1, int seed = 42)
        {
            if (parameters == null)
            {
                throw new ArgumentNullException(nameof(parameters));
            }
            if (maxIterations <= 0)
            {
                throw new ArgumentException("maxIterations must be at least 1");
            }
            if (numberOfStartingPoints < 1)
            {
                throw new ArgumentException("numberOfStartingPoints must be at least 1");
            }

            m_parameters             = parameters;
            m_maxIterations          = maxIterations;
            m_numberOfStartingPoints = numberOfStartingPoints;
            m_numberOfCandidatesEvaluatedPrIteration = numberOfCandidatesEvaluatedPrIteration;

            m_sampler = new RandomUniform(seed);

            // Hyper parameters for regression extra trees learner. These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter, the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(30, 10, 2000, parameters.Length, 1e-6, 1.0, 42, false);

            // Optimizer for finding the maximum expectation (most promising hyperparameters) from the extra trees model.
            m_maximizer = new RandomSearchOptimizer(m_parameters, 1000, 42, false);

            // Acquisition function to maximize.
            m_acquisitionFunc = AcquisitionFunctions.ExpectedImprovement;
        }
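
A minimal usage sketch for the constructor above. The OptimizerResult type, the OptimizeBest method, and the ParameterBounds(min, max) constructor follow SharpLearning's optimization API as used in these examples, but treat the exact signatures as assumptions; the function being minimized is hypothetical.

        // Sketch: minimize a one-dimensional quadratic with the optimizer above.
        // Assumes SharpLearning-style OptimizerResult/OptimizeBest; adjust to the actual API.
        var parameters = new ParameterBounds[]
        {
            new ParameterBounds(-10.0, 10.0), // bounds for the single parameter
        };

        Func<double[], OptimizerResult> minimize = p =>
        {
            var cost = Math.Pow(p[0] - 3.0, 2.0); // minimum at p[0] == 3
            return new OptimizerResult(p, cost);
        };

        var optimizer = new BayesianOptimizer(parameters, maxIterations: 30);
        var best = optimizer.OptimizeBest(minimize);
        // best.ParameterSet holds the best parameters found; best.Error holds its score.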
        /// <summary>
        /// Implementation of the SMAC algorithm for hyperparameter optimization.
        /// Based on: Sequential Model-Based Optimization for General Algorithm Configuration:
        /// https://ml.informatik.uni-freiburg.de/papers/11-LION5-SMAC.pdf
        /// Uses Bayesian optimization in tandem with a greedy local search on the top performing solutions.
        /// Also based on the ML.NET implementation:
        /// https://github.com/dotnet/machinelearning/blob/master/src/Microsoft.ML.Sweeper/Algorithms/SmacSweeper.cs
        /// </summary>
        /// <param name="parameters">A list of parameter specs, one for each optimization parameter</param>
        /// <param name="iterations">The number of iterations to perform.
        /// Iteration * functionEvaluationsPerIteration = totalFunctionEvaluations</param>
        /// <param name="randomStartingPointCount">Number of randomly parameter sets used
        /// for initialization (default is 20)</param>
        /// <param name="functionEvaluationsPerIterationCount">The number of function evaluations per iteration.
        /// The parameter sets are included in order of most promising outcome (default is 1)</param>
        /// <param name="localSearchPointCount">The number of top contenders
        /// to use in the greedy local search (default is (10)</param>
        /// <param name="randomSearchPointCount">The number of random parameter sets
        /// used when maximizing the expected improvement acquisition function (default is 1000)</param>
        /// <param name="epsilon">Threshold for ending local search (default is 0.00001)</param>
        /// <param name="seed"></param>
        public SmacOptimizer(IParameterSpec[] parameters,
                             int iterations,
                             int randomStartingPointCount             = 20,
                             int functionEvaluationsPerIterationCount = 1,
                             int localSearchPointCount  = 10,
                             int randomSearchPointCount = 1000,
                             double epsilon             = 0.00001,
                             int seed = 42)
        {
            m_parameters = parameters ?? throw new ArgumentNullException(nameof(parameters));

            if (iterations < 1)
            {
                throw new ArgumentException(nameof(iterations) +
                                            " must be at least 1. Was: " + iterations);
            }
            if (randomStartingPointCount < 1)
            {
                throw new ArgumentException(nameof(randomStartingPointCount) +
                                            " must be at least 1. Was: " + randomStartingPointCount);
            }
            if (functionEvaluationsPerIterationCount < 1)
            {
                throw new ArgumentException(nameof(functionEvaluationsPerIterationCount) +
                                            " must be at least 1. Was: " + functionEvaluationsPerIterationCount);
            }
            if (localSearchPointCount < 1)
            {
                throw new ArgumentException(nameof(localSearchPointCount) +
                                            " must be at least 1. Was: " + localSearchPointCount);
            }
            if (randomSearchPointCount < 1)
            {
                throw new ArgumentException(nameof(randomSearchPointCount) +
                                            " must be at least 1. Was: " + randomSearchPointCount);
            }

            m_random = new Random(seed);
            // Use member to seed the random uniform sampler.
            m_sampler    = new RandomUniform(m_random.Next());
            m_iterations = iterations;
            m_randomStartingPointsCount            = randomStartingPointCount;
            m_functionEvaluationsPerIterationCount = functionEvaluationsPerIterationCount;
            m_localSearchPointCount  = localSearchPointCount;
            m_randomSearchPointCount = randomSearchPointCount;
            m_epsilon = epsilon;

            // Hyper parameters for regression extra trees learner.
            // These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter,
            // the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(trees: 10,
                                                                      minimumSplitSize: 2,
                                                                      maximumTreeDepth: 2000,
                                                                      featuresPrSplit: parameters.Length,
                                                                      minimumInformationGain: 1e-6,
                                                                      subSampleRatio: 1.0,
                                                                      seed: m_random.Next(), // Use member to seed the learner.
                                                                      runParallel: false);
        }
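
The randomSearchPointCount above controls how many random candidates are scored when maximizing the expected improvement (EI) acquisition function. For minimization, EI combines the model's predicted mean mu and standard deviation sigma with the best observed score: EI = (best - mu) * Phi(z) + sigma * phi(z), where z = (best - mu) / sigma. A self-contained sketch of that formula, not the library's own AcquisitionFunctions implementation:

        // Expected improvement for minimization: the expected amount by which a
        // candidate with predicted mean `mu` and std. dev. `sigma` beats `best`.
        public static double ExpectedImprovement(double best, double mu, double sigma)
        {
            if (sigma <= 0.0) { return 0.0; } // no predictive uncertainty, no expected gain
            var z = (best - mu) / sigma;
            return (best - mu) * NormalCdf(z) + sigma * NormalPdf(z);
        }

        static double NormalPdf(double x) =>
            Math.Exp(-0.5 * x * x) / Math.Sqrt(2.0 * Math.PI);

        // Abramowitz & Stegun 26.2.17 approximation of the standard normal CDF.
        static double NormalCdf(double x)
        {
            var t = 1.0 / (1.0 + 0.2316419 * Math.Abs(x));
            var poly = t * (0.319381530 + t * (-0.356563782 +
                       t * (1.781477937 + t * (-1.821255978 + t * 1.330274429))));
            var cdf = 1.0 - NormalPdf(x) * poly;
            return x >= 0.0 ? cdf : 1.0 - cdf;
        }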
Example #3
        /// <summary>
        /// Bayesian optimization (BO) for global black box optimization problems. BO learns a model based on the initial parameter sets and scores.
        /// This model is used to sample new promising parameter candidates, which are evaluated and added to the existing parameter sets.
        /// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems
        /// where each evaluation of the function to minimize takes a long time, such as hyperparameter tuning of a machine learning method.
        /// In that case it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
        /// Implementation loosely based on:
        /// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
        /// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
        /// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
        /// </summary>
        /// <param name="parameters">A list of parameter bounds for each optimization parameter</param>
        /// <param name="maxIterations">Maximum number of iterations. MaxIteration * numberOfCandidatesEvaluatedPrIteration = totalFunctionEvaluations</param>
        /// <param name="previousParameterSets">Parameter sets from previous run</param>
        /// <param name="previousParameterSetScores">Scores from from previous run corresponding to each parameter set</param>
        /// <param name="numberOfCandidatesEvaluatedPrIteration">How many candiate parameter set should by sampled from the model in each iteration.
        /// The parameter sets are inlcuded in order of most promissing outcome (default is 1)</param>
        /// <param name="seed">Seed for the random initialization</param>
        public BayesianOptimizer(ParameterBounds[] parameters, int maxIterations, List<double[]> previousParameterSets, List<double> previousParameterSetScores,
                                 int numberOfCandidatesEvaluatedPrIteration = 1, int seed = 42)
        {
            if (parameters == null)
            {
                throw new ArgumentNullException(nameof(parameters));
            }
            if (maxIterations <= 0)
            {
                throw new ArgumentException("maxIterations must be at least 1");
            }
            if (previousParameterSets == null)
            {
                throw new ArgumentNullException(nameof(previousParameterSets));
            }
            if (previousParameterSetScores == null)
            {
                throw new ArgumentNullException(nameof(previousParameterSetScores));
            }
            if (previousParameterSets.Count != previousParameterSetScores.Count)
            {
                throw new ArgumentException("previousParameterSets length: "
                                            + previousParameterSets.Count + " does not correspond with previousParameterSetScores length: " + previousParameterSetScores.Count);
            }
            if (previousParameterSetScores.Count < 2 || previousParameterSets.Count < 2)
            {
                throw new ArgumentException("previousParameterSets and previousParameterSetScores must contain at least 2 elements. Was: " + previousParameterSetScores.Count);
            }

            m_parameters    = parameters;
            m_maxIterations = maxIterations;
            m_numberOfCandidatesEvaluatedPrIteration = numberOfCandidatesEvaluatedPrIteration;

            m_random = new Random(seed);

            // Use member to seed the random uniform sampler.
            m_sampler = new RandomUniform(m_random.Next());

            // Hyper parameters for regression extra trees learner. These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter, the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(trees: 30,
                                                                      minimumSplitSize: 10,
                                                                      maximumTreeDepth: 2000,
                                                                      featuresPrSplit: parameters.Length,
                                                                      minimumInformationGain: 1e-6,
                                                                      subSampleRatio: 1.0,
                                                                      seed: m_random.Next(), // Use member to seed the learner.
                                                                      runParallel: false);

            // Optimizer for finding the maximum expectation (most promising hyperparameters) from the extra trees model.
            m_maximizer = new RandomSearchOptimizer(m_parameters, iterations: 1000,
                                                    seed: m_random.Next(), // Use member to seed the maximizer.
                                                    runParallel: false);

            // Acquisition function to maximize.
            m_acquisitionFunc = AcquisitionFunctions.ExpectedImprovement;

            m_previousParameterSets      = previousParameterSets;
            m_previousParameterSetScores = previousParameterSetScores;
        }
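
A hedged sketch of warm-starting with this overload: parameter sets and scores recorded from an earlier run seed the initial model, so no random starting points are needed. The recorded values, and the parameters/minimize definitions reused from the first sketch, are illustrative.

        // Sketch: continue optimization from a previous run's evaluations.
        // previousSets/previousScores would normally come from earlier OptimizerResults.
        var previousSets = new List<double[]>
        {
            new[] { 1.0 }, // parameter sets evaluated earlier
            new[] { 4.0 },
        };
        var previousScores = new List<double> { 4.0, 1.0 }; // their recorded scores

        var optimizer = new BayesianOptimizer(parameters, maxIterations: 20,
                                              previousParameterSets: previousSets,
                                              previousParameterSetScores: previousScores);
        var best = optimizer.OptimizeBest(minimize); // same function to minimize as before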
Example #4
        /// <summary>
        /// Bayesian optimization (BO) for global black box optimization problems. BO learns a model based on the initial parameter sets and scores.
        /// This model is used to sample new promising parameter candidates which are evaluated and added to the existing parameter sets.
        /// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems
        /// where each evaluation of the function to minimize takes a long time, such as hyperparameter tuning of a machine learning method.
        /// In that case it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
        /// Implementation loosely based on:
        /// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
        /// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
        /// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
        /// </summary>
        /// <param name="parameters">A list of parameter specs, one for each optimization parameter</param>
        /// <param name="iterations">Number of iterations. Iteration * functionEvaluationsPerIteration = totalFunctionEvaluations</param>
        /// <param name="randomStartingPointCount">Number of randomly created starting points to use for the initial model in the first iteration (default is 5)</param>
        /// <param name="functionEvaluationsPerIteration">The number of function evaluations per iteration.
        /// The parameter sets are included in order of most promising outcome (default is 1)</param>
        /// <param name="seed">Seed for the random initialization</param>
        /// <param name="maxDegreeOfParallelism">Maximum number of concurrent operations. Default is -1 (unlimited)</param>
        /// <param name="allowMultipleEvaluations">Enables re-evaluation of duplicate parameter sets for non-deterministic functions</param>
        public BayesianOptimizer(IParameterSpec[] parameters,
                                 int iterations,
                                 int randomStartingPointCount        = 5,
                                 int functionEvaluationsPerIteration = 1,
                                 int seed = 42,
                                 int maxDegreeOfParallelism    = -1,
                                 bool allowMultipleEvaluations = false)
        {
            if (iterations <= 0)
            {
                throw new ArgumentException("iterations must be at least 1");
            }
            if (randomStartingPointCount < 1)
            {
                throw new ArgumentException("randomStartingPointCount must be at least 1");
            }

            m_parameters = parameters ?? throw new ArgumentNullException(nameof(parameters));
            m_iterations = iterations;
            m_randomStartingPointCount        = randomStartingPointCount;
            m_functionEvaluationsPerIteration = functionEvaluationsPerIteration;
            m_runParallel     = maxDegreeOfParallelism != 1;
            m_parallelOptions = new ParallelOptions {
                MaxDegreeOfParallelism = maxDegreeOfParallelism
            };
            m_allowMultipleEvaluations = allowMultipleEvaluations;
            m_locker = new object();

            m_random = new Random(seed);

            // Use member to seed the random uniform sampler.
            m_sampler = new RandomUniform(m_random.Next());

            // Hyper parameters for regression extra trees learner. These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter, the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(trees: 30,
                                                                      minimumSplitSize: 10,
                                                                      maximumTreeDepth: 2000,
                                                                      featuresPrSplit: parameters.Length,
                                                                      minimumInformationGain: 1e-6,
                                                                      subSampleRatio: 1.0,
                                                                      seed: m_random.Next(), // Use member to seed the learner.
                                                                      runParallel: m_runParallel);

            // Optimizer for finding maximum expectation (most promising hyper parameters) from extra trees model.
            m_maximizer = new RandomSearchOptimizer(m_parameters, iterations: 1000,
                                                    seed: m_random.Next(), // Use member to seed the maximizer.
                                                    runParallel: maxDegreeOfParallelism > 1);

            // Acquisition function to maximize.
            m_acquisitionFunc = AcquisitionFunctions.ExpectedImprovement;
        }
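
A brief sketch of the parallel options above. MinMaxParameterSpec is assumed to be the library's continuous IParameterSpec implementation; the rest reuses the hypothetical minimize function from the first sketch.

        // Sketch: allow up to four concurrent function evaluations and permit
        // re-evaluation of duplicate parameter sets, useful for noisy functions.
        var specs = new IParameterSpec[]
        {
            new MinMaxParameterSpec(-10.0, 10.0),
        };

        var optimizer = new BayesianOptimizer(specs, iterations: 30,
                                              maxDegreeOfParallelism: 4,
                                              allowMultipleEvaluations: true);
        var best = optimizer.OptimizeBest(minimize);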
        double RegressionExtremelyRandomizedTreesLearner_Learn_Aptitude(int trees, double subSampleRatio = 1.0)
        {
            var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

            var sut   = new RegressionExtremelyRandomizedTreesLearner(trees, 1, 100, 1, 0.0001, subSampleRatio, 42, false);
            var model = sut.Learn(observations, targets);

            var predictions = model.Predict(observations);

            var evaluator = new MeanSquaredErrorRegressionMetric();
            var error     = evaluator.Error(targets, predictions);

            return error;
        }
Example #6
        /// <summary>
        /// Bayesian optimization (BO) for global black box optimization problems. BO learns a model based on the initial parameter sets and scores.
        /// This model is used to sample new promising parameter candidates, which are evaluated and added to the existing parameter sets.
        /// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems
        /// where each evaluation of the function to minimize takes a long time, such as hyperparameter tuning of a machine learning method.
        /// In that case it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
        /// Implementation loosely based on:
        /// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
        /// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
        /// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
        /// </summary>
        /// <param name="parameters">A list of parameter bounds for each optimization parameter</param>
        /// <param name="maxIterations">Maximum number of iterations. MaxIteration * numberOfCandidatesEvaluatedPrIteration = totalFunctionEvaluations</param>
        /// <param name="previousParameterSets">Parameter sets from previous run</param>
        /// <param name="previousParameterSetScores">Scores from from previous run corresponding to each parameter set</param>
        /// <param name="numberOfCandidatesEvaluatedPrIteration">How many candiate parameter set should by sampled from the model in each iteration.
        /// The parameter sets are inlcuded in order of most promissing outcome (default is 1)</param>
        /// <param name="seed">Seed for the random initialization</param>
        public BayesianOptimizer(ParameterBounds[] parameters, int maxIterations, List<double[]> previousParameterSets, List<double> previousParameterSetScores,
                                 int numberOfCandidatesEvaluatedPrIteration = 1, int seed = 42)
        {
            if (parameters == null)
            {
                throw new ArgumentNullException(nameof(parameters));
            }
            if (maxIterations <= 0)
            {
                throw new ArgumentException("maxIterations must be at least 1");
            }
            if (previousParameterSets == null)
            {
                throw new ArgumentNullException(nameof(previousParameterSets));
            }
            if (previousParameterSetScores == null)
            {
                throw new ArgumentNullException(nameof(previousParameterSetScores));
            }
            if (previousParameterSets.Count != previousParameterSetScores.Count)
            {
                throw new ArgumentException("previousParameterSets length: "
                                            + previousParameterSets.Count + " does not correspond with previousParameterSetScores length: " + previousParameterSetScores.Count);
            }
            if (previousParameterSetScores.Count < 2 || previousParameterSets.Count < 2)
            {
                throw new ArgumentException("previousParameterSets and previousParameterSetScores must contain at least 2 elements. Was: " + previousParameterSetScores.Count);
            }


            m_parameters    = parameters;
            m_maxIterations = maxIterations;
            m_numberOfCandidatesEvaluatedPrIteration = numberOfCandidatesEvaluatedPrIteration;

            m_sampler = new RandomUniform(seed);

            // Hyper parameters for regression extra trees learner. These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter, the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(30, 10, 2000, parameters.Length, 1e-6, 1.0, 42, false);

            // Optimizer for finding the maximum expectation (most promising hyperparameters) from the extra trees model.
            m_maximizer = new RandomSearchOptimizer(m_parameters, 1000, 42, false);

            // Acquisition function to maximize.
            m_acquisitionFunc = AcquisitionFunctions.ExpectedImprovement;

            m_previousParameterSets      = previousParameterSets;
            m_previousParameterSetScores = previousParameterSetScores;
        }
Example #7
        public void RegressionExtremelyRandomizedTreesLearnerTest_Learn_Glass_100_Trees_Parallel()
        {
            var (observations, targets) = DataSetUtilities.LoadGlassDataSet();

            var sut = new RegressionExtremelyRandomizedTreesLearner(100, 1, 100, 1, 0.0001, 1.0, 42, true);

            var model = sut.Learn(observations, targets);

            var predictions = model.Predict(observations);

            var evaluator = new MeanSquaredErrorRegressionMetric();
            var error     = evaluator.Error(targets, predictions);

            Assert.AreEqual(0.33450356466453129, error, m_delta);
        }
Example #8
        double RegressionExtremelyRandomizedTreesLearner_Learn_Aptitude(int trees, double subSampleRatio = 1.0)
        {
            var parser       = new CsvParser(() => new StringReader(Resources.AptitudeData));
            var observations = parser.EnumerateRows(v => v != "Pass").ToF64Matrix();
            var targets      = parser.EnumerateRows("Pass").ToF64Vector();
            var rows         = targets.Length;

            var sut   = new RegressionExtremelyRandomizedTreesLearner(trees, 1, 100, 1, 0.0001, subSampleRatio, 42, false);
            var model = sut.Learn(observations, targets);

            var predictions = model.Predict(observations);

            var evaluator = new MeanSquaredErrorRegressionMetric();
            var error     = evaluator.Error(targets, predictions);

            return error;
        }
Example #9
        public void RegressionExtremelyRandomizedTreesLearnerTest_Learn_Glass_100_Trees_Parallel()
        {
            var parser       = new CsvParser(() => new StringReader(Resources.Glass));
            var observations = parser.EnumerateRows(v => v != "Target").ToF64Matrix();
            var targets      = parser.EnumerateRows("Target").ToF64Vector();

            var sut = new RegressionExtremelyRandomizedTreesLearner(100, 1, 100, 1, 0.0001, 1.0, 42, true);

            var model = sut.Learn(observations, targets);

            var predictions = model.Predict(observations);

            var evaluator = new MeanSquaredErrorRegressionMetric();
            var error     = evaluator.Error(targets, predictions);

            Assert.AreEqual(0.33450356466453129, error, m_delta);
        }
Example #10
        public void RegressionExtremelyRandomizedTreesLearnerTest_Learn_Glass_100_Indices()
        {
            var (observations, targets) = DataSetUtilities.LoadGlassDataSet();

            var sut = new RegressionExtremelyRandomizedTreesLearner(100, 1, 100, 1, 0.0001, 1.0, 42, false);

            var indices = Enumerable.Range(0, targets.Length).ToArray();

            indices.Shuffle(new Random(42));
            indices = indices.Take((int)(targets.Length * 0.7))
                      .ToArray();

            var model = sut.Learn(observations, targets, indices);

            var predictions = model.Predict(observations);

            var evaluator = new MeanSquaredErrorRegressionMetric();
            var error     = evaluator.Error(targets, predictions);

            Assert.AreEqual(0.592240619161302, error, m_delta);
        }
Example #11
        public void RegressionExtremelyRandomizedTreesLearnerTest_Learn_Glass_100_Indices()
        {
            var parser       = new CsvParser(() => new StringReader(Resources.Glass));
            var observations = parser.EnumerateRows(v => v != "Target").ToF64Matrix();
            var targets      = parser.EnumerateRows("Target").ToF64Vector();
            var rows         = targets.Length;

            var sut = new RegressionExtremelyRandomizedTreesLearner(100, 1, 100, 1, 0.0001, 1.0, 42, false);

            var indices = Enumerable.Range(0, targets.Length).ToArray();

            indices.Shuffle(new Random(42));
            indices = indices.Take((int)(targets.Length * 0.7))
                      .ToArray();

            var model = sut.Learn(observations, targets, indices);

            var predictions = model.Predict(observations);

            var evaluator = new MeanSquaredErrorRegressionMetric();
            var error     = evaluator.Error(targets, predictions);

            Assert.AreEqual(0.622380056587391, error, 0.0000001);
        }
        /// <summary>
        ///     Bayesian optimization (BO) for global black box optimization problems. BO learns a model based on the initial
        ///     parameter sets and scores.
        ///     This model is used to sample new promising parameter candidates which are evaluated and added to the existing
        ///     parameter sets.
        ///     This process iterates several times. The method is computationally expensive, so it is most relevant for expensive
        ///     problems,
        ///     where each evaluation of the function to minimize takes a long time, such as hyperparameter tuning of a machine
        ///     learning method.
        ///     In that case it can usually reduce the number of iterations required to reach a good solution compared to less
        ///     sophisticated methods.
        ///     Implementation loosely based on:
        ///     http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
        ///     https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
        ///     https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
        /// </summary>
        /// <param name="parameters">A list of parameter specs, one for each optimization parameter</param>
        /// <param name="iterations">
        ///     The number of iterations to perform.
        ///     Iteration * functionEvaluationsPerIteration = totalFunctionEvaluations
        /// </param>
        /// <param name="randomStartingPointCount">
        ///     Number of random parameter sets used
        ///     for initialization (default is 5)
        /// </param>
        /// <param name="functionEvaluationsPerIterationCount">
        ///     The number of function evaluations per iteration.
        ///     The parameter sets are included in order of most promising outcome (default is 1)
        /// </param>
        /// <param name="randomSearchPointCount">
        ///     The number of random parameter sets
        ///     used when maximizing the expected improvement acquisition function (default is 1000)
        /// </param>
        /// <param name="seed"></param>
        /// <param name="runParallel">
        ///     Use multi threading to speed up execution (default is false).
        ///     Note that the order of results returned by the Optimize method will not be reproducible when running in parallel.
        ///     Results will be the same; only the order is not reproducible
        /// </param>
        /// <param name="maxDegreeOfParallelism">Maximum number of concurrent operations (default is -1 (unlimited))</param>
        public BayesianOptimizer(
            IParameterSpec[] parameters,
            int iterations,
            int randomStartingPointCount             = 5,
            int functionEvaluationsPerIterationCount = 1,
            int randomSearchPointCount = 1000,
            int seed                   = 42,
            bool runParallel           = false,
            int maxDegreeOfParallelism = -1)
        {
            m_parameters = parameters ?? throw new ArgumentNullException(nameof(parameters));

            if (iterations < 1)
            {
                throw new ArgumentException(nameof(iterations) + " must be at least 1. Was: " + iterations);
            }

            if (randomStartingPointCount < 1)
            {
                throw new ArgumentException(
                          nameof(randomStartingPointCount) + " must be at least 1. Was: " + randomStartingPointCount
                          );
            }

            if (functionEvaluationsPerIterationCount < 1)
            {
                throw new ArgumentException(
                          nameof(functionEvaluationsPerIterationCount) +
                          " must be at least 1. Was: " +
                          functionEvaluationsPerIterationCount
                          );
            }

            if (randomSearchPointCount < 1)
            {
                throw new ArgumentException(
                          nameof(randomSearchPointCount) + " must be at least 1. Was: " + randomSearchPointCount
                          );
            }

            m_random = new Random(seed);

            // Use member to seed the random uniform sampler.
            m_sampler    = new RandomUniform(m_random.Next());
            m_iterations = iterations;
            m_randomStartingPointsCount            = randomStartingPointCount;
            m_functionEvaluationsPerIterationCount = functionEvaluationsPerIterationCount;
            m_randomSearchPointCount = randomSearchPointCount;

            // Hyper parameters for regression extra trees learner.
            // These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter,
            // the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(
                30,
                10,
                2000,
                parameters.Length,
                1e-6,
                1.0,
                m_random.Next(), // Use member to seed the learner.
                false
                );

            m_runParallel            = runParallel;
            m_maxDegreeOfParallelism = maxDegreeOfParallelism;
        }