Example 1
        /// <summary>
        /// Bayesian optimization (BO) for global black box optimization problems. BO learns a model based on the initial parameter sets and scores.
        /// This model is used to sample new promising parameter candidates, which are evaluated and added to the existing parameter sets.
        /// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems,
        /// where each evaluation of the function to minimize takes a long time, like hyper parameter tuning of a machine learning method.
        /// In that case, it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
        /// Implementation loosely based on:
        /// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
        /// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
        /// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
        /// </summary>
        /// <param name="parameters">A list of parameter bounds for each optimization parameter</param>
        /// <param name="maxIterations">Maximum number of iterations. MaxIteration * numberOfCandidatesEvaluatedPrIteration = totalFunctionEvaluations</param>
        /// <param name="numberOfStartingPoints">Number of randomly created starting points to use for the initial model in the first iteration (default is 5)</param>
        /// <param name="numberOfCandidatesEvaluatedPrIteration">How many candiate parameter set should by sampled from the model in each iteration.
        /// The parameter sets are inlcuded in order of most promissing outcome (default is 1)</param>
        /// <param name="seed">Seed for the random initialization</param>
        public BayesianOptimizer(ParameterBounds[] parameters, int maxIterations, int numberOfStartingPoints = 5, int numberOfCandidatesEvaluatedPrIteration = 1, int seed = 42)
        {
            if (parameters == null)
            {
                throw new ArgumentNullException("parameters");
            }
            if (maxIterations <= 0)
            {
                throw new ArgumentException("maxIterations must be at least 1");
            }
            if (numberOfStartingPoints < 1)
            {
                throw new ArgumentException("numberOfStartingPoints must be at least 1");
            }

            m_parameters             = parameters;
            m_maxIterations          = maxIterations;
            m_numberOfStartingPoints = numberOfStartingPoints;
            m_numberOfCandidatesEvaluatedPrIteration = numberOfCandidatesEvaluatedPrIteration;

            m_sampler = new RandomUniform(seed);

            // Hyper parameters for regression extra trees learner. These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter, the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(30, 10, 2000, parameters.Length, 1e-6, 1.0, 42, false);

            // Optimizer for finding maximum expectation (most promising hyper parameters) from the extra trees model.
            m_maximizer = new RandomSearchOptimizer(m_parameters, 1000, 42, false);

            // Acquisition function to maximize.
            m_acquisitionFunc = AcquisitionFunctions.ExpectedImprovement;
        }
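
A minimal usage sketch for the constructor above (hedged): it assumes SharpLearning-style types, i.e. a ParameterBounds(min, max) constructor, an OptimizerResult(parameterSet, error) type exposing an Error property, and an OptimizeBest method on the optimizer. The objective function is purely illustrative.

        // Hedged usage sketch; assumes using System; and the library's optimization
        // namespace (e.g. SharpLearning.Optimization) are in scope.
        var parameters = new ParameterBounds[]
        {
            new ParameterBounds(-10.0, 10.0), // bounds for the first parameter
            new ParameterBounds(-10.0, 10.0), // bounds for the second parameter
        };

        var optimizer = new BayesianOptimizer(parameters, maxIterations: 30,
                                              numberOfStartingPoints: 5,
                                              numberOfCandidatesEvaluatedPrIteration: 1,
                                              seed: 42);

        // The function to minimize takes one parameter set and returns an OptimizerResult
        // holding the parameter set and its score (lower is better).
        Func<double[], OptimizerResult> minimize = p =>
            new OptimizerResult(p, Math.Pow(p[0] - 3.0, 2) + Math.Pow(p[1] + 1.0, 2));

        var best = optimizer.OptimizeBest(minimize);
        Console.WriteLine("Best error: " + best.Error);
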
Example 2
        /// <summary>
        /// Bayesian optimization (BO) for global black box optimization problems. BO learns a model based on the initial parameter sets and scores.
        /// This model is used to sample new promising parameter candidates, which are evaluated and added to the existing parameter sets.
        /// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems,
        /// where each evaluation of the function to minimize takes a long time, like hyper parameter tuning of a machine learning method.
        /// In that case, it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
        /// Implementation loosely based on:
        /// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
        /// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
        /// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
        /// </summary>
        /// <param name="parameters">A list of parameter bounds for each optimization parameter</param>
        /// <param name="maxIterations">Maximum number of iterations. MaxIteration * numberOfCandidatesEvaluatedPrIteration = totalFunctionEvaluations</param>
        /// <param name="previousParameterSets">Parameter sets from previous run</param>
        /// <param name="previousParameterSetScores">Scores from from previous run corresponding to each parameter set</param>
        /// <param name="numberOfCandidatesEvaluatedPrIteration">How many candiate parameter set should by sampled from the model in each iteration.
        /// The parameter sets are inlcuded in order of most promissing outcome (default is 1)</param>
        /// <param name="seed">Seed for the random initialization</param>
        public BayesianOptimizer(ParameterBounds[] parameters, int maxIterations, List<double[]> previousParameterSets, List<double> previousParameterSetScores,
                                 int numberOfCandidatesEvaluatedPrIteration = 1, int seed = 42)
        {
            if (parameters == null)
            {
                throw new ArgumentNullException("parameters");
            }
            if (maxIterations <= 0)
            {
                throw new ArgumentNullException("maxIterations must be at least 1");
            }
            if (previousParameterSets == null)
            {
                throw new ArgumentNullException("previousParameterSets");
            }
            if (previousParameterSetScores == null)
            {
                throw new ArgumentNullException("previousResults");
            }
            if (previousParameterSets.Count != previousParameterSetScores.Count)
            {
                throw new ArgumentException("previousParameterSets length: "
                                            + previousParameterSets.Count + " does not correspond with previousResults length: " + previousParameterSetScores.Count);
            }
            if (previousParameterSetScores.Count < 2 || previousParameterSets.Count < 2)
            {
                throw new ArgumentException("previousParameterSets length and previousResults length must be at least 2 and was: " + previousParameterSetScores.Count);
            }

            m_parameters    = parameters;
            m_maxIterations = maxIterations;
            m_numberOfCandidatesEvaluatedPrIteration = numberOfCandidatesEvaluatedPrIteration;

            m_random = new Random(seed);

            // Use member to seed the random uniform sampler.
            m_sampler = new RandomUniform(m_random.Next());

            // Hyper parameters for regression extra trees learner. These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter, the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(trees: 30,
                                                                      minimumSplitSize: 10,
                                                                      maximumTreeDepth: 2000,
                                                                      featuresPrSplit: parameters.Length,
                                                                      minimumInformationGain: 1e-6,
                                                                      subSampleRatio: 1.0,
                                                                      seed: m_random.Next(), // Use member random to seed the learner.
                                                                      runParallel: false);

            // Optimizer for finding maximum expectation (most promising hyper parameters) from the extra trees model.
            m_maximizer = new RandomSearchOptimizer(m_parameters, iterations: 1000,
                                                    seed: m_random.Next(), // Use member random to seed the maximizer.
                                                    runParallel: false);

            // Acquisition function to maximize.
            m_acquisitionFunc = AcquisitionFunctions.ExpectedImprovement;

            m_previousParameterSets      = previousParameterSets;
            m_previousParameterSetScores = previousParameterSetScores;
        }
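
A hedged warm-start sketch for the constructor above: previously evaluated parameter sets and their scores seed the initial model, so the optimizer can continue a run instead of starting from random points. All values below are illustrative.

        // Hedged warm-start sketch (values illustrative; lower scores are better).
        var parameters = new[]
        {
            new ParameterBounds(0.0, 10.0),
            new ParameterBounds(0.0, 10.0),
        };

        var previousParameterSets = new List<double[]>
        {
            new[] { 1.0, 4.0 },
            new[] { 2.5, 7.5 },
            new[] { 8.0, 2.0 },
        };
        var previousParameterSetScores = new List<double> { 12.3, 4.2, 9.8 };

        var optimizer = new BayesianOptimizer(parameters,
                                              maxIterations: 20,
                                              previousParameterSets: previousParameterSets,
                                              previousParameterSetScores: previousParameterSetScores);
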
Example 3
        /// <summary>
        /// Bayesian optimization (BO) for global black box optimization problems. BO learns a model based on the initial parameter sets and scores.
        /// This model is used to sample new promising parameter candidates which are evaluated and added to the existing parameter sets.
        /// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems,
        /// where each evaluation of the function to minimize takes a long time, like hyper parameter tuning of a machine learning method.
        /// In that case, it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
        /// Implementation loosely based on:
        /// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
        /// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
        /// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
        /// </summary>
        /// <param name="parameters">A list of parameter specs, one for each optimization parameter</param>
        /// <param name="iterations">Number of iterations. Iteration * functionEvaluationsPerIteration = totalFunctionEvaluations</param>
        /// <param name="randomStartingPointCount">Number of randomly created starting points to use for the initial model in the first iteration (default is 5)</param>
        /// <param name="functionEvaluationsPerIteration">The number of function evaluations per iteration.
        /// The parameter sets are included in order of most promising outcome (default is 1)</param>
        /// <param name="seed">Seed for the random initialization</param>
        /// <param name="maxDegreeOfParallelism">Maximum number of concurrent operations. Default is -1 (unlimited)</param>
        /// <param name="allowMultipleEvaluations">Enables re-evaluation of duplicate parameter sets for non-deterministic functions</param>
        public BayesianOptimizer(IParameterSpec[] parameters,
                                 int iterations,
                                 int randomStartingPointCount        = 5,
                                 int functionEvaluationsPerIteration = 1,
                                 int seed = 42,
                                 int maxDegreeOfParallelism    = -1,
                                 bool allowMultipleEvaluations = false)
        {
            if (iterations <= 0)
            {
                throw new ArgumentException("iterations must be at least 1");
            }
            if (randomStartingPointCount < 1)
            {
                throw new ArgumentException("randomStartingPointCount must be at least 1");
            }

            m_parameters = parameters ?? throw new ArgumentNullException(nameof(parameters));
            m_iterations = iterations;
            m_randomStartingPointCount        = randomStartingPointCount;
            m_functionEvaluationsPerIteration = functionEvaluationsPerIteration;
            m_runParallel     = maxDegreeOfParallelism != 1;
            m_parallelOptions = new ParallelOptions {
                MaxDegreeOfParallelism = maxDegreeOfParallelism
            };
            m_allowMultipleEvaluations = allowMultipleEvaluations;
            m_locker = new object();

            m_random = new Random(seed);

            // Use member to seed the random uniform sampler.
            m_sampler = new RandomUniform(m_random.Next());

            // Hyper parameters for regression extra trees learner. These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter, the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(trees: 30,
                                                                      minimumSplitSize: 10,
                                                                      maximumTreeDepth: 2000,
                                                                      featuresPrSplit: parameters.Length,
                                                                      minimumInformationGain: 1e-6,
                                                                      subSampleRatio: 1.0,
                                                                      seed: m_random.Next(), // Use member random to seed the learner.
                                                                      runParallel: m_runParallel);

            // Optimizer for finding maximum expectation (most promising hyper parameters) from extra trees model.
            m_maximizer = new RandomSearchOptimizer(m_parameters, iterations: 1000,
                                                    seed: m_random.Next(), // Use member random to seed the maximizer.
                                                    runParallel: maxDegreeOfParallelism > 1);

            // Acquisition function to maximize.
            m_acquisitionFunc = AcquisitionFunctions.ExpectedImprovement;
        }
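
A hedged usage sketch for the IParameterSpec-based constructor above. MinMaxParameterSpec is assumed to be the library's IParameterSpec implementation; with maxDegreeOfParallelism above 1, several candidates can be evaluated concurrently, so the function to minimize must be thread-safe.

        // Hedged usage sketch; MinMaxParameterSpec(min, max) is an assumed
        // IParameterSpec implementation matching the surrounding library.
        var parameters = new IParameterSpec[]
        {
            new MinMaxParameterSpec(0.0, 100.0),
            new MinMaxParameterSpec(1.0, 30.0),
        };

        var optimizer = new BayesianOptimizer(parameters,
                                              iterations: 30,
                                              randomStartingPointCount: 5,
                                              functionEvaluationsPerIteration: 4,
                                              seed: 42,
                                              maxDegreeOfParallelism: 4);
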
Example 4
        /// <summary>
        /// Bayesian optimization (BO) for global black box optimization problems. BO learns a model based on the initial parameter sets and scores.
        /// This model is used to sample new promising parameter candidates, which are evaluated and added to the existing parameter sets.
        /// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems,
        /// where each evaluation of the function to minimize takes a long time, like hyper parameter tuning of a machine learning method.
        /// In that case, it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
        /// Implementation loosely based on:
        /// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
        /// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
        /// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
        /// </summary>
        /// <param name="parameters">A list of parameter bounds for each optimization parameter</param>
        /// <param name="maxIterations">Maximum number of iterations. MaxIteration * numberOfCandidatesEvaluatedPrIteration = totalFunctionEvaluations</param>
        /// <param name="previousParameterSets">Parameter sets from previous run</param>
        /// <param name="previousParameterSetScores">Scores from from previous run corresponding to each parameter set</param>
        /// <param name="numberOfCandidatesEvaluatedPrIteration">How many candiate parameter set should by sampled from the model in each iteration.
        /// The parameter sets are inlcuded in order of most promissing outcome (default is 1)</param>
        /// <param name="seed">Seed for the random initialization</param>
        public BayesianOptimizer(ParameterBounds[] parameters, int maxIterations, List<double[]> previousParameterSets, List<double> previousParameterSetScores,
                                 int numberOfCandidatesEvaluatedPrIteration = 1, int seed = 42)
        {
            if (parameters == null)
            {
                throw new ArgumentNullException("parameters");
            }
            if (maxIterations <= 0)
            {
                throw new ArgumentNullException("maxIterations must be at least 1");
            }
            if (previousParameterSets == null)
            {
                throw new ArgumentNullException("previousParameterSets");
            }
            if (previousParameterSetScores == null)
            {
                throw new ArgumentNullException("previousResults");
            }
            if (previousParameterSets.Count != previousParameterSetScores.Count)
            {
                throw new ArgumentException("previousParameterSets length: "
                                            + previousParameterSets.Count + " does not correspond with previousResults length: " + previousParameterSetScores.Count);
            }
            if (previousParameterSetScores.Count < 2 || previousParameterSets.Count < 2)
            {
                throw new ArgumentException("previousParameterSets length and previousResults length must be at least 2 and was: " + previousParameterSetScores.Count);
            }

            m_parameters    = parameters;
            m_maxIterations = maxIterations;
            m_numberOfCandidatesEvaluatedPrIteration = numberOfCandidatesEvaluatedPrIteration;

            m_sampler = new RandomUniform(seed);

            // Hyper parameters for regression extra trees learner. These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter, the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(30, 10, 2000, parameters.Length, 1e-6, 1.0, 42, false);

            // Optimizer for finding maximum expectation (most promising hyper parameters) from the extra trees model.
            m_maximizer = new RandomSearchOptimizer(m_parameters, 1000, 42, false);

            // Acquisition function to maximize.
            m_acquisitionFunc = AcquisitionFunctions.ExpectedImprovement;

            m_previousParameterSets      = previousParameterSets;
            m_previousParameterSetScores = previousParameterSetScores;
        }
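
All four constructors select expected improvement as the acquisition function. Below is a minimal, self-contained sketch of its closed-form expression for a minimization problem; the helper names are illustrative and are not the library's actual implementation.

        // Hedged sketch of expected improvement for minimization. Given the surrogate
        // model's predictive mean and variance at a candidate point and the best score
        // observed so far: EI = (best - mean) * Phi(z) + stdDev * phi(z),
        // with z = (best - mean) / stdDev.
        static double ExpectedImprovement(double bestScore, double mean, double variance)
        {
            if (variance <= 0.0) { return 0.0; } // no predictive uncertainty, no expected gain
            var stdDev = Math.Sqrt(variance);
            var z = (bestScore - mean) / stdDev;
            return (bestScore - mean) * CumulativeNormal(z) + stdDev * NormalDensity(z);
        }

        // Standard normal probability density function.
        static double NormalDensity(double z)
        {
            return Math.Exp(-0.5 * z * z) / Math.Sqrt(2.0 * Math.PI);
        }

        // Standard normal cumulative distribution function, computed via the
        // Abramowitz & Stegun 7.1.26 approximation of erf (absolute error ~1.5e-7).
        static double CumulativeNormal(double z)
        {
            var x = z / Math.Sqrt(2.0);
            var sign = x < 0.0 ? -1.0 : 1.0;
            x = Math.Abs(x);
            var t = 1.0 / (1.0 + 0.3275911 * x);
            var erf = 1.0 - ((((1.061405429 * t - 1.453152027) * t + 1.421413741) * t
                              - 0.284496736) * t + 0.254829592) * t * Math.Exp(-x * x);
            return 0.5 * (1.0 + sign * erf);
        }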