/// <summary>
/// Sequential Model-based optimization (SMBO). SMBO learns a model based on the initial parameter sets and scores.
/// This model is used to sample new promising parameter candidates which are evaluated and added to the existing parameter sets.
/// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems,
/// where each evaluation of the function to minimize takes a long time, like hyperparameter tuning a machine learning method.
/// But in that case it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
/// Implementation loosely based on:
/// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
/// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
/// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
/// </summary>
/// <param name="parameters">Each row is a series of values for a specific parameter</param>
/// <param name="maxIterations">Maximum number of iterations. maxIterations * numberOfCandidatesEvaluatedPrIteration = total function evaluations</param>
/// <param name="numberOfStartingPoints">Number of randomly created starting points to use for the initial model in the first iteration (default is 10)</param>
/// <param name="numberOfCandidatesEvaluatedPrIteration">How many candidate parameter sets should be sampled from the model in each iteration.
/// The parameter sets are included in order of most promising outcome (default is 3)</param>
/// <param name="seed">Seed for the random initialization</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="parameters"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown when a count argument is below its minimum of 1.</exception>
public SequentialModelBasedOptimizer(double[][] parameters, int maxIterations, int numberOfStartingPoints = 10, int numberOfCandidatesEvaluatedPrIteration = 3, int seed = 42)
{
    if (parameters == null) { throw new ArgumentNullException(nameof(parameters)); }
    // BUGFIX: these range checks previously threw ArgumentNullException with the message in the
    // paramName slot; use ArgumentOutOfRangeException with the correct parameter name instead.
    if (maxIterations <= 0) { throw new ArgumentOutOfRangeException(nameof(maxIterations), "maxIterations must be at least 1"); }
    // BUGFIX: message previously referred to a nonexistent "numberOfParticles" parameter (copy-paste from ParticleSwarmOptimizer).
    if (numberOfStartingPoints < 1) { throw new ArgumentOutOfRangeException(nameof(numberOfStartingPoints), "numberOfStartingPoints must be at least 1"); }
    // Robustness: this argument was previously unvalidated; a non-positive value would silently break each iteration.
    if (numberOfCandidatesEvaluatedPrIteration < 1) { throw new ArgumentOutOfRangeException(nameof(numberOfCandidatesEvaluatedPrIteration), "numberOfCandidatesEvaluatedPrIteration must be at least 1"); }

    m_parameters = parameters;
    m_maxIterations = maxIterations;
    m_numberOfStartingPoints = numberOfStartingPoints;
    m_numberOfCandidatesEvaluatedPrIteration = numberOfCandidatesEvaluatedPrIteration;
    m_random = new Random(seed);

    // hyper parameters for regression random forest learner
    m_learner = new RegressionRandomForestLearner(20, 1, 2000, parameters.Length, 1e-6, 1.0, 42, false);

    // optimizer for finding maximum expectation (most promising hyper parameters) from random forest model
    m_optimizer = new ParticleSwarmOptimizer(m_parameters, 100, 40);
}
/// <summary>
/// Sequential Model-based optimization (SMBO). SMBO learns a model based on the initial parameter sets and scores.
/// This model is used to sample new promising parameter candidates which are evaluated and added to the existing parameter sets.
/// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems,
/// where each evaluation of the function to minimize takes a long time, like hyperparameter tuning a machine learning method.
/// But in that case it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
/// Implementation loosely based on:
/// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
/// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
/// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
/// </summary>
/// <param name="parameters">Each row is a series of values for a specific parameter</param>
/// <param name="maxIterations">Maximum number of iterations. maxIterations * numberOfCandidatesEvaluatedPrIteration = total function evaluations</param>
/// <param name="previousParameterSets">Parameter sets from previous run</param>
/// <param name="previousParameterSetScores">Scores from previous run corresponding to each parameter set</param>
/// <param name="numberOfCandidatesEvaluatedPrIteration">How many candidate parameter sets should be sampled from the model in each iteration.
/// The parameter sets are included in order of most promising outcome (default is 3)</param>
/// <param name="seed">Seed for the random initialization</param>
/// <exception cref="ArgumentNullException">Thrown when a required reference argument is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown when a count argument is below its minimum of 1.</exception>
/// <exception cref="ArgumentException">Thrown when the previous sets and scores disagree in length, or fewer than 2 are supplied.</exception>
public SequentialModelBasedOptimizer(double[][] parameters, int maxIterations, List<double[]> previousParameterSets, List<double> previousParameterSetScores, int numberOfCandidatesEvaluatedPrIteration = 3, int seed = 42)
{
    if (parameters == null) { throw new ArgumentNullException(nameof(parameters)); }
    // BUGFIX: range check previously threw ArgumentNullException with the message in the paramName slot.
    if (maxIterations <= 0) { throw new ArgumentOutOfRangeException(nameof(maxIterations), "maxIterations must be at least 1"); }
    if (previousParameterSets == null) { throw new ArgumentNullException(nameof(previousParameterSets)); }
    // BUGFIX: previously named a nonexistent "previousResults" parameter.
    if (previousParameterSetScores == null) { throw new ArgumentNullException(nameof(previousParameterSetScores)); }
    // Robustness: this argument was previously unvalidated; a non-positive value would silently break each iteration.
    if (numberOfCandidatesEvaluatedPrIteration < 1) { throw new ArgumentOutOfRangeException(nameof(numberOfCandidatesEvaluatedPrIteration), "numberOfCandidatesEvaluatedPrIteration must be at least 1"); }

    if (previousParameterSets.Count != previousParameterSetScores.Count)
    {
        throw new ArgumentException("previousParameterSets length: " + previousParameterSets.Count +
            " does not correspond with previousParameterSetScores length: " + previousParameterSetScores.Count);
    }

    // The initial model needs at least two observations to fit; both lists are
    // already known to be the same length here, so one count check suffices.
    if (previousParameterSetScores.Count < 2)
    {
        throw new ArgumentException("previousParameterSets length and previousParameterSetScores length must be at least 2 and was: " + previousParameterSetScores.Count);
    }

    m_parameters = parameters;
    m_maxIterations = maxIterations;
    m_numberOfCandidatesEvaluatedPrIteration = numberOfCandidatesEvaluatedPrIteration;
    m_random = new Random(seed);

    // hyper parameters for regression random forest learner
    m_learner = new RegressionRandomForestLearner(20, 1, 2000, parameters.Length, 1e-6, 1.0, 42, false);

    // optimizer for finding maximum expectation (most promising hyper parameters) from random forest model
    m_optimizer = new ParticleSwarmOptimizer(m_parameters, 100, 40);

    m_previousParameterSets = previousParameterSets;
    m_previousParameterSetScores = previousParameterSetScores;
}