Example #1
        /// <summary>
        /// Propose a new list of parameter sets.
        /// </summary>
        /// <param name="parameterSetCount">The number of parameter sets to propose</param>
        /// <param name="previousResults">Results from previous runs.
        /// These are used in the model for proposing new parameter sets.
        /// If no results are provided, random parameter sets will be returned.</param>
        /// <returns>The proposed parameter sets.</returns>
        public IEnumerable <double[]> ProposeParameterSets(int parameterSetCount,
                                                           IReadOnlyList <OptimizerResult> previousResults = null)
        {
            var previousParameterSetCount = previousResults == null ? 0 : previousResults.Count;

            if (previousParameterSetCount < m_randomStartingPointsCount)
            {
                var randomParameterSetCount = Math.Min(parameterSetCount,
                                                       m_randomStartingPointsCount - previousParameterSetCount);

                var randomParameterSets = RandomSearchOptimizer.SampleRandomParameterSets(
                    randomParameterSetCount, m_parameters, m_sampler);

                return randomParameterSets;
            }

            // Filter away NaNs, and ensure result order is preserved, when fitting the model.
            var validParameterSets = previousResults
                                     .Where(v => !double.IsNaN(v.Error))
                                     .OrderBy(v => v.Error); // TODO: This might still fail to provide same order if two different parameter sets yield the same error.

            var model = FitModel(validParameterSets);

            return GenerateCandidateParameterSets(parameterSetCount, validParameterSets.ToList(), model);
        }
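
For context, here is a minimal usage sketch of the propose/evaluate loop this method supports. The class and parameter-spec names (BayesianOptimizer, MinMaxParameterSpec) follow SharpLearning.Optimization, but the exact constructor arguments are assumptions and may differ between versions:

        // Hypothetical usage sketch; constructor arguments are assumptions.
        var parameters = new MinMaxParameterSpec[]
        {
            new MinMaxParameterSpec(min: -10.0, max: 10.0),
        };
        var optimizer = new BayesianOptimizer(parameters, iterations: 30);

        // First call: no previous results, so random parameter sets are returned.
        var initialSets = optimizer.ProposeParameterSets(parameterSetCount: 5);

        // Evaluate the sets, then feed the results back to get model-based proposals.
        var evaluated = initialSets
            .Select(p => new OptimizerResult(p, Math.Pow(p[0] - 3.0, 2)))
            .ToList();
        var nextSets = optimizer.ProposeParameterSets(5, evaluated);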
Example #2
        double[][] GreedyPlusRandomSearch(double[][] parentParameterSets, RegressionForestModel model,
                                          int parameterSetCount, IReadOnlyList <OptimizerResult> previousResults)
        {
            // TODO: Handle maximization and minimization. Currently minimizes.
            var best = previousResults.Min(v => v.Error);

            var parameterSets = new List <(double[] parameterSet, double EI)>();

            // Perform local search.
            foreach (var parameterSet in parentParameterSets)
            {
                var bestParameterSet = LocalSearch(parameterSet, model, best, m_epsilon);
                parameterSets.Add(bestParameterSet);
            }

            // Add an additional set of random parameter sets, scored by expected improvement.
            for (int i = 0; i < m_randomSearchPointCount; i++)
            {
                var parameterSet = RandomSearchOptimizer
                                   .SampleParameterSet(m_parameters, m_sampler);

                var expectedImprovement = ComputeExpectedImprovement(best, parameterSet, model);
                parameterSets.Add((parameterSet, expectedImprovement));
            }

            // Take the best parameterSets. Here we want the max expected improvement.
            return parameterSets
                   .OrderByDescending(v => v.EI)
                   .Take(parameterSetCount)
                   .Select(v => v.parameterSet)
                   .ToArray();
        }
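
Both candidate sources above are ranked by expected improvement. ComputeExpectedImprovement is not part of these samples; for minimization the standard closed form is EI(x) = (best - mu(x)) * Phi(z) + sigma(x) * phi(z), with z = (best - mu(x)) / sigma(x), where mu and sigma are the model's predictive mean and standard deviation. A minimal sketch, assuming a predictive mean and variance are available:

        // Minimal sketch of expected improvement for minimization. Assumes a
        // predictive mean and variance are available; the library API may differ.
        static double ExpectedImprovement(double best, double mean, double variance)
        {
            var stdDev = Math.Sqrt(variance);
            if (stdDev == 0.0) { return 0.0; }

            var z = (best - mean) / stdDev;

            // Standard normal pdf and cdf at z.
            var pdf = Math.Exp(-0.5 * z * z) / Math.Sqrt(2.0 * Math.PI);
            var cdf = 0.5 * (1.0 + Erf(z / Math.Sqrt(2.0)));

            return (best - mean) * cdf + stdDev * pdf;
        }

        // Abramowitz & Stegun 7.1.26 approximation of the error function,
        // since .NET has no built-in Math.Erf.
        static double Erf(double x)
        {
            var sign = x < 0.0 ? -1.0 : 1.0;
            x = Math.Abs(x);

            var t = 1.0 / (1.0 + 0.3275911 * x);
            var y = 1.0 - (((((1.061405429 * t - 1.453152027) * t + 1.421413741) * t
                             - 0.284496736) * t + 0.254829592) * t * Math.Exp(-x * x));

            return sign * y;
        }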
Example #3
        private OptimizerResult[] FindNextCandidates(RegressionForestModel model, double bestScore)
        {
            // Generate random candidate parameter sets and score them by expected improvement.
            var results = new List <OptimizerResult>();

            for (var i = 0; i < m_randomSearchPointCount; i++)
            {
                var parameterSet = RandomSearchOptimizer.SampleParameterSet(m_parameters, m_sampler);

                var expectedImprovement = ComputeExpectedImprovement(bestScore, parameterSet, model);
                results.Add(new OptimizerResult(parameterSet, expectedImprovement));
            }

            return results.ToArray();
        }
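
Example #2 also depends on LocalSearch, which is not shown. In SMAC-style optimizers the local search greedily mutates one parameter at a time and keeps a move only when expected improvement rises by more than epsilon. A hypothetical sketch under those assumptions, reusing helpers visible in these samples; the library's actual neighbourhood scheme may differ:

        // Hypothetical sketch of a SMAC-style local search: resample one
        // dimension at a time and keep the move if expected improvement
        // increases by more than epsilon. Not the library's implementation.
        (double[] parameterSet, double EI) LocalSearch(double[] startParameterSet,
            RegressionForestModel model, double best, double epsilon)
        {
            var current = startParameterSet.ToArray();
            var currentEI = ComputeExpectedImprovement(best, current, model);

            var improved = true;
            while (improved)
            {
                improved = false;
                for (int i = 0; i < m_parameters.Length; i++)
                {
                    // Create a neighbour by resampling a single dimension.
                    var neighbour = current.ToArray();
                    neighbour[i] = RandomSearchOptimizer
                        .SampleParameterSet(m_parameters, m_sampler)[i];

                    var neighbourEI = ComputeExpectedImprovement(best, neighbour, model);
                    if (neighbourEI - currentEI > epsilon)
                    {
                        current = neighbour;
                        currentEI = neighbourEI;
                        improved = true;
                    }
                }
            }

            return (current, currentEI);
        }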
Example #4
        /// <summary>
        /// Optimization using Hyperband.
        /// Returns all results, chronologically ordered.
        /// </summary>
        /// <param name="functionToMinimize"></param>
        /// <returns></returns>
        public OptimizerResult[] Optimize(HyperbandObjectiveFunction functionToMinimize)
        {
            var allResults = new List <OptimizerResult>();

            for (int rounds = m_numberOfRounds; rounds >= 0; rounds--)
            {
                // Initial configurations count.
                var initialConfigurationCount = (int)Math.Ceiling((m_totalBudgetPerRound / m_maximumBudget)
                                                                  * (Math.Pow(m_eta, rounds) / (rounds + 1)));

                // Initial budget per parameter set.
                var initialBudget = m_maximumBudget * Math.Pow(m_eta, -rounds);

                var parameterSets = RandomSearchOptimizer.SampleRandomParameterSets(initialConfigurationCount,
                                                                                    m_parameters, m_sampler);

                var results = new ConcurrentBag <OptimizerResult>();

                var iterations = m_skipLastIterationOfEachRound ? rounds : (rounds + 1);
                for (int iteration = 0; iteration < iterations; iteration++)
                {
                    // Run each of the parameter sets with budget
                    // and keep the best (configurationCount / m_eta) configurations

                    var configurationCount = initialConfigurationCount * Math.Pow(m_eta, -iteration);
                    var budget             = initialBudget * Math.Pow(m_eta, iteration);

                    //Trace.WriteLine($"{(int)Math.Round(configurationCount)} configurations x {budget:F1} budget each");
                    foreach (var parameterSet in parameterSets)
                    {
                        var result = functionToMinimize(parameterSet, budget);
                        results.Add(result);
                    }

                    // Select a number of best configurations for the next loop
                    var configurationsToKeep = (int)Math.Round(configurationCount / m_eta);
                    parameterSets = results.OrderBy(v => v.Error)
                                    .Take(configurationsToKeep)
                                    .Select(v => v.ParameterSet)
                                    .ToArray();
                }

                allResults.AddRange(results);
                //Trace.WriteLine($" Lowest loss so far: {allResults.OrderBy(v => v.Error).First().Error:F4}");
            }

            return allResults.ToArray();
        }
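
A usage sketch for this method. The HyperbandObjectiveFunction delegate receives a parameter set together with the budget granted at the current rung, so the objective must support partial training (for example, treating the budget as epochs or boosting rounds). HyperbandOptimizer and its constructor arguments follow SharpLearning.Optimization naming but are assumptions, and TrainAndValidate is a hypothetical helper:

        // Hypothetical usage; constructor arguments may differ between versions.
        var parameters = new MinMaxParameterSpec[]
        {
            new MinMaxParameterSpec(min: 0.001, max: 0.1), // e.g. a learning rate
        };
        var optimizer = new HyperbandOptimizer(parameters,
            maximumBudget: 81, eta: 3);

        OptimizerResult[] results = optimizer.Optimize((parameterSet, budget) =>
        {
            // Train with the granted budget, e.g. (int)budget boosting rounds,
            // and return the validation error. TrainAndValidate is hypothetical.
            var error = TrainAndValidate(parameterSet, (int)budget);
            return new OptimizerResult(parameterSet, error);
        });

        var best = results.OrderBy(r => r.Error).First();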
Example #5
        double[][] GenerateCandidateParameterSets(int parameterSetCount,
                                                  IReadOnlyList <OptimizerResult> previousResults, RegressionForestModel model)
        {
            // Get top parameter sets from previous runs.
            var topParameterSets = previousResults.OrderBy(v => v.Error)
                                   .Take(m_localSearchPointCount).Select(v => v.ParameterSet).ToArray();

            // Perform local search using the top parameter sets from previous runs.
            var challengerCount = (int)Math.Ceiling(parameterSetCount / 2.0F);
            var challengers     = GreedyPlusRandomSearch(topParameterSets, model,
                                                         challengerCount, previousResults);

            // Create random parameter sets.
            var randomParameterSetCount = parameterSetCount - challengers.Length;
            var randomChallengers       = RandomSearchOptimizer.SampleRandomParameterSets(
                randomParameterSetCount, m_parameters, m_sampler);

            // Interleave challengers and random parameter sets.
            return InterLeaveModelBasedAndRandomParameterSets(challengers, randomChallengers);
        }
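
InterLeaveModelBasedAndRandomParameterSets is not included in these samples. Its role is to alternate model-based challengers with random sets so each proposed batch keeps exploring alongside exploiting the model. A minimal sketch, assuming that behaviour:

        // Hypothetical sketch: alternate challengers and random sets,
        // then append whatever remains of the longer array.
        static double[][] InterLeaveModelBasedAndRandomParameterSets(
            double[][] challengers, double[][] randomSets)
        {
            var interleaved = new List<double[]>();
            var count = Math.Max(challengers.Length, randomSets.Length);

            for (int i = 0; i < count; i++)
            {
                if (i < challengers.Length) { interleaved.Add(challengers[i]); }
                if (i < randomSets.Length) { interleaved.Add(randomSets[i]); }
            }

            return interleaved.ToArray();
        }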
Example #6
        /// <summary>
        /// Propose a new list of parameter sets.
        /// </summary>
        /// <param name="parameterSetCount">The number of parameter sets to propose</param>
        /// <param name="previousResults">Results from previous runs.
        /// These are used in the model for proposing new parameter sets.
        /// If no results are provided, random parameter sets will be returned.</param>
        /// <returns>The proposed parameter sets.</returns>
        public double[][] ProposeParameterSets(int parameterSetCount,
                                               IReadOnlyList <OptimizerResult> previousResults = null)
        {
            var previousParameterSetCount = previousResults == null ? 0 : previousResults.Count;

            if (previousParameterSetCount < m_randomStartingPointsCount)
            {
                var randomParameterSetCount = Math.Min(parameterSetCount,
                                                       m_randomStartingPointsCount - previousParameterSetCount);

                var randomParameterSets = RandomSearchOptimizer.SampleRandomParameterSets(
                    randomParameterSetCount, m_parameters, m_sampler);

                return randomParameterSets;
            }

            // Filter away NaNs before fitting the model.
            var validParameterSets = previousResults.Where(v => !double.IsNaN(v.Error));
            var model = FitModel(validParameterSets);

            return GenerateCandidateParameterSets(parameterSetCount, validParameterSets.ToList(), model);
        }
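
FitModel is not shown either, but Examples #8 and #9 below suggest the pattern: convert the surviving parameter sets into an observation matrix with the errors as regression targets, then train the forest learner. A sketch under that assumption:

        // Sketch of FitModel based on the pattern in Examples #8 and #9;
        // assumes m_learner is the regression forest learner used elsewhere.
        RegressionForestModel FitModel(IEnumerable<OptimizerResult> validParameterSets)
        {
            var observations = validParameterSets
                .Select(v => v.ParameterSet).ToList()
                .ToF64Matrix();
            var targets = validParameterSets
                .Select(v => v.Error).ToArray();

            return m_learner.Learn(observations, targets);
        }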
Example #7
        /// <summary>
        /// Optimization using particle swarm optimization. Returns results for all particles.
        /// </summary>
        /// <param name="functionToMinimize"></param>
        /// <returns></returns>
        public OptimizerResult[] Optimize(Func <double[], OptimizerResult> functionToMinimize)
        {
            var particles = new double[m_numberOfParticles][];

            var particleVelocities = Enumerable.Range(0, m_numberOfParticles)
                                     .Select(p => new double[m_parameters.Length])
                                     .ToArray();

            // initialize max and min velocities
            var maxParticleVelocities = new double[m_parameters.Length];
            var minParticleVelocities = new double[m_parameters.Length];

            for (int i = 0; i < m_parameters.Length; i++)
            {
                maxParticleVelocities[i] = Math.Abs(m_parameters[i].Max - m_parameters[i].Min);
                minParticleVelocities[i] = -maxParticleVelocities[i];
            }

            // initialize max and min parameter bounds
            var maxParameters = new double[m_parameters.Length];
            var minParameters = new double[m_parameters.Length];

            for (int i = 0; i < m_parameters.Length; i++)
            {
                maxParameters[i] = m_parameters[i].Max;
                minParameters[i] = m_parameters[i].Min;
            }

            var pBest = Enumerable.Range(0, m_numberOfParticles)
                        .Select(p => new double[m_parameters.Length])
                        .ToArray();

            var pBestScores = Enumerable.Range(0, m_numberOfParticles)
                              .Select(p => double.MaxValue)
                              .ToArray();

            var gBest = new OptimizerResult(new double[m_parameters.Length], double.MaxValue);

            // randomly initialize particle positions
            for (int i = 0; i < m_numberOfParticles; i++)
            {
                particles[i] = RandomSearchOptimizer.SampleParameterSet(m_parameters, m_sampler);
            }

            // iterate to find the best parameter set
            for (int iterations = 0; iterations < m_maxIterations; iterations++)
            {
                Parallel.For(0, m_numberOfParticles, new ParallelOptions {
                    MaxDegreeOfParallelism = m_maxDegreeOfParallelism
                }, (i) =>
                {
                    var result = functionToMinimize(particles[i]);
                    lock (m_bestLocker)
                    {
                        if (result.Error < pBestScores[i])
                        {
                            pBest[i]       = result.ParameterSet;
                            pBestScores[i] = result.Error;
                        }

                        if (result.Error < gBest.Error)
                        {
                            gBest = new OptimizerResult(result.ParameterSet.ToArray(), result.Error);
                            //Trace.WriteLine(gBest.Error);
                        }
                    }
                });

                for (int i = 0; i < m_numberOfParticles; i++)
                {
                    //v[] = v[] + c1 * rand() * (pbest[] - present[]) + c2 * rand() * (gbest[] - present[])
                    particleVelocities[i] = particleVelocities[i].Add(pBest[i].Subtract(particles[i]).Multiply(m_c1 * m_random.NextDouble())
                                                                      .Add(gBest.ParameterSet.Subtract(particles[i]).Multiply(m_c2 * m_random.NextDouble())));

                    BoundCheck(particleVelocities[i], maxParticleVelocities, minParticleVelocities);

                    //present[] = present[] + v[]
                    particles[i] = particles[i].Add(particleVelocities[i]);
                    BoundCheck(particles[i], maxParameters, minParameters);
                }
            }

            var results = new List <OptimizerResult>();

            for (int i = 0; i < m_numberOfParticles; i++)
            {
                results.Add(new OptimizerResult(pBest[i], pBestScores[i]));
            }

            return results.ToArray();
        }
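
BoundCheck, used above for both velocities and positions, is not shown; it presumably clamps each element into its [min, max] range. A minimal sketch of that assumption:

        // Clamp each element of values into [min[i], max[i]], in place.
        static void BoundCheck(double[] values, double[] max, double[] min)
        {
            for (int i = 0; i < values.Length; i++)
            {
                values[i] = Math.Max(min[i], Math.Min(max[i], values[i]));
            }
        }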
Example #8
        /// <summary>
        /// Optimization using Sequential Model-based optimization.
        /// Returns all results, chronologically ordered.
        /// </summary>
        /// <param name="functionToMinimize"></param>
        /// <returns></returns>
        public OptimizerResult[] Optimize(Func <double[], OptimizerResult> functionToMinimize)
        {
            var bestParameterSet      = new double[m_parameters.Length];
            var bestParameterSetScore = double.MaxValue;

            var parameterSets      = new List <double[]>();
            var parameterSetScores = new List <double>();

            var usePreviousResults = m_previousParameterSetScores != null && m_previousParameterSets != null;

            int iterations = 0;

            if (usePreviousResults)
            {
                parameterSets.AddRange(m_previousParameterSets);
                parameterSetScores.AddRange(m_previousParameterSetScores);

                for (int i = 0; i < parameterSets.Count; i++)
                {
                    var score = parameterSetScores[i];
                    if (!double.IsNaN(score))
                    {
                        if (score < bestParameterSetScore)
                        {
                            bestParameterSetScore = score;
                            bestParameterSet      = parameterSets[i];
                        }
                    }
                }
            }
            else
            {
                // initialize random starting points for the first iteration
                for (int i = 0; i < m_randomStartingPointCount; i++)
                {
                    var set   = RandomSearchOptimizer.SampleParameterSet(m_parameters, m_sampler);
                    var score = functionToMinimize(set).Error;
                    iterations++;

                    if (!double.IsNaN(score))
                    {
                        parameterSets.Add(set);
                        parameterSetScores.Add(score);

                        if (score < bestParameterSetScore)
                        {
                            bestParameterSetScore = score;
                            bestParameterSet      = set;
                        }
                    }
                }
            }

            var lastSet = new double[m_parameters.Length];

            for (int iteration = 0; iteration < m_iterations; iteration++)
            {
                // fit model
                var observations = parameterSets.ToF64Matrix();
                var targets      = parameterSetScores.ToArray();
                var model        = m_learner.Learn(observations, targets);

                var bestScore  = parameterSetScores.Min();
                var candidates = FindNextCandidates(model, bestScore);

                var first = true;

                foreach (var candidate in candidates)
                {
                    var parameterSet = candidate.ParameterSet;

                    if (Equals(lastSet, parameterSet) && !first)
                    {
                        // skip evaluation if parameters have not changed.
                        continue;
                    }

                    if (Equals(bestParameterSet, parameterSet))
                    {
                        // If the best parameter set is sampled again,
                        // add a new random parameter set instead.
                        parameterSet = RandomSearchOptimizer
                                       .SampleParameterSet(m_parameters, m_sampler);
                    }

                    var result = functionToMinimize(parameterSet);
                    iterations++;

                    if (!double.IsNaN(result.Error))
                    {
                        // update best
                        if (result.Error < bestParameterSetScore)
                        {
                            bestParameterSetScore = result.Error;
                            bestParameterSet      = result.ParameterSet;
                            //System.Diagnostics.Trace.WriteLine(iterations + ";" + result.Error);
                        }

                        // add point to the parameter set list for the next iteration's model
                        parameterSets.Add(result.ParameterSet);
                        parameterSetScores.Add(result.Error);
                    }

                    lastSet = parameterSet;
                    first   = false;
                }
            }

            var results = new List <OptimizerResult>();

            for (int i = 0; i < parameterSets.Count; i++)
            {
                results.Add(new OptimizerResult(parameterSets[i], parameterSetScores[i]));
            }

            return results.ToArray();
        }
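
A usage sketch for this optimizer. The surrounding class is not named in the sample; SmacOptimizer and MinMaxParameterSpec follow SharpLearning.Optimization naming but, along with the constructor arguments, should be treated as assumptions:

        // Hypothetical usage; constructor arguments are assumptions.
        var parameters = new MinMaxParameterSpec[]
        {
            new MinMaxParameterSpec(min: -5.0, max: 5.0),
            new MinMaxParameterSpec(min: -5.0, max: 5.0),
        };
        var optimizer = new SmacOptimizer(parameters, iterations: 60);

        // Minimize a simple quadratic bowl with its optimum at (1, -2).
        var results = optimizer.Optimize(p => new OptimizerResult(p,
            Math.Pow(p[0] - 1.0, 2) + Math.Pow(p[1] + 2.0, 2)));

        var best = results.OrderBy(r => r.Error).First();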
Example #9
        /// <summary>
        /// Optimization using Sequential Model-based optimization.
        /// Returns all results, chronologically ordered.
        /// </summary>
        /// <param name="functionToMinimize"></param>
        /// <returns></returns>
        public OptimizerResult[] Optimize(Func <double[], OptimizerResult> functionToMinimize)
        {
            var parameterSets      = new BlockingCollection <(double[] Parameters, double Error)>();
            var usePreviousResults = m_previousParameterSetScores != null && m_previousParameterSets != null;

            int iterations = 0;

            if (usePreviousResults)
            {
                for (int i = 0; i < m_previousParameterSets.Count; i++)
                {
                    var score = m_previousParameterSetScores[i];
                    if (!double.IsNaN(score))
                    {
                        parameterSets.Add((m_previousParameterSets[i], score));
                    }
                }
            }
            else
            {
                // initialize random starting points for the first iteration
                Parallel.For(0, m_randomStartingPointCount, m_parallelOptions, i =>
                {
                    var set   = RandomSearchOptimizer.SampleParameterSet(m_parameters, m_sampler);
                    var score = functionToMinimize(set).Error;
                    // Use an atomic increment; this lambda runs on multiple threads.
                    Interlocked.Increment(ref iterations);

                    if (!double.IsNaN(score))
                    {
                        parameterSets.Add((set, score));
                    }
                });
            }
            for (int iteration = 0; iteration < m_iterations; iteration++)
            {
                // fit model
                var observations = parameterSets.Select(s => s.Parameters).ToList().ToF64Matrix();
                var targets      = parameterSets.Select(s => s.Error).ToArray();
                var model        = m_learner.Learn(observations, targets);

                var bestScore  = parameterSets.Min(m => m.Error);
                var candidates = FindNextCandidates(model, bestScore);

                m_isFirst = true;

                Parallel.ForEach(candidates, m_parallelOptions, candidate =>
                {
                    var parameterSet = candidate.ParameterSet;

                    // skip evaluation if parameters have not changed unless explicitly allowed
                    if (m_allowMultipleEvaluations || IsFirstEvaluation() || !Contains(parameterSets, parameterSet))
                    {
                        if (!m_allowMultipleEvaluations && Equals(GetBestParameterSet(parameterSets), parameterSet))
                        {
                            // If the best parameter set is sampled again,
                            // add a new random parameter set instead.
                            parameterSet = RandomSearchOptimizer
                                           .SampleParameterSet(m_parameters, m_sampler);
                        }

                        var result = functionToMinimize(parameterSet);
                        // Atomic increment, as candidates are evaluated in parallel.
                        Interlocked.Increment(ref iterations);

                        if (!double.IsNaN(result.Error))
                        {
                            // add point to the parameter set list for the next iteration's model
                            parameterSets.Add((parameterSet, result.Error));
                        }
                    }