Example #1
        /// <summary>
        /// Propose a new list of parameter sets.
        /// </summary>
        /// <param name="parameterSetCount">The number of parameter sets to propose</param>
        /// <param name="previousResults">Results from previous runs.
        /// These are used in the model for proposing new parameter sets.
        /// If no results are provided, random parameter sets will be returned.</param>
        /// <returns>The proposed parameter sets.</returns>
        public IEnumerable<double[]> ProposeParameterSets(int parameterSetCount,
                                                          IReadOnlyList<OptimizerResult> previousResults = null)
        {
            var previousParameterSetCount = previousResults == null ? 0 : previousResults.Count;

            if (previousParameterSetCount < m_randomStartingPointsCount)
            {
                var randomParameterSetCount = Math.Min(parameterSetCount,
                                                       m_randomStartingPointsCount - previousParameterSetCount);

                var randomParameterSets = RandomSearchOptimizer.SampleRandomParameterSets(
                    randomParameterSetCount, m_parameters, m_sampler);

                return randomParameterSets;
            }

            // Filter away NaNs, and ensure result order is preserved, when fitting the model.
            var validParameterSets = previousResults
                                     .Where(v => !double.IsNaN(v.Error))
                                     .OrderBy(v => v.Error); // TODO: This might still fail to provide same order if two different parameter sets yield the same error.

            var model = FitModel(validParameterSets);

            return GenerateCandidateParameterSets(parameterSetCount, validParameterSets.ToList(), model);
        }
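A minimal ask-and-tell loop that exercises the method above could look like the sketch below. Only ProposeParameterSets is taken from the example; the optimizer instance, its construction, the objective being evaluated, and the OptimizerResult(parameterSet, error) constructor are assumptions made for illustration.

        // Sketch only. Assumes: "optimizer" is an instance of the class exposing
        // ProposeParameterSets, OptimizerResult has a (parameterSet, error)
        // constructor, and System.Linq / System.Collections.Generic are imported.
        var results = new List<OptimizerResult>();

        for (int round = 0; round < 10; round++)
        {
            // Ask for new candidates, feeding back everything seen so far.
            var candidates = optimizer.ProposeParameterSets(parameterSetCount: 5,
                                                            previousResults: results);

            foreach (var parameterSet in candidates)
            {
                // Hypothetical objective: minimize a simple quadratic.
                var error = parameterSet.Sum(p => p * p);
                results.Add(new OptimizerResult(parameterSet, error));
            }
        }

        var best = results.OrderBy(r => r.Error).First();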
Example #2
        /// <summary>
        /// Optimization using Hyperband.
        /// Returns all results, chronologically ordered.
        /// </summary>
        /// <param name="functionToMinimize">The function to minimize. It is called with a parameter set and a budget and returns an OptimizerResult.</param>
        /// <returns>All results, chronologically ordered.</returns>
        public OptimizerResult[] Optimize(HyperbandObjectiveFunction functionToMinimize)
        {
            var allResults = new List<OptimizerResult>();

            for (int rounds = m_numberOfRounds; rounds >= 0; rounds--)
            {
                // Initial configurations count.
                var initialConfigurationCount = (int)Math.Ceiling((m_totalBudgetPerRound / m_maximumBudget)
                                                                  * (Math.Pow(m_eta, rounds) / (rounds + 1)));

                // Initial budget per parameter set.
                var initialBudget = m_maximumBudget * Math.Pow(m_eta, -rounds);

                var parameterSets = RandomSearchOptimizer.SampleRandomParameterSets(initialConfigurationCount,
                                                                                    m_parameters, m_sampler);

                var results = new ConcurrentBag<OptimizerResult>();

                var iterations = m_skipLastIterationOfEachRound ? rounds : (rounds + 1);
                for (int iteration = 0; iteration < iterations; iteration++)
                {
                    // Run each of the parameter sets with budget
                    // and keep the best (configurationCount / m_eta) configurations

                    var configurationCount = initialConfigurationCount * Math.Pow(m_eta, -iteration);
                    var budget             = initialBudget * Math.Pow(m_eta, iteration);

                    //Trace.WriteLine($"{(int)Math.Round(configurationCount)} configurations x {budget:F1} budget each");
                    foreach (var parameterSet in parameterSets)
                    {
                        var result = functionToMinimize(parameterSet, budget);
                        results.Add(result);
                    }

                    // Select a number of best configurations for the next loop
                    var configurationsToKeep = (int)Math.Round(configurationCount / m_eta);
                    parameterSets = results.OrderBy(v => v.Error)
                                    .Take(configurationsToKeep)
                                    .Select(v => v.ParameterSet)
                                    .ToArray();
                }

                allResults.AddRange(results);
                //Trace.WriteLine($" Lowest loss so far: {allResults.OrderBy(v => v.Error).First().Error:F4}");
            }

            return allResults.ToArray();
        }
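For reference, a minimal objective compatible with the call functionToMinimize(parameterSet, budget) in the loop above might look like the sketch below. The exact HyperbandObjectiveFunction delegate signature and the OptimizerResult constructor are assumptions inferred from that call site, not taken from the example itself.

        // Sketch only. Assumes HyperbandObjectiveFunction is a delegate taking
        // (double[] parameterSet, double budget) and returning an OptimizerResult,
        // as suggested by the call site above. Requires System.Linq.
        HyperbandObjectiveFunction minimize = (parameterSet, budget) =>
        {
            // Hypothetical cheap objective: a quadratic plus a term that shrinks
            // as the budget grows, mimicking "train longer, evaluate more precisely".
            var error = parameterSet.Sum(p => p * p) + 1.0 / (1.0 + budget);
            return new OptimizerResult(parameterSet, error);
        };

        var allResults = optimizer.Optimize(minimize);
        var best = allResults.OrderBy(r => r.Error).First();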
Example #3
        double[][] GenerateCandidateParameterSets(int parameterSetCount,
                                                  IReadOnlyList<OptimizerResult> previousResults, RegressionForestModel model)
        {
            // Get top parameter sets from previous runs.
            var topParameterSets = previousResults.OrderBy(v => v.Error)
                                   .Take(m_localSearchPointCount).Select(v => v.ParameterSet).ToArray();

            // Perform local search using the top parameter sets from previous run.
            var challengerCount = (int)Math.Ceiling(parameterSetCount / 2.0F);
            var challengers     = GreedyPlusRandomSearch(topParameterSets, model,
                                                         challengerCount, previousResults);

            // Create random parameter sets.
            var randomParameterSetCount = parameterSetCount - challengers.Length;
            var randomChallengers       = RandomSearchOptimizer.SampleRandomParameterSets(
                randomParameterSetCount, m_parameters, m_sampler);

            // Interleave challengers and random parameter sets.
            return InterLeaveModelBasedAndRandomParameterSets(challengers, randomChallengers);
        }
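As a worked example of the split above: with parameterSetCount = 5, the method requests challengerCount = (int)Math.Ceiling(5 / 2.0) = 3 model-based challengers, and (assuming the local search returns all 3) samples randomParameterSetCount = 5 - 3 = 2 random sets before interleaving the two groups. The actual InterLeaveModelBasedAndRandomParameterSets implementation is not shown in the example; the helper below is only a sketch of what such an alternating interleave could look like.

        // Sketch only: alternate model-based and random parameter sets, appending
        // whatever is left of the longer array. Requires System and
        // System.Collections.Generic.
        static double[][] Interleave(double[][] modelBased, double[][] random)
        {
            var result = new List<double[]>();
            var count = Math.Max(modelBased.Length, random.Length);

            for (int i = 0; i < count; i++)
            {
                if (i < modelBased.Length) { result.Add(modelBased[i]); }
                if (i < random.Length) { result.Add(random[i]); }
            }

            return result.ToArray();
        }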
Example #4
        /// <summary>
        /// Propose a new list of parameter sets.
        /// </summary>
        /// <param name="parameterSetCount">The number of parameter sets to propose</param>
        /// <param name="previousResults">Results from previous runs.
        /// These are used in the model for proposing new parameter sets.
        /// If no results are provided, random parameter sets will be returned.</param>
        /// <returns>The proposed parameter sets.</returns>
        public double[][] ProposeParameterSets(int parameterSetCount,
                                               IReadOnlyList<OptimizerResult> previousResults = null)
        {
            var previousParameterSetCount = previousResults == null ? 0 : previousResults.Count;

            if (previousParameterSetCount < m_randomStartingPointsCount)
            {
                var randomParameterSetCount = Math.Min(parameterSetCount,
                                                       m_randomStartingPointsCount - previousParameterSetCount);

                var randomParameterSets = RandomSearchOptimizer.SampleRandomParameterSets(
                    randomParameterSetCount, m_parameters, m_sampler);

                return randomParameterSets;
            }

            var validParameterSets = previousResults.Where(v => !double.IsNaN(v.Error));
            var model = FitModel(validParameterSets);

            return GenerateCandidateParameterSets(parameterSetCount, validParameterSets.ToList(), model);
        }
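As a worked example of the random-start guard, using hypothetical numbers: with m_randomStartingPointsCount = 20, 15 previous results and parameterSetCount = 10, the method samples Math.Min(10, 20 - 15) = 5 random parameter sets and returns them without fitting a model; only once at least 20 results are available does it fall through to the model-based branch. Note that, unlike Example #1, the valid results here are only filtered for NaN errors and are not re-ordered before the model is fitted.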