        OptimizerResult RunOpenLoopOptimizationTest(List<OptimizerResult> results)
        {
            var parameters = new MinMaxParameterSpec[]
            {
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
            };

            var iterations = 80;
            var randomStartingPointsCount            = 20;
            var functionEvaluationsPerIterationCount = 1;

            var sut = new BayesianOptimizer(parameters,
                                            iterations: iterations,
                                            randomStartingPointCount: randomStartingPointsCount,
                                            functionEvaluationsPerIterationCount: functionEvaluationsPerIterationCount,
                                            randomSearchPointCount: 1000,
                                            seed: 42);

            // Using BayesianOptimizer in an open loop.
            var initialParameterSets  = sut.ProposeParameterSets(randomStartingPointsCount, results);
            var initializationResults = sut.RunParameterSets(Minimize, initialParameterSets);

            results.AddRange(initializationResults);

            for (int i = 0; i < iterations; i++)
            {
                var parameterSets    = sut.ProposeParameterSets(functionEvaluationsPerIterationCount, results);
                var iterationResults = sut.RunParameterSets(Minimize, parameterSets);
                results.AddRange(iterationResults);
            }

            return(results.Where(v => !double.IsNaN(v.Error)).OrderBy(r => r.Error).First());
        }
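The helper above only requires an objective with the signature Func<double[], OptimizerResult>: it receives a candidate parameter set and returns an OptimizerResult carrying the parameters together with the error to minimize. A minimal sketch of such a Minimize function, purely as an assumption (the actual objective used by these tests is defined elsewhere in the fixture):

        // Hypothetical objective for illustration only. It maps a candidate
        // parameter set to new OptimizerResult(parameterSet, error); here the
        // error is a simple sum of squares with its minimum at the origin.
        OptimizerResult Minimize(double[] p)
        {
            var error = p.Sum(x => x * x);
            return new OptimizerResult(p, error);
        }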
        public void BayesianOptimizer_OptimizeBest_MultipleParameters(bool runParallel)
        {
            var parameters = new MinMaxParameterSpec[]
            {
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
            };

            var sut = new BayesianOptimizer(parameters,
                                            iterations: 100,
                                            randomStartingPointCount: 5,
                                            functionEvaluationsPerIterationCount: 1,
                                            randomSearchPointCount: 1000,
                                            seed: 42,
                                            runParallel: runParallel);

            var actual = sut.OptimizeBest(Minimize);

            Assert.AreEqual(-0.76070603822760785, actual.Error, Delta);
            Assert.AreEqual(3, actual.ParameterSet.Length);

            Assert.AreEqual(1.6078245041928358, actual.ParameterSet[0], Delta);
            Assert.AreEqual(-8.9735394990879769, actual.ParameterSet[1], Delta);
            Assert.AreEqual(-0.18217921731163855, actual.ParameterSet[2], Delta);
        }
        public void BayesianOptimizer_Optimize()
        {
            var parameters = new MinMaxParameterSpec[]
            {
                new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
            };

            var sut = new BayesianOptimizer(parameters,
                                            iterations: 120,
                                            randomStartingPointCount: 5,
                                            functionEvaluationsPerIterationCount: 1,
                                            randomSearchPointCount: 1000,
                                            seed: 42,
                                            runParallel: false); // Note: since the returned results are not ordered by error,
                                                                 // running with parallel computations will not return a reproducible order of results,
                                                                 // so runParallel must be false for this test.

            var results = sut.Optimize(MinimizeWeightFromHeight);
            var actual  = new OptimizerResult[] { results.First(), results.Last() };

            var expected = new OptimizerResult[]
            {
                new OptimizerResult(new double[] { 43.216748276360683 }, 1352.8306605984087),
                new OptimizerResult(new double[] { 38.201425707992833 }, 119.1316225267316)
            };

            Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
            Assert.AreEqual(expected.First().ParameterSet.First(), actual.First().ParameterSet.First(), Delta);

            Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta);
            Assert.AreEqual(expected.Last().ParameterSet.First(), actual.Last().ParameterSet.First(), Delta);
        }
Example 4
        public override double Evaluate(IChromosome chromosome)
        {
            try
            {
                var parameters = Config.Genes.Select(s =>
                                                     new MinMaxParameterSpec(min: (double)(s.MinDecimal ?? s.MinInt.Value), max: (double)(s.MaxDecimal ?? s.MaxInt.Value),
                                                                             transform: Transform.Linear, parameterType: s.Precision > 0 ? ParameterType.Continuous : ParameterType.Discrete)
                                                     ).ToArray();


                IOptimizer optimizer = null;
                if (Config.Fitness != null)
                {
                    if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.RandomSearch.ToString())
                    {
                        optimizer = new RandomSearchOptimizer(parameters, iterations: Config.Generations, seed: 42, maxDegreeOfParallelism: Config.MaxThreads);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.ParticleSwarm.ToString())
                    {
                        optimizer = new ParticleSwarmOptimizer(parameters, maxIterations: Config.Generations, numberOfParticles: Config.PopulationSize,
                                                               seed: 42, maxDegreeOfParallelism: Config.MaxThreads);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.Bayesian.ToString())
                    {
                        optimizer = new BayesianOptimizer(parameters, maxIterations: Config.Generations, numberOfStartingPoints: Config.PopulationSize, seed: 42);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.GlobalizedBoundedNelderMead.ToString())
                    {
                        optimizer = new GlobalizedBoundedNelderMeadOptimizer(parameters, maxRestarts: Config.Generations,
                                                                             maxIterationsPrRestart: Config.PopulationSize, seed: 42, maxDegreeOfParallelism: Config.MaxThreads);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.Genetic.ToString())
                    {
                        throw new Exception("Genetic optimizer cannot be used with Sharpe Maximizer");
                    }
                }

                //todo:
                // GridSearchOptimizer?

                Func<double[], OptimizerResult> minimize = p => Minimize(p, (Chromosome)chromosome);

                // run optimizer
                var result = optimizer.OptimizeBest(minimize);

                Best = ToChromosome(result, chromosome);

                return(result.Error);
            }
            catch (Exception ex)
            {
                Program.Logger.Error(ex);
                return(ErrorFitness);
            }
        }
Example 5
        public void BayesianOptimizer_OptimizeNonDeterministicInParallel()
        {
            var parameters = new MinMaxParameterSpec[]
            {
                new MinMaxParameterSpec(0, 1, Transform.Linear, ParameterType.Discrete)
            };
            var sut = new BayesianOptimizer(parameters, iterations: 240, randomStartingPointCount: 5, functionEvaluationsPerIteration: 5,
                                            seed: Seed, maxDegreeOfParallelism: -1, allowMultipleEvaluations: true);
            var results = sut.Optimize(p => MinimizeNonDeterministic(p, Random));
            var actual = new OptimizerResult[] { results.First(), results.Last() }.OrderByDescending(o => o.Error);

            Assert.AreEqual(1, actual.First().Error);
            Assert.AreEqual(1, (int)actual.First().ParameterSet[0]);
        }
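The test above evaluates a noisy objective several times per iteration, which is why the optimizer is created with allowMultipleEvaluations: true. One plausible shape for such an objective, sketched here as an assumption (the real MinimizeNonDeterministic is defined in the test fixture):

        // Hypothetical noisy objective for illustration. p[0] is the single
        // discrete parameter (0 or 1); the shared Random instance makes repeated
        // evaluations of the same parameter set return different errors.
        static OptimizerResult MinimizeNonDeterministic(double[] p, Random random)
        {
            var error = p[0] + random.NextDouble();
            return new OptimizerResult(p, error);
        }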
Example 6
        public async Task <IterationResult> Start(IOptimizerConfiguration config, CancellationToken cancellationToken)
        {
            CancellationToken = cancellationToken;

            var parameters = config.Genes.Select(s =>
                                                 new MinMaxParameterSpec(min: s.Min ?? s.Actual.Value, max: s.Max ?? s.Actual.Value,
                                                                         transform: Transform.Linear, parameterType: s.Precision > 0 ? ParameterType.Continuous : ParameterType.Discrete)
                                                 ).ToArray();

            Keys = config.Genes.Where(g => g.Key != "id").Select(s => s.Key);

            IOptimizer optimizerMethod = null;

            if (config.Fitness != null)
            {
                if (config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.RandomSearch.ToString())
                {
                    optimizerMethod = new RandomSearchOptimizer(parameters, iterations: config.Generations, seed: 42, runParallel: false);
                }
                else if (config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.ParticleSwarm.ToString())
                {
                    optimizerMethod = new ParticleSwarmOptimizer(parameters, maxIterations: config.Generations, numberOfParticles: config.PopulationSize,
                                                                 seed: 42, maxDegreeOfParallelism: 1);
                }
                else if (config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.Bayesian.ToString())
                {
                    optimizerMethod = new BayesianOptimizer(parameters: parameters, iterations: config.Generations, randomStartingPointCount: config.PopulationSize,
                                                            functionEvaluationsPerIterationCount: config.PopulationSize, seed: 42, runParallel: false);
                }
                else if (config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.GlobalizedBoundedNelderMead.ToString())
                {
                    optimizerMethod = new GlobalizedBoundedNelderMeadOptimizer(parameters, maxRestarts: config.Generations,
                                                                               maxIterationsPrRestart: config.PopulationSize, seed: 42, maxDegreeOfParallelism: 1);
                }
                else if (config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.GridSearch.ToString())
                {
                    optimizerMethod = new GridSearchOptimizer(config.Genes.Select(s => new GridParameterSpec(RangeWithPrecision.Range(s.Min.Value, s.Max.Value, s.Precision.Value).ToArray())).ToArray(), runParallel: false);
                }
            }
            else
            {
                throw new ArgumentException("No optimizer was configured.");
            }

            var result = await optimizerMethod.OptimizeBest(Minimize);

            return(new IterationResult {
                ParameterSet = result.ParameterSet, Cost = IsMaximizing ? result.Error * -1 : result.Error
            });
        }
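A brief call-site sketch for the Start method above; the runner and config names are assumptions, and how the IOptimizerConfiguration instance is populated depends on the host application:

            // Hypothetical call site for illustration; "runner" is an instance of the
            // class declaring Start and "config" is a populated IOptimizerConfiguration.
            var cts = new CancellationTokenSource();
            var iterationResult = await runner.Start(config, cts.Token);
            Console.WriteLine($"Best cost: {iterationResult.Cost}");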
Example 7
        public void BayesianOptimizer_OptimizeBest()
        {
            var parameters = new MinMaxParameterSpec[]
            {
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
            };
            var sut    = new BayesianOptimizer(parameters, 100, 5, 1, maxDegreeOfParallelism: 1);
            var actual = sut.OptimizeBest(Minimize);

            Assert.AreEqual(-0.74765422244251278, actual.Error, 0.0001);
            Assert.AreEqual(3, actual.ParameterSet.Length);

            Assert.AreEqual(-5.00656832708352, actual.ParameterSet[0], Delta);
            Assert.AreEqual(-9.67008227467075, actual.ParameterSet[1], Delta);
            Assert.AreEqual(-0.241737044528936, actual.ParameterSet[2], Delta);
        }
        public void BayesianOptimizer_OptimizeBest()
        {
            var parameters = new double[][]
            {
                new double[] { -10.0, 10.0 },
                new double[] { -10.0, 10.0 },
                new double[] { -10.0, 10.0 }
            };
            var sut    = new BayesianOptimizer(parameters, 100, 5, 1);
            var actual = sut.OptimizeBest(Minimize);

            Assert.AreEqual(actual.Error, -0.92327107866106661, 0.0001);
            Assert.AreEqual(actual.ParameterSet.Length, 3);

            Assert.AreEqual(actual.ParameterSet[0], 8.1239613509382878, 0.0001);
            Assert.AreEqual(actual.ParameterSet[1], -9.2896384835660637, 0.0001);
            Assert.AreEqual(actual.ParameterSet[2], -0.03435398919245003, 0.0001);
        }
Example 9
        public void BayesianOptimizer_OptimizeBest()
        {
            var parameters = new ParameterBounds[]
            {
                new ParameterBounds(-10.0, 10.0, Transform.Linear),
                new ParameterBounds(-10.0, 10.0, Transform.Linear),
                new ParameterBounds(-10.0, 10.0, Transform.Linear),
            };
            var sut    = new BayesianOptimizer(parameters, 100, 5, 1);
            var actual = sut.OptimizeBest(Minimize);

            Assert.AreEqual(actual.Error, -0.74765422244251278, 0.0001);
            Assert.AreEqual(actual.ParameterSet.Length, 3);

            Assert.AreEqual(actual.ParameterSet[0], -5.0065683270835173, m_delta);
            Assert.AreEqual(actual.ParameterSet[1], -9.67008227467075, m_delta);
            Assert.AreEqual(actual.ParameterSet[2], -0.24173704452893574, m_delta);
        }
        public void BayesianOptimizer_Optimize()
        {
            var parameters = new double[][] { new double[] { 0.0, 100.0 } };
            var sut        = new BayesianOptimizer(parameters, 120, 5, 1);
            var results    = sut.Optimize(Minimize2);
            var actual     = new OptimizerResult[] { results.First(), results.Last() };

            var expected = new OptimizerResult[]
            {
                new OptimizerResult(new double[] { 37.710969353891429 }, 109.34400835405613),
                new OptimizerResult(new double[] { 99.646240426062718 }, 157577.44222424511)
            };

            Assert.AreEqual(expected.First().Error, actual.First().Error, 0.0001);
            Assert.AreEqual(expected.First().ParameterSet.First(), actual.First().ParameterSet.First(), 0.0001);

            Assert.AreEqual(expected.Last().Error, actual.Last().Error, 0.0001);
            Assert.AreEqual(expected.Last().ParameterSet.First(), actual.Last().ParameterSet.First(), 0.0001);
        }
Example 11
        public void BayesianOptimizer_OptimizeBestInParallel()
        {
            var parameters = new MinMaxParameterSpec[]
            {
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
                new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
            };
            // High variance without a fixed seed.
            var sut    = new BayesianOptimizer(parameters, 100, 5, 2, seed: Seed);
            var actual = sut.OptimizeBest(Minimize);

            Assert.AreEqual(-0.736123479387088, actual.Error, Delta);
            Assert.AreEqual(3, actual.ParameterSet.Length);

            Assert.AreEqual(-4.37019084318084, actual.ParameterSet[0], Delta);
            Assert.AreEqual(-3.05224638108734, actual.ParameterSet[1], Delta);
            Assert.AreEqual(0.274598500819224, actual.ParameterSet[2], Delta);
        }
        public void BayesianOptimizer_OptimizeBest_SingleParameter(bool runParallel)
        {
            var parameters = new MinMaxParameterSpec[]
            {
                new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
            };

            var sut = new BayesianOptimizer(parameters,
                                            iterations: 80,
                                            randomStartingPointCount: 20,
                                            functionEvaluationsPerIterationCount: 1,
                                            randomSearchPointCount: 1000,
                                            seed: 42,
                                            runParallel: runParallel);

            var actual = sut.OptimizeBest(MinimizeWeightFromHeight);

            Assert.AreEqual(126.50056735005998, actual.Error, Delta);
            Assert.AreEqual(38.359608938153649, actual.ParameterSet.Single(), Delta);
        }
Example 13
        public void BayesianOptimizer_Optimize()
        {
            var parameters = new ParameterBounds[]
            {
                new ParameterBounds(0.0, 100.0, Transform.Linear)
            };
            var sut     = new BayesianOptimizer(parameters, 120, 5, 1);
            var results = sut.Optimize(Minimize2);
            var actual  = new OptimizerResult[] { results.First(), results.Last() };

            var expected = new OptimizerResult[]
            {
                new OptimizerResult(new double[] { 42.323589763754789 }, 981.97873691815118),
                new OptimizerResult(new double[] { 99.110398813667885 }, 154864.41962974239)
            };

            Assert.AreEqual(expected.First().Error, actual.First().Error, m_delta);
            Assert.AreEqual(expected.First().ParameterSet.First(), actual.First().ParameterSet.First(), m_delta);

            Assert.AreEqual(expected.Last().Error, actual.Last().Error, m_delta);
            Assert.AreEqual(expected.Last().ParameterSet.First(), actual.Last().ParameterSet.First(), m_delta);
        }
        public void BayesianOptimizer_Optimize()
        {
            var parameters = new MinMaxParameterSpec[]
            {
                new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
            };
            var sut     = new BayesianOptimizer(parameters, 120, 5, 1);
            var results = sut.Optimize(Minimize2);
            var actual  = new OptimizerResult[] { results.First(), results.Last() };

            var expected = new OptimizerResult[]
            {
                new OptimizerResult(new double[] { 90.513222660177 }, 114559.431919558),
                new OptimizerResult(new double[] { 24.204380402436 }, 7601.008090362)
            };

            Assert.AreEqual(expected.First().Error, actual.First().Error, m_delta);
            Assert.AreEqual(expected.First().ParameterSet.First(), actual.First().ParameterSet.First(), m_delta);

            Assert.AreEqual(expected.Last().Error, actual.Last().Error, m_delta);
            Assert.AreEqual(expected.Last().ParameterSet.First(), actual.Last().ParameterSet.First(), m_delta);
        }
Example 15
        static BayesianOptimizer CreateSut(
            int? maybeMaxDegreeOfParallelism,
            MinMaxParameterSpec[] parameters)
        {
            const int DefaultMaxDegreeOfParallelism = -1;

            var maxDegreeOfParallelism = maybeMaxDegreeOfParallelism.HasValue ?
                                         maybeMaxDegreeOfParallelism.Value : DefaultMaxDegreeOfParallelism;

            var runParallel = maybeMaxDegreeOfParallelism.HasValue;

            var sut = new BayesianOptimizer(parameters,
                                            iterations: 30,
                                            randomStartingPointCount: 5,
                                            functionEvaluationsPerIterationCount: 5,
                                            randomSearchPointCount: 1000,
                                            seed: 42,
                                            runParallel: runParallel,
                                            maxDegreeOfParallelism: maxDegreeOfParallelism);

            return(sut);
        }
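The factory above derives both runParallel and maxDegreeOfParallelism from a single nullable argument: passing null yields a sequential optimizer, while any value enables parallel evaluation with that degree of parallelism. A short usage sketch, assuming a MinMaxParameterSpec[] like those used in the other tests:

            // Sequential: no degree of parallelism requested, so runParallel is false.
            var sequentialSut = CreateSut(null, parameters);

            // Parallel: runParallel is true and up to 4 degrees of parallelism are used.
            var parallelSut = CreateSut(4, parameters);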
Example 16
        public void BayesianOptimizer_Optimize()
        {
            var parameters = new MinMaxParameterSpec[]
            {
                new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
            };
            var sut = new BayesianOptimizer(parameters, 120, 5, 1, maxDegreeOfParallelism: 1);
            var results = sut.Optimize(MinimizeWeightFromHeight);
            var actual = new OptimizerResult[] { results.First(), results.Last() }.OrderByDescending(o => o.Error);

            var expected = new OptimizerResult[]
            {
                new OptimizerResult(new double[] { 90.513222660177 }, 114559.431919558),
                new OptimizerResult(new double[] { 24.2043804024367 }, 7601.00809036235)
            };

            Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
            Assert.AreEqual(expected.First().ParameterSet.First(),
                            actual.First().ParameterSet.First(), Delta);

            Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta);
            Assert.AreEqual(expected.Last().ParameterSet.First(),
                            actual.Last().ParameterSet.First(), Delta);
        }
        public void BayesianOptimizer_ArgumentCheck_Iterations()
        {
            var sut = new BayesianOptimizer(new[] { new GridParameterSpec(0, 1, 2) },
                                            0);
        }
        public void BayesianOptimizer_ArgumentCheck_RandomSearchPointCount()
        {
            var sut = new BayesianOptimizer(new[] { new GridParameterSpec(0, 1, 2) },
                                            10, 20, 30, 0);
        }
        public void BayesianOptimizer_ArgumentCheck_FunctionEvaluationsPerIterationCount()
        {
            var sut = new BayesianOptimizer(new[] { new GridParameterSpec(0, 1, 2) },
                                            10, 20, 0);
        }
        public void BayesianOptimizer_ArgumentCheck_ParameterRanges()
        {
            var sut = new BayesianOptimizer(null, 20);
        }
        public override double Evaluate(IChromosome chromosome)
        {
            try
            {
                var parameters = Config.Genes.Select(s =>
                                                     new MinMaxParameterSpec(min: s.Min ?? s.Actual.Value, max: s.Max ?? s.Actual.Value,
                                                                             transform: Transform.Linear, parameterType: s.Precision > 0 ? ParameterType.Continuous : ParameterType.Discrete)
                                                     ).ToArray();


                IOptimizer optimizer = null;
                if (Config.Fitness != null)
                {
                    if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.RandomSearch.ToString())
                    {
                        optimizer = new RandomSearchOptimizer(parameters, iterations: Config.Generations, seed: Seed, maxDegreeOfParallelism: Config.MaxThreads);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.ParticleSwarm.ToString())
                    {
                        optimizer = new ParticleSwarmOptimizer(parameters, maxIterations: Config.Generations, numberOfParticles: Config.PopulationSize,
                                                               seed: Seed, maxDegreeOfParallelism: Config.MaxThreads);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.Bayesian.ToString())
                    {
                        optimizer = new BayesianOptimizer(parameters: parameters, iterations: Config.Generations, randomStartingPointCount: Config.PopulationSize,
                                                          functionEvaluationsPerIterationCount: Config.PopulationSize, seed: Seed);
                        //optimizer = new BayesianOptimizer(parameters, iterations: Config.Generations, randomStartingPointCount: Config.PopulationSize,
                        //    functionEvaluationsPerIteration: Config.MaxThreads, seed: 42, maxDegreeOfParallelism: Config.MaxThreads, allowMultipleEvaluations: true);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.GlobalizedBoundedNelderMead.ToString())
                    {
                        optimizer = new GlobalizedBoundedNelderMeadOptimizer(parameters, maxRestarts: Config.Generations,
                                                                             maxIterationsPrRestart: Config.PopulationSize, seed: Seed, maxDegreeOfParallelism: Config.MaxThreads);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.Smac.ToString())
                    {
                        optimizer = new SmacOptimizer(parameters, iterations: Config.Generations, randomStartingPointCount: Config.PopulationSize, seed: 42,
                                                      functionEvaluationsPerIterationCount: Config.MaxThreads);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.GridSearch.ToString())
                    {
                        optimizer = new GridSearchOptimizer(parameters);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.Genetic.ToString())
                    {
                        throw new Exception("Genetic optimizer cannot be used with Sharpe Maximizer");
                    }
                }
                else
                {
                    throw new ArgumentException("No fitness section was configured.");
                }


                Func<double[], OptimizerResult> minimize = p => Minimize(p, (Chromosome)chromosome);

                var result = optimizer.OptimizeBest(minimize);

                Best = MergeFromResult(result, chromosome);

                return(result.Error);
            }
            catch (Exception ex)
            {
                LogProvider.ErrorLogger.Error(ex);
                return(ErrorFitness);
            }
        }