Esempio n. 1
0
        /// <summary>
        /// Verifies that Range(1, 2, 1) yields every 0.1 step from 1.0 to 2.0 inclusive.
        /// </summary>
        public void Given_min_max_precision_When_creating_range_Then_should_return_all_members()
        {
            var actual   = RangeWithPrecision.Range(1, 2, 1).ToArray();
            var expected = new double[] { 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0 };

            // DeepEqual's extension is actual.ShouldDeepEqual(expected); the original
            // call had receiver and argument swapped, which inverts the actual/expected
            // roles in the failure message.
            actual.ShouldDeepEqual(expected);
        }
Esempio n. 2
0
        /// <summary>
        /// Verifies that a range over uneven bounds at precision 6 has the expected
        /// length and that its first/last two members match the configured step.
        /// </summary>
        public void Given_uneven_min_max_And_high_precision_When_creating_range_Then_should_return_all_members()
        {
            const double min       = 0.056;
            const double max       = 0.967;
            const int    precision = 6;
            var          actual    = RangeWithPrecision.Range(min, max, precision).ToArray();

            Assert.Equal(CalculateLength(min, max, precision), actual.Length);

            // Compare doubles to the configured number of decimal places rather than
            // bit-for-bit: exact floating-point equality is fragile (xUnit guidance).
            Assert.Equal(min, actual[0], precision);
            Assert.Equal(0.056001, actual[1], precision);
            Assert.Equal(0.966999, actual[^2], precision);
            Assert.Equal(max, actual[^1], precision);
        }
Esempio n. 3
0
        /// <summary>
        /// Builds the optimizer selected by <paramref name="config"/>, runs a single
        /// optimization pass, and returns the best parameter set found.
        /// </summary>
        /// <param name="config">Optimizer settings; Genes supply the parameter bounds.</param>
        /// <param name="cancellationToken">Token stored for cancelling in-flight iterations.</param>
        /// <returns>The best parameter set and its sign-normalized cost.</returns>
        /// <exception cref="ArgumentException">
        /// Thrown when no fitness section is configured, or when the configured
        /// optimizer type name is not recognized.
        /// </exception>
        public async Task<IterationResult> Start(IOptimizerConfiguration config, CancellationToken cancellationToken)
        {
            CancellationToken = cancellationToken;

            // Genes with a precision are treated as continuous dimensions; otherwise discrete.
            // A gene with no explicit Min/Max collapses to a fixed point at its Actual value.
            var parameters = config.Genes.Select(s =>
                                                 new MinMaxParameterSpec(min: s.Min ?? s.Actual.Value, max: s.Max ?? s.Actual.Value,
                                                                         transform: Transform.Linear, parameterType: s.Precision > 0 ? ParameterType.Continuous : ParameterType.Discrete)
                                                 ).ToArray();

            Keys = config.Genes.Where(g => g.Key != "id").Select(s => s.Key);

            // Fail fast before building anything if no fitness section is present.
            if (config.Fitness == null)
            {
                throw new ArgumentException("No optimizer was configured.");
            }

            IOptimizer optimizerMethod;

            // Seed is pinned to 42 and parallelism disabled so runs are reproducible.
            if (config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.RandomSearch.ToString())
            {
                optimizerMethod = new RandomSearchOptimizer(parameters, iterations: config.Generations, seed: 42, runParallel: false);
            }
            else if (config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.ParticleSwarm.ToString())
            {
                optimizerMethod = new ParticleSwarmOptimizer(parameters, maxIterations: config.Generations, numberOfParticles: config.PopulationSize,
                                                             seed: 42, maxDegreeOfParallelism: 1);
            }
            else if (config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.Bayesian.ToString())
            {
                optimizerMethod = new BayesianOptimizer(parameters: parameters, iterations: config.Generations, randomStartingPointCount: config.PopulationSize,
                                                        functionEvaluationsPerIterationCount: config.PopulationSize, seed: 42, runParallel: false);
            }
            else if (config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.GlobalizedBoundedNelderMead.ToString())
            {
                optimizerMethod = new GlobalizedBoundedNelderMeadOptimizer(parameters, maxRestarts: config.Generations,
                                                                           maxIterationsPrRestart: config.PopulationSize, seed: 42, maxDegreeOfParallelism: 1);
            }
            else if (config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.GridSearch.ToString())
            {
                // GridSearch ignores the MinMax specs above and enumerates every value
                // the gene's range produces at its configured precision.
                optimizerMethod = new GridSearchOptimizer(config.Genes.Select(s => new GridParameterSpec(RangeWithPrecision.Range(s.Min.Value, s.Max.Value, s.Precision.Value).ToArray())).ToArray(), runParallel: false);
            }
            else
            {
                // Previously an unrecognized name left optimizerMethod null and crashed
                // later with a NullReferenceException at OptimizeBest; fail fast instead.
                throw new ArgumentException($"Unknown optimizer type '{config.Fitness.OptimizerTypeName}'.");
            }

            var result = await optimizerMethod.OptimizeBest(Minimize);

            // The underlying optimizers minimize; flip the sign of the error when the
            // fitness is being maximized so callers always see a comparable cost.
            return new IterationResult
            {
                ParameterSet = result.ParameterSet,
                Cost         = IsMaximizing ? result.Error * -1 : result.Error
            };
        }