public void FletcherReevesRosenbrock()
        {
            // arbitrary starting point for the search
            var initialTheta = Vector<double>.Build.DenseOfArray(new[] { -1D, 1D });

            // define the hypothesis with default parameter
            var rosenbrockParameter = Vector<double>.Build.DenseOfArray(new[] { 1D, 100D });
            var hypothesis = new RosenbrockHypothesis();

            // the cost function is the value of the Rosenbrock hypothesis at the candidate point
            var costFunction = new FunctionValueOptimization<double>(hypothesis, rosenbrockParameter);

            // define the optimization problem
            var problem = new OptimizationProblem<double, IDifferentiableCostFunction<double>>(costFunction, initialTheta);

            // define the line search algorithm
            var lineSearch = new SecantMethod()
            {
                ErrorTolerance = 1E-6D
            };

            // optimize!
            var gd = new FletcherReevesCG(lineSearch)
            {
                MaxIterations = 10000,
                ErrorTolerance = 1E-8D
            };
            var result = gd.Minimize(problem);

            // assert!
            var coefficients = result.Coefficients;
            coefficients[0].Should().BeApproximately(rosenbrockParameter[0], 1E-5D, "because the Rosenbrock function has a minimum at x={0}, y={1}", rosenbrockParameter[0], Math.Sqrt(rosenbrockParameter[0]));
            coefficients[1].Should().BeApproximately(Math.Sqrt(rosenbrockParameter[0]), 1E-5D, "because the Rosenbrock function has a minimum at x={0}, y={1}", rosenbrockParameter[0], Math.Sqrt(rosenbrockParameter[0]));
        }
        public void LinearRegressionWithResidualSumOfSquares()
        {
            // obtain the test data
            var trainingSet = new List<DataPoint<double>>
            {
                new DataPoint<double>(-1, -1.5),
                new DataPoint<double>(0, 0.5),
                new DataPoint<double>(1, 2.5),
                new DataPoint<double>(2, 4.5),
                new DataPoint<double>(3, 6.5)
            };

            // assume a hypothesis
            var hypothesis = new LinearHypothesis(1);
            var initialCoefficients = Vector<double>.Build.Random(2);

            // cost function is sum of squared errors
            var costFunction = new ResidualSumOfSquaresCostFunction(hypothesis, trainingSet);

            // define the optimization problem
            var problem = new OptimizationProblem<double, IDifferentiableCostFunction<double>>(costFunction, initialCoefficients);

            // optimize!
            var gd = new ResilientErrorGD
            {
                ErrorTolerance = 0.0D
            };
            var result = gd.Minimize(problem);

            // assert!
            var coefficients = result.Coefficients;
            coefficients[0].Should().BeApproximately(0.5, 1E-6D, "because that's the underlying system's intercept");
            coefficients[1].Should().BeApproximately(2, 1E-6D, "because that's the underlying system's slope");
        }
Example #3
 public SimpleBayesianOptimizerProxy(ModelsDatabase modelsDatabase, OptimizationProblem optimizationProblem, Guid? optimizerId)
 {
     this.modelsDatabase      = modelsDatabase;
     this.optimizationProblem = optimizationProblem;
     OptimizerId = optimizerId;
     optimizerExecutionContext = new SimpleBayesianOptimizerExecutionContext()
     {
         OptimizerId = optimizerId,
     };
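     // record the initial model version (0) in the execution context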
     optimizerExecutionContext.ModelVersions.Add(0);
 }
Example #4
        public CutPattern[] FindSolution(IEnumerable <int> pieceLengths, IEnumerable <int> pieceWidths, IEnumerable <int> pieceDemands, int stockLength, int stockWidth, int stockCost)
        {
            int[] pLengths = pieceLengths.ToArray <int>();
            int[] pWidths  = pieceWidths.ToArray <int>();
            int[] pDemands = pieceDemands.ToArray <int>();

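            // build the cutting-stock problem from the piece data and solve it with the simplex solver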
            OptimizationProblem problem = new OptimizationProblem(stockLength, stockWidth, stockCost, pLengths, pWidths, pDemands);
            Simplex             simp    = new Simplex(problem);

            CutPattern[] solution = simp.Solve();
            return(solution);
        }
Example #5
        public void Solve_NaiveOptimizationSolver()
        {
            var problem         = MapColoring.CreateProblem(5);
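            // soft constraint: prefer assignments that use exactly 3 distinct values across the variables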
            var softConstraints = new List <ISoftConstraint <string, int> > {
                new NumUniqueConstraint <string, int>(3, problem.Variables)
            };
            var optimizationProblem = new OptimizationProblem <string, int>(problem.Variables, problem.Constraints, softConstraints, problem.InitialAssignment);

            var solver   = new NaiveConstraintOptimization <string, int>();
            var solution = solver.Optimize(optimizationProblem);

            var assigned = new Dictionary <string, int>();

            foreach (Variable <string, int> v in problem.Variables)
            {
                assigned[v.UserObject] = solution.GetValue(v);
            }


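            // adjacency list for the Australian map-colouring instance, used below to verify
            // that no two neighbouring regions were assigned the same value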
            var adjacencies = new Dictionary <string, List <string> > {
                { "Western Australia", new List <string> {
                      "Northern Territory", "South Australia"
                  } },
                { "Northern Territory", new List <string> {
                      "Western Australia", "South Australia", "Queensland"
                  } },
                { "New South Wales", new List <string> {
                      "Queensland", "South Australia", "Victoria"
                  } },
                { "Queensland", new List <string> {
                      "Northern Territory", "South Australia", "New South Wales"
                  } },
                { "South Australia", new List <string> {
                      "Western Australia", "Northern Territory", "New South Wales", "Queensland", "Victoria"
                  } },
                { "Victoria", new List <string> {
                      "New South Wales", "South Australia"
                  } },
                { "Tasmania", new List <string> {
                  } }
            };


            foreach (string t in adjacencies.Keys)
            {
                var neighbors = adjacencies[t];
                foreach (string n in neighbors)
                {
                    Assert.AreNotEqual(assigned[t], assigned[n]);
                }
            }
            Assert.AreEqual(3, assigned.Values.Distinct().Count());
        }
        protected AbstractBinaryCoding(OptimizationProblem problem, int bitsPerContinuousVariable, int bitsPerIntegerVariable)
        {
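            // only the continuous part of the problem is encoded; integer variables are not used by this coding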
            this.continuousVariablesCount  = problem.Dimension;
            this.continuousLowerBounds     = problem.LowerBound;
            this.continuousUpperBounds     = problem.UpperBound;
            this.bitsPerContinuousVariable = bitsPerContinuousVariable;
            this.integerVariablesCount     = 0;
            //this.integerUpperBounds = null;
            this.bitsPerIntegerVariable = 0;

            this.quantization = new Quantization(bitsPerContinuousVariable);
        }
Example #7
        public RealUniformRandomInitializer(OptimizationProblem problem, IGenerator randomNumberGenerator)
        {
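            // capture the box bounds within which initial designs are sampled uniformly at random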
            //problem.CheckInput();
            this.continuousVariablesCount = problem.Dimension;
            this.lowerBounds = problem.LowerBound;
            this.upperBounds = problem.UpperBound;

            if (randomNumberGenerator == null)
            {
                throw new ArgumentException("The random number generator must not be null");
            }
            this.rng = randomNumberGenerator;
        }
        public RealUniformHaltonInitializer(OptimizationProblem problem,
                                            HaltonPointGenerator pointGenerator)
        {
            this.continuousVariablesCount = problem.Dimension;
            this.lowerBounds = problem.LowerBound;
            this.upperBounds = problem.UpperBound;

            if (pointGenerator == null)
            {
                throw new ArgumentException("The Halton point generator must not be null");
            }
            this.pointGenerator = pointGenerator;
        }
Example #9
        public static void FindSolution(IEnumerable <int> pieceLengths, IEnumerable <int> pieceWidths, IEnumerable <int> pieceDemands, int stockLength, int stockWidth, int stockCost, Worker.Worker worker, out CutPattern[] solution)
        {
            int[] pLengths = pieceLengths.ToArray <int>();
            int[] pWidths  = pieceWidths.ToArray <int>();
            int[] pDemands = pieceDemands.ToArray <int>();

            OptimizationProblem problem = new OptimizationProblem(stockLength, stockWidth, stockCost, pLengths, pWidths, pDemands);
            Simplex             simp    = new Simplex(problem);

            solution = simp.Solve(worker);

            worker.SetMax();
            worker.CallFinish();
        }
Example #10
        /// <inheritdoc/>
        public IOptimizationProblem GetOptimizationProblem()
        {
            OptimizerService.OptimizerInfo optimizerInfo = client.GetOptimizerInfo(optimizerHandle);

            OptimizationProblem optimizationProblem = OptimizerServiceDecoder.DecodeOptimizationProblem(optimizerInfo.OptimizationProblem);

            // Add optimization objectives.
            //
            optimizationProblem.Objectives.AddRange(
                optimizerInfo.OptimizationProblem.Objectives.Select(r =>
                                                                    new OptimizationObjective(r.Name, r.Minimize)));

            return(optimizationProblem);
        }
Example #11
        private BayesianOptimizerProxy CreateRemoteOptimizer(OptimizationProblem optimizationProblem)
        {
            GrpcChannel channel = GrpcChannel.ForAddress(optimizerAddressUri);
            var         client  = new MlosOptimizerService.OptimizerServiceClient(channel);

            OptimizerHandle optimizerHandle = client.CreateOptimizer(
                new CreateOptimizerRequest
            {
                OptimizationProblem = optimizationProblem.ToOptimizerServiceOptimizationProblem(),
                OptimizerConfig     = string.Empty,
            });

            return(new BayesianOptimizerProxy(client, optimizerHandle));
        }
Example #12
        private double[] SolveProgram(OptimizationProblem optimizationProgram)
        {
            // construct initial value for optimization to be the current value of the parameters
            var variables = optimizationProgram.Variables;
            var initial   = GetCurrentValues(variables);

            // run the optimizer
            var minimizer = optimizer.Minimize(
                optimizationProgram.Objective,
                optimizationProgram.Constraints,
                optimizationProgram.Variables,
                initial).Last();

            return(minimizer);
        }
Example #13
        public void PolakRibiereRosenbrockParameterFitWithResidualSumOfSquares()
        {
            // true parameters used to generate the training data
            var realTheta = Vector <double> .Build.DenseOfArray(new[] { 1D, 105D });

            var initialTheta = Vector <double> .Build.DenseOfArray(new[] { 2D, 200D });

            // define the hypothesis
            var hypothesis = new RosenbrockHypothesis();

            // define a probability distribution
            var distribution = new ContinuousUniform(-10D, 10D);

            // obtain the test data
            const int dataPoints  = 10;
            var       trainingSet = new List <DataPoint <double> >(dataPoints);

            for (int i = 0; i < dataPoints; ++i)
            {
                var inputs = Vector <double> .Build.Random(2, distribution);

                var output = hypothesis.Evaluate(realTheta, inputs);
                trainingSet.Add(new DataPoint <double>(inputs, output));
            }

            // cost function is sum of squared errors
            var costFunction = new ResidualSumOfSquaresCostFunction(hypothesis, trainingSet);

            // define the optimization problem
            var problem = new OptimizationProblem <double, IDifferentiableCostFunction <double> >(costFunction, initialTheta);

            // define the line search algorithm
            var lineSearch = new SecantMethod();

            // optimize!
            var gd = new PolakRibiereCG(lineSearch)
            {
                ErrorTolerance = 1E-8D
            };
            var result = gd.Minimize(problem);

            // assert!
            var coefficients = result.Coefficients;

            coefficients[0].Should().BeApproximately(realTheta[0], 1D, "because that's the function's [a] parameter");
            coefficients[1].Should().BeApproximately(realTheta[1], 1D, "because that's the function's [b] parameter");
        }
Example #14
        public NelderMeadSimplex(OptimizationProblem optimizationProblem)
        {
            NelderMeadSimplex.OptProblem = optimizationProblem;
            constants = new SimplexConstant[OptProblem.fitnessFunctions.paramValueDictioSize];
            int count = 0;

            foreach (string subModel in OptProblem.fitnessFunctions.ParamValueDictio.Keys.ToList())
            {
                foreach (string parameterName in OptProblem.fitnessFunctions.ParamValueDictio[subModel].Keys.ToList())
                {
                    OptiParameter completeParam = OptProblem.Opt_Parameters.Find(ii => ii.Name == parameterName);
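                    // start each simplex dimension at the midpoint of the parameter's range,
                    // with an initial step of one tenth of the half-range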
                    constants[count] = new SimplexConstant((completeParam.Max + completeParam.Min) / 2, (completeParam.Max - completeParam.Min) / 2 / 10);
                    count++;
                }
            }
        }
Example #15
        protected DifferentialEvolutionAlgorithm(OptimizationProblem optimizationProblem, int populationSize,
                                                 double mutationFactor, double crossoverProbability, IConvergenceCriterion convergenceCriterion,
                                                 Random randomNumberGenerator)
        {
            this.dimension     = optimizationProblem.Dimension;
            this.lowerBound    = optimizationProblem.LowerBound;
            this.upperBound    = optimizationProblem.UpperBound;
            this.designFactory = optimizationProblem.DesignFactory;

            this.populationSize       = populationSize;
            this.mutationFactor       = mutationFactor;
            this.crossoverProbability = crossoverProbability;
            this.convergenceCriterion = convergenceCriterion;

            this.randomNumberGenerator = randomNumberGenerator;
        }
Example #16
        public void TestOptimizationProblemNoContext()
        {
            var in1                = new ContinuousDimension("in_1", 0, 10);
            var in2                = new DiscreteDimension("in_2", 1, 20);
            var inputHypergrid     = new Hypergrid("input", in1, in2);
            var out1               = new ContinuousDimension("out_1", -5, 7);
            var objectiveHypergrid = new Hypergrid("output", out1);
            var objectives         = new OptimizationObjective[]
            {
                new OptimizationObjective("out_1", true),
                new OptimizationObjective("nonExistent", false),
            };
            var optimizationProblem = new OptimizationProblem(inputHypergrid, objectiveHypergrid, objectives);
            var serialized          = OptimizerServiceEncoder.EncodeOptimizationProblem(optimizationProblem);
            var deserialized        = OptimizerServiceDecoder.DecodeOptimizationProblem(serialized);

            Assert.Null(deserialized.ContextSpace);
        }
Example #17
        public BoundaryMutation(OptimizationProblem problem, double mutationProbability, IGenerator randomNumberGenerator)
        {
            //problem.CheckInput();
            this.lowerBounds = problem.LowerBound;
            this.upperBounds = problem.UpperBound;

            if (mutationProbability < 0 || mutationProbability > 1)
            {
                throw new ArgumentException("The mutation probability of each gene must belong to the interval [0,1], but was "
                                            + mutationProbability);
            }
            this.mutationProbability = mutationProbability;

            if (randomNumberGenerator == null)
            {
                throw new ArgumentException("The random number generator must not be null");
            }
            this.rng = randomNumberGenerator;
        }
Example #18
        private static void Main(string[] args)
        {
            OptimizationProblem p      = new OptimizationProblem();
            ReliveAlgorithm     solver = new ReliveAlgorithm();

            solver.Solve(p);

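            // decode the solution genome into integer decision values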
            int[] k = new int[p.n];
            for (int i = 0; i < solver.Solution.NoGenes; i++)
            {
                k[i] = (int)solver.Solution.RealGenes[i];
            }

            StreamWriter sw = new StreamWriter("result.txt");

            Console.WriteLine("Maximizing reliability\r\n");

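            // total cost of the solution: unit costs p.c[i] weighted by the decision values k[i]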
            double cost = 0;

            for (int i = 0; i < solver.Solution.NoGenes; i++)
            {
                cost += p.c[i] * k[i];
            }

            Console.Write("Solution:\r\n");

            for (int i = 0; i < solver.Solution.NoGenes; i++)
            {
                Console.Write($"{k[i]}, ");
                sw.Write($"{k[i]}, ");
            }

            Console.WriteLine($"\r\nCost: {cost:F0}\r\nReliability: {solver.Solution.Fitness:F4}\r\n");
            sw.WriteLine($"\r\nCost: {cost:F0}\r\nReliability: {solver.Solution.Fitness:F4}\r\n");

            double eff = p.ComputeEfficiency(k);

            Console.WriteLine($"Efficiency: {eff:F2}\r\n");
            sw.WriteLine($"Efficiency: {eff:F2}\r\n");

            sw.Close();
        }
        public void UnivariateExponentialRegressionWithResidualSumOfSquares()
        {
            var theta = Vector <double> .Build.DenseOfArray(new[] { 0D, 13500D, -1.7D });

            var initialTheta = Vector <double> .Build.DenseOfArray(new[] { 0D, 10000D, -1D });

            // define the hypothesis
            var hypothesis = new UnivariateExponentialHypothesis();

            // define a probability distribution
            var distribution = new ContinuousUniform(0D, 1000D);

            // obtain the test data
            const int dataPoints  = 100;
            var       trainingSet = new List <DataPoint <double> >(dataPoints);

            for (int i = 0; i < dataPoints; ++i)
            {
                var inputs = Vector <double> .Build.Random(1, distribution);

                var output = hypothesis.Evaluate(theta, inputs);
                trainingSet.Add(new DataPoint <double>(inputs, output));
            }

            // cost function is sum of squared errors
            var costFunction = new ResidualSumOfSquaresCostFunction(hypothesis, trainingSet);

            // define the optimization problem
            var problem = new OptimizationProblem <double, IDifferentiableCostFunction <double> >(costFunction, initialTheta);

            // optimize!
            var gd     = new HagerZhangCG();
            var result = gd.Minimize(problem);

            // assert!
            var coefficients = result.Coefficients;

            coefficients[1].Should().BeApproximately(theta[1], 1000D, "because that's the underlying system's [a] parameter");
            coefficients[2].Should().BeApproximately(theta[2], 1E-2D, "because that's the underlying system's [b] parameter");
            coefficients[0].Should().BeApproximately(theta[0], 1E-5D, "because that's the underlying system's offset");
        }
Example #20
        public void DualLinearRegressionWithResidualSumOfSquares()
        {
            // obtain the test data
            var trainingSet = new List <DataPoint <double> >
            {
                new DataPoint <double>(-1, new [] { -1.5, -1.0 }),
                new DataPoint <double>(0, new [] { 0.5, 1.0 }),
                new DataPoint <double>(1, new [] { 2.5, 3.0 }),
                new DataPoint <double>(2, new [] { 4.5, 5.0 }),
                new DataPoint <double>(3, new [] { 6.5, 6.0 })
            };

            // assume a hypothesis
            var hypothesis          = new DualLinearHypothesis(1);
            var initialCoefficients = Vector <double> .Build.Random(2);

            // cost function is sum of squared errors
            var costFunction = new ResidualSumOfSquaresCostFunction(hypothesis, trainingSet);

            // define the optimization problem
            var problem = new OptimizationProblem <double, IDifferentiableCostFunction <double> >(costFunction, initialCoefficients);

            // define the line search algorithm
            var lineSearch = new SecantMethod()
            {
                ErrorTolerance = 1E-7
            };

            // optimize!
            var gd = new FletcherReevesCG(lineSearch)
            {
                ErrorTolerance = 1E-7
            };
            var result = gd.Minimize(problem);

            // assert!
            var coefficients = result.Coefficients;

            coefficients[0].Should().BeApproximately(0.5, 1E-6D, "because that's the underlying system's intercept");
            coefficients[1].Should().BeApproximately(2, 1E-6D, "because that's the underlying system's slope");
        }
Example #21
 public void Report(OptimizationProblem problem)
 {
     WriteLine("");
     WriteLine("Report for problem " + problem.Name);
     WriteLine("================================================");
     WriteLine("");
     WriteLine("Objective Function");
     WriteLine(String.Format("F = {0,12:G6} [{1}]", problem.ObjectiveFunction.Eval(new Evaluator()), problem.ObjectiveFunction.ToString()));
     WriteLine("");
     WriteLine("Constraints");
     foreach (var constraint in problem.Constraints)
     {
         WriteLine(String.Format("C = {0,12:G6} [{1}]", constraint.Residual(new Evaluator()), constraint.ToString()));
     }
     WriteLine("");
     WriteLine("Decisions");
     foreach (var variable in problem.DecisionVariables)
     {
         WriteLine(String.Format("{0,12:G6} <= {1,10} = {2,12:G6} <= {3,12:G6} [{4}]", variable.LowerBound, variable.FullName, variable.ValueInSI, variable.UpperBound, variable.InternalUnit));
     }
 }
        public ConstantGaussianMutation(OptimizationProblem problem, double standardDeviation, IGenerator randomNumberGenerator)
        {
            //problem.CheckInput();
            this.continuousVariablesCount = problem.Dimension;

            if (standardDeviation <= 0)
            {
                throw new ArgumentException("Standard deviation must be > 0, but was " + standardDeviation);
            }
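            // every continuous variable uses the same, constant standard deviation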
            this.standardDeviations = new double[continuousVariablesCount];
            for (int i = 0; i < problem.Dimension; ++i)
            {
                this.standardDeviations[i] = standardDeviation;
            }

            if (randomNumberGenerator == null)
            {
                throw new ArgumentException("The random number generator must not be null");
            }
            this.normalDistribution = new NormalDistribution(randomNumberGenerator, 0, 1);
        }
Example #24
        public void PolakRibiereRosenbrock()
        {
            // arbitrary starting point for the search
            var initialTheta = Vector <double> .Build.DenseOfArray(new[] { -1D, 1D });

            // define the hypothesis with default parameter
            var rosenbrockParameter = Vector <double> .Build.DenseOfArray(new[] { 1D, 100D });

            var hypothesis = new RosenbrockHypothesis();

            // the cost function is the value of the Rosenbrock hypothesis at the candidate point
            var costFunction = new FunctionValueOptimization <double>(hypothesis, rosenbrockParameter);

            // define the optimization problem
            var problem = new OptimizationProblem <double, IDifferentiableCostFunction <double> >(costFunction, initialTheta);

            // define the line search algorithm
            var lineSearch = new SecantMethod
            {
                LineSearchStepSize = 1E-5D
            };

            // optimize!
            var gd = new PolakRibiereCG(lineSearch)
            {
                ErrorTolerance = 1E-8D,
                MaxIterations  = 15000
            };
            var result = gd.Minimize(problem);

            // assert!
            var coefficients = result.Coefficients;

            coefficients[0].Should().BeApproximately(rosenbrockParameter[0], 1E-5D, "because the Rosenbrock function has a minimum at x={0}, y={1}", rosenbrockParameter[0], Math.Sqrt(rosenbrockParameter[0]));
            coefficients[1].Should().BeApproximately(Math.Sqrt(rosenbrockParameter[0]), 1E-5D, "because the Rosenbrock function has a minimum at x={0}, y={1}", rosenbrockParameter[0], Math.Sqrt(rosenbrockParameter[0]));
        }
 public StandardBinaryCoding(OptimizationProblem problem, int bitsPerContinuousVariable, int bitsPerIntegerVariable) :
     base(problem, bitsPerContinuousVariable, bitsPerIntegerVariable)
 {
 }
 protected Builder(OptimizationProblem problem)
 {
     this.problem = problem;
 }
Example #27
 public CMAESMain(OptimizationProblem optimizationProblem)
 {
     CMAESMain.OptProblem = optimizationProblem;
 }
Example #28
 public bool Solve(OptimizationProblem problem)
 {
     ProblemData = problem;
     return(Solve());
 }
 public RealCodedGeneticAlgorithmBuilder(OptimizationProblem problem) : base(problem)
 {
     this.problem = problem;
 }
Example #30
 public Builder(OptimizationProblem optimizationProblem)
 {
     ProblemChecker.Check(optimizationProblem);
     this.optimizationProblem = optimizationProblem;
 }
 public Builder(OptimizationProblem optimizationProblem)
 {
     this.optimizationProblem = optimizationProblem;
 }
        public void UnivariateExponentialRegressionWithResidualSumOfSquares()
        {
            var theta = Vector<double>.Build.DenseOfArray(new[] { 0D, 13500D, -1.7D });
            var initialTheta = Vector<double>.Build.DenseOfArray(new[] { 0D, 10000D, -1D });

            // define the hypothesis
            var hypothesis = new UnivariateExponentialHypothesis();

            // define a probability distribution
            var distribution = new ContinuousUniform(0D, 1000D);

            // obtain the test data
            const int dataPoints = 100;
            var trainingSet = new List<DataPoint<double>>(dataPoints);
            for (int i = 0; i < dataPoints; ++i)
            {
                var inputs = Vector<double>.Build.Random(1, distribution);
                var output = hypothesis.Evaluate(theta, inputs);
                trainingSet.Add(new DataPoint<double>(inputs, output));
            }

            // cost function is sum of squared errors
            var costFunction = new ResidualSumOfSquaresCostFunction(hypothesis, trainingSet);

            // define the optimization problem
            var problem = new OptimizationProblem<double, IDifferentiableCostFunction<double>>(costFunction, initialTheta);

            // optimize!
            var gd = new ResilientErrorGD();
            var result = gd.Minimize(problem);

            // assert!
            var coefficients = result.Coefficients;
            coefficients[1].Should().BeApproximately(theta[1], 1000D, "because that's the underlying system's [a] parameter");
            coefficients[2].Should().BeApproximately(theta[2], 1E-2D, "because that's the underlying system's [b] parameter");
            coefficients[0].Should().BeApproximately(theta[0], 1E-5D, "because that's the underlying system's offset");
        }
        public void RosenbrockParameterFitWithResidualSumOfSquares()
        {
            // true parameters used to generate the training data
            var realTheta = Vector<double>.Build.DenseOfArray(new []{1D, 105D});
            var initialTheta = Vector<double>.Build.DenseOfArray(new []{2D, 200D});

            // define the hypothesis
            var hypothesis = new RosenbrockHypothesis();

            // define a probability distribution
            var distribution = new ContinuousUniform(-10D, 10D);

            // obtain the test data
            const int dataPoints = 10;
            var trainingSet = new List<DataPoint<double>>(dataPoints);
            for (int i = 0; i < dataPoints; ++i)
            {
                var inputs = Vector<double>.Build.Random(2, distribution);
                var output = hypothesis.Evaluate(realTheta, inputs);
                trainingSet.Add(new DataPoint<double>(inputs, output));
            }

            // cost function is sum of squared errors
            var costFunction = new ResidualSumOfSquaresCostFunction(hypothesis, trainingSet);

            // define the optimization problem
            var problem = new OptimizationProblem<double, IDifferentiableCostFunction<double>>(costFunction, initialTheta);

            // optimize!
            var gd = new ResilientErrorGD
            {
                ErrorTolerance = 0.0D // TODO: actually use it
            };
            var result = gd.Minimize(problem);

            // assert!
            var coefficients = result.Coefficients;
            coefficients[0].Should().BeApproximately(realTheta[0], 1D, "because that's the function's [a] parameter");
            coefficients[1].Should().BeApproximately(realTheta[1], 1D, "because that's the function's [b] parameter");
        }
Example #34
 public BoundaryMutation(OptimizationProblem problem, double mutationProbability) :
     this(problem, mutationProbability, RandomNumberGenerationUtilities.troschuetzRandom)
 {
 }
Example #35
        /// <summary>
        /// This is the entry point for setting up the message handlers for the
        /// messages code generated from the (partial) structs defined for this
        /// smart component in the CodeGen/SmartCache.cs.
        /// </summary>
        /// <remarks>
        /// See class comments for further details.
        /// </remarks>
        static AssemblyInitializer()
        {
            // Setup message callbacks.
            //
            // Note: these message properties are code generated from the
            // (partial) structs in CodeGen/SmartCache.cs
            //
            // See out/Mlos.CodeGen.out/SmartCache/*.cs for the C# code
            // generation output from those partial definitions.
            //
            SmartCacheProxy.CacheRequestEventMessage.Callback       = CacheRequestEventMessageHandler;
            SmartCacheProxy.RequestNewConfigurationMessage.Callback = RequestNewConfigurationMessageHandler;

            // Create smart cache parameter search space.
            //
            // These hypergrids define the combination of valid ranges
            // of values for the different tunables.
            // Note that some of these are interdependent.
            //
            // TODO: Eventually this will also be code generated from additional
            // "domain range" attributes on the "ScalarSettings" defined for the
            // component (see also CodeGen/SmartCache.cs)
            //
            Hypergrid cacheSearchSpace = new Hypergrid(
                name: "smart_cache_config",
                dimension: new CategoricalDimension("cache_implementation", CacheEvictionPolicy.LeastRecentlyUsed, CacheEvictionPolicy.MostRecentlyUsed))
                                         .Join(
                subgrid: new Hypergrid(
                    name: "lru_cache_config",
                    dimension: new DiscreteDimension("cache_size", min: 1, max: 1 << 12)),
                onExternalDimension: new CategoricalDimension("cache_implementation", CacheEvictionPolicy.LeastRecentlyUsed))
                                         .Join(
                subgrid: new Hypergrid(
                    name: "mru_cache_config",
                    dimension: new DiscreteDimension("cache_size", min: 1, max: 1 << 12)),
                onExternalDimension: new CategoricalDimension("cache_implementation", CacheEvictionPolicy.MostRecentlyUsed));

            // Create optimization problem.
            //
            // Here we declare to the optimizer what our desired output from the
            // component to optimize is.
            //
            // In this case we declare "hit rate", which will be calculated as a
            // percentage, is the thing we want the optimizer to improve.
            //
            var optimizationProblem = new OptimizationProblem
            {
                ParameterSpace = cacheSearchSpace,
                ContextSpace   = null,
                ObjectiveSpace = new Hypergrid(
                    name: "objectives",
                    dimensions: new ContinuousDimension(name: "HitRate", min: 0.0, max: 1.0)),
            };

            // Define optimization objective.
            //
            optimizationProblem.Objectives.Add(
                new OptimizationObjective
            {
                // Tell the optimizer that we want to maximize hit rate.
                //
                Name     = "HitRate",
                Minimize = false,
            });

            // Get a local reference to the optimizer to reuse when processing messages later on.
            //
            // Note: we read this from a global variable that should have been
            // setup for the Mlos.Agent (e.g. in the Mlos.Agent.Server).
            //
            IOptimizerFactory optimizerFactory = MlosContext.OptimizerFactory;

            OptimizerProxy = optimizerFactory?.CreateRemoteOptimizer(optimizationProblem: optimizationProblem);
        }