Nonlinear objective function.
Inherits: IObjectiveFunction
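A minimal usage sketch (not one of the examples below; the numbers are illustrative): a NonlinearObjectiveFunction is built from a lambda for the function and, optionally, one for its gradient, and can be evaluated directly before being handed to a solver.

    // Sketch: define f(x, y) = x² + y² with its gradient and evaluate both.
    // Assumes "using Accord.Math.Optimization;".
    var f = new NonlinearObjectiveFunction(2,
        function: x => x[0] * x[0] + x[1] * x[1],
        gradient: x => new[] { 2 * x[0], 2 * x[1] });

    double value  = f.Function(new double[] { 1.0, 2.0 });   // 5.0
    double[] grad = f.Gradient(new double[] { 1.0, 2.0 });   // { 2.0, 4.0 }
    int n = f.NumberOfVariables;                             // 2

The same object can then be passed to any of the solvers shown in the examples below (Cobyla, NelderMead, Subplex, AugmentedLagrangian, ResilientBackpropagation, and so on).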
Example #1
        public void ConstructorTest2()
        {
            // Minimize f(x, y) = x*y subject to 1 - x² - y² >= 0.
            var function = new NonlinearObjectiveFunction(2, x => x[0] * x[1]);

            NonlinearConstraint[] constraints = 
            {
                new NonlinearConstraint(function, x => 1.0 - x[0] * x[0] - x[1] * x[1])
            };

            Cobyla cobyla = new Cobyla(function, constraints);

            for (int i = 0; i < cobyla.Solution.Length; i++)
                cobyla.Solution[i] = 1;

            Assert.IsTrue(cobyla.Minimize());
            double minimum = cobyla.Value;

            double[] solution = cobyla.Solution;

            double sqrthalf = Math.Sqrt(0.5);

            Assert.AreEqual(-0.5, minimum, 1e-10);
            Assert.AreEqual(sqrthalf, solution[0], 1e-5);
            Assert.AreEqual(-sqrthalf, solution[1], 1e-5);

            double expectedMinimum = function.Function(cobyla.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }
        public void QuadraticConstraintConstructorTest()
        {
            IObjectiveFunction objective = null;

            double[,] quadraticTerms = 
            {
                {  1, 2, 3 },
                {  4, 5, 6 },
                {  7, 8, 9 },
            };

            double[] linearTerms = { 1, 2, 3 };

            objective = new NonlinearObjectiveFunction(3, f => f[0] + f[1] + f[2]);

            QuadraticConstraint target = new QuadraticConstraint(objective,
                quadraticTerms, linearTerms,
                ConstraintType.LesserThanOrEqualTo, 0);

            var function = target.Function;
            var gradient = target.Gradient;

            FiniteDifferences fd = new FiniteDifferences(3, function);

            double[][] x =
            {
                new double[] { 1, 2, 3 },
                new double[] { 3, 1, 4 },
                new double[] { -6 , 5, 9 },
                new double[] { 31, 25, 246 },
                new double[] { -0.102, 0, 10 },
            };


            { // Function test
                for (int i = 0; i < x.Length; i++)
                {
                    double expected =
                        (x[i].Multiply(quadraticTerms)).InnerProduct(x[i])
                        + linearTerms.InnerProduct(x[i]);

                    double actual = function(x[i]);

                    Assert.AreEqual(expected, actual, 1e-8);
                }
            }

            { // Gradient test
                for (int i = 0; i < x.Length; i++)
                {
                    double[] expected = fd.Compute(x[i]);
                    double[] actual = gradient(x[i]);

                    for (int j = 0; j < actual.Length; j++)
                        Assert.AreEqual(expected[j], actual[j], 1e-8);
                }
            }


        }
        /// <summary>
        ///   Initializes a new instance of the <see cref="BaseOptimizationMethod"/> class.
        /// </summary>
        /// 
        /// <param name="function">The objective function whose optimum values should be found.</param>
        /// 
        protected BaseOptimizationMethod(NonlinearObjectiveFunction function)
        {
            if (function == null)
                throw new ArgumentNullException("function");

            init(function.NumberOfVariables);
            this.Function = function.Function;
        }
        public void AugmentedLagrangianSolverConstructorTest1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // min 100(y-x*x)²+(1-x)²
            //
            // s.t.  x <= 0
            //       y <= 0
            //

            var f = new NonlinearObjectiveFunction(2,

                function: (x) => 100 * Math.Pow(x[1] - x[0] * x[0], 2) + Math.Pow(1 - x[0], 2),

                gradient: (x) => new[] 
                {
                    2.0 * (200.0 * x[0]*x[0]*x[0] - 200.0 * x[0] * x[1] + x[0] - 1), // df/dx
                    200 * (x[1] - x[0]*x[0])                                         // df/dy
                }

            );


            var constraints = new List<NonlinearConstraint>();

            constraints.Add(new NonlinearConstraint(f,

                function: (x) => x[0],
                gradient: (x) => new[] { 1.0, 0.0 },

                shouldBe: ConstraintType.LesserThanOrEqualTo, value: 0
            ));

            constraints.Add(new NonlinearConstraint(f,

                function: (x) => x[1],
                gradient: (x) => new[] { 0.0, 1.0 },

                shouldBe: ConstraintType.LesserThanOrEqualTo, value: 0
            ));

            var solver = new AugmentedLagrangian(f, constraints);

            Assert.IsTrue(solver.Minimize());
            double minValue = solver.Value;

            Assert.IsFalse(Double.IsNaN(minValue));
            Assert.AreEqual(1, minValue, 1e-5);
            Assert.AreEqual(0, solver.Solution[0], 1e-5);
            Assert.AreEqual(0, solver.Solution[1], 1e-5);
        }
Example #5
        public void ConstructorTest4()
        {
            var function = new NonlinearObjectiveFunction(2, x =>
                Math.Pow(x[0] * x[0] - x[1], 2.0) + Math.Pow(1.0 + x[0], 2.0));

            NelderMead solver = new NelderMead(function);

            Assert.IsTrue(solver.Minimize());
            double minimum = solver.Value;
            double[] solution = solver.Solution;

            Assert.AreEqual(0, minimum, 1e-10);
            Assert.AreEqual(-1, solution[0], 1e-5);
            Assert.AreEqual(1, solution[1], 1e-4);

            double expectedMinimum = function.Function(solver.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }
Example #6
        public void ConstructorTest4()
        {
            // Weak version of Rosenbrock's problem.
            var function = new NonlinearObjectiveFunction(2, x =>
                Math.Pow(x[0] * x[0] - x[1], 2.0) + Math.Pow(1.0 + x[0], 2.0));

            Subplex solver = new Subplex(function);

            Assert.IsTrue(solver.Minimize());
            double minimum = solver.Value;
            double[] solution = solver.Solution;

            Assert.AreEqual(2, solution.Length);
            Assert.AreEqual(0, minimum, 1e-10);
            Assert.AreEqual(-1, solution[0], 1e-5);
            Assert.AreEqual(1, solution[1], 1e-4);

            double expectedMinimum = function.Function(solver.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }
Example #7
 // We don't use this at the moment
 static List<NonlinearConstraint> CreateConstraints (Parameter[] x, NonlinearObjectiveFunction f)
 {
     // Now we can start stating the constraints 
     var nlConstraints = x.SelectMany ((p, i) =>  {
         Func<double[], double> cfn = args => x [i].Value;
         return new[] {
             new NonlinearConstraint 
                 ( f
                 , function: cfn
                 , shouldBe: ConstraintType.GreaterThanOrEqualTo
                 , value: p.Min
                 , gradient: Grad (x.Length, cfn)),
             new NonlinearConstraint 
                 ( f
                 , function: cfn
                 , shouldBe: ConstraintType.LesserThanOrEqualTo
                 , value: p.Max
                 , gradient: Grad (x.Length, cfn)),
         };
     }).ToList ();
     return nlConstraints;
 }
        public void ConstructorTest2()
        {
            Accord.Math.Tools.SetupGenerator(0);

            var function = new NonlinearObjectiveFunction(2,
                function: x => x[0] * x[1],
                gradient: x => new[] { x[1], x[0] });

            NonlinearConstraint[] constraints = 
            {
                new NonlinearConstraint(function,
                    function: x => 1.0 - x[0] * x[0] - x[1] * x[1],
                    gradient: x => new [] { -2 * x[0], -2 * x[1]}),
                new NonlinearConstraint(function,
                    function: x => x[0],
                    gradient: x => new [] { 1.0, 0.0}),
            };

            var target = new ConjugateGradient(2);
            AugmentedLagrangian solver = new AugmentedLagrangian(target, function, constraints);
            
            Assert.IsTrue(solver.Minimize());
            double minimum = solver.Value;

            double[] solution = solver.Solution;

            double sqrthalf = Math.Sqrt(0.5);

            Assert.AreEqual(-0.5, minimum, 1e-5);
            Assert.AreEqual(sqrthalf, solution[0], 1e-5);
            Assert.AreEqual(-sqrthalf, solution[1], 1e-5);

            double expectedMinimum = function.Function(solver.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }
        private static void test2(IGradientOptimizationMethod inner)
        {
            // maximize 2x + 3y, s.t. 2x² + 2y² <= 50
            //
            // http://www.wolframalpha.com/input/?i=max+2x+%2B+3y%2C+s.t.+2x%C2%B2+%2B+2y%C2%B2+%3C%3D+50

            // Max x' * c
            //  x

            // s.t. x' * A * x <= k
            //      x' * i     = 1
            // lower_bound < x < upper_bound

            double[] c = { 2, 3 };
            double[,] A = { { 2, 0 }, { 0, 2 } };
            double k = 50;

            // Create the objective function
            var objective = new NonlinearObjectiveFunction(2,
                function: (x) => x.InnerProduct(c),
                gradient: (x) => c
            );

            // Test objective
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 10; j++)
                {
                    double expected = i * 2 + j * 3;
                    double actual = objective.Function(new double[] { i, j });
                    Assert.AreEqual(expected, actual);
                }
            }


            // Create the optimization constraints
            var constraints = new List<NonlinearConstraint>();

            constraints.Add(new QuadraticConstraint(objective,
                quadraticTerms: A,
                shouldBe: ConstraintType.LesserThanOrEqualTo, value: k
            ));


            // Test first constraint
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 10; j++)
                {
                    var input = new double[] { i, j };

                    double expected = i * (2 * i + 0 * j) + j * (0 * i + 2 * j);
                    double actual = constraints[0].Function(input);
                    Assert.AreEqual(expected, actual);
                }
            }


            // Create the solver algorithm
            AugmentedLagrangian solver =
                new AugmentedLagrangian(inner, objective, constraints);

            Assert.AreEqual(inner, solver.Optimizer);

            Assert.IsTrue(solver.Maximize());
            double maxValue = solver.Value;

            Assert.AreEqual(18.02, maxValue, 1e-2);
            Assert.AreEqual(2.77, solver.Solution[0], 1e-2);
            Assert.AreEqual(4.16, solver.Solution[1], 1e-2);
        }
Example #10
        public void ConstructorTest6_3()
        {
            bool thrown = false;

            try
            {
                var function = new NonlinearObjectiveFunction(2, x => -x[0] - x[1]);

                // The second constraint declares 4 variables while the objective
                // function has only 2, so an exception is expected.
                NonlinearConstraint[] constraints = 
                {
                    new NonlinearConstraint(2, x =>  x[1] - x[0] * x[0]),
                    new NonlinearConstraint(4, x =>  1.0 - x[0] * x[0] - x[1] * x[1]),
                };

                Cobyla cobyla = new Cobyla(function, constraints);

                Assert.IsTrue(cobyla.Minimize());
                double minimum = cobyla.Value;
            }
            catch (Exception)
            {
                thrown = true;
            }

            Assert.IsTrue(thrown);
        }
Example #11
        public void ConstructorTest7()
        {
            // This problem is taken from Fletcher's book Practical Methods of
            // Optimization and has the equation number (14.4.2).
            var function = new NonlinearObjectiveFunction(3, x => x[2]);

            NonlinearConstraint[] constraints = 
            {
                new NonlinearConstraint(3, x=> 5.0 * x[0] - x[1] + x[2]),
                new NonlinearConstraint(3, x =>  x[2] - x[0] * x[0] - x[1] * x[1] - 4.0 * x[1]),
                new NonlinearConstraint(3, x =>  x[2] - 5.0 * x[0] - x[1]),
            };

            Cobyla cobyla = new Cobyla(function, constraints);

            Assert.IsTrue(cobyla.Minimize());
            double minimum = cobyla.Value;
            double[] solution = cobyla.Solution;

            Assert.AreEqual(-3, minimum, 1e-5);
            Assert.AreEqual(0.0, solution[0], 1e-5);
            Assert.AreEqual(-3.0, solution[1], 1e-5);
            Assert.AreEqual(-3.0, solution[2], 1e-5);

            double expectedMinimum = function.Function(cobyla.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }
Example #12
 /// <summary>
 ///   Creates a new <see cref="ResilientBackpropagation"/> function optimizer.
 /// </summary>
 ///
 /// <param name="function">The function to be optimized.</param>
 ///
 public ResilientBackpropagation(NonlinearObjectiveFunction function)
     : this(function.NumberOfVariables, function.Function, function.Gradient)
 {
 }
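A short usage sketch for this constructor; it assumes ResilientBackpropagation exposes the same Minimize(), Value and Solution members used by the other gradient-based optimizers on this page, and the objective below is only an illustration.

    // Sketch: minimize f(x, y) = (x - 3)² + (y + 1)² with Rprop.
    // This constructor requires the objective to provide a gradient.
    var f = new NonlinearObjectiveFunction(2,
        function: x => Math.Pow(x[0] - 3, 2) + Math.Pow(x[1] + 1, 2),
        gradient: x => new[] { 2 * (x[0] - 3), 2 * (x[1] + 1) });

    var rprop = new ResilientBackpropagation(f);

    bool converged = rprop.Minimize();   // solution should approach (3, -1)
    double minimum = rprop.Value;        // should approach 0
    double[] point = rprop.Solution;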
Example #13
 /// <summary>
 ///   Creates a new instance of the L-BFGS optimization algorithm.
 /// </summary>
 ///
 /// <param name="function">The function to be optimized.</param>
 ///
 public BroydenFletcherGoldfarbShanno(NonlinearObjectiveFunction function)
     : this(function.NumberOfVariables, function.Function, function.Gradient)
 {
 }
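A stand-alone usage sketch for the L-BFGS solver; it assumes the LineSearch and Corrections properties that other examples on this page set on this class, and the Rosenbrock objective is only an illustration.

    // Sketch: minimize the Rosenbrock function f(x, y) = (1 - x)² + 100(y - x²)².
    var f = new NonlinearObjectiveFunction(2,
        function: x => Math.Pow(1 - x[0], 2) + 100 * Math.Pow(x[1] - x[0] * x[0], 2),
        gradient: x => new[]
        {
            -2 * (1 - x[0]) - 400 * x[0] * (x[1] - x[0] * x[0]),  // df/dx
            200 * (x[1] - x[0] * x[0])                            // df/dy
        });

    var lbfgs = new BroydenFletcherGoldfarbShanno(f)
    {
        LineSearch = LineSearch.BacktrackingArmijo,
        Corrections = 10
    };

    bool converged = lbfgs.Minimize();   // solution should approach (1, 1)
    double minimum = lbfgs.Value;        // should approach 0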
Example #14
        public void SubspaceTest1()
        {
            var function = new NonlinearObjectiveFunction(5, x =>
                10.0 * Math.Pow(x[0] * x[0] - x[1], 2.0) + Math.Pow(1.0 + x[0], 2.0));

            NelderMead solver = new NelderMead(function);

            solver.NumberOfVariables = 2;

            Assert.IsTrue(solver.Minimize());
            double minimum = solver.Value;
            double[] solution = solver.Solution;

            Assert.AreEqual(5, solution.Length);
            Assert.AreEqual(-0, minimum, 1e-6);
            Assert.AreEqual(-1, solution[0], 1e-3);
            Assert.AreEqual(+1, solution[1], 1e-3);

            double expectedMinimum = function.Function(solver.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }
Example #15
 /// <summary>
 ///   Creates a new instance of the Augmented Lagrangian algorithm.
 /// </summary>
 /// 
 /// <param name="function">The objective function to be optimized.</param>
 /// <param name="constraints">
 ///   The <see cref="NonlinearConstraint"/>s to which the solution must be subjected.</param>
 /// 
 public AugmentedLagrangian(NonlinearObjectiveFunction function, IEnumerable<NonlinearConstraint> constraints)
     : base(function.NumberOfVariables)
 {
     init(function, constraints, null);
 }
Example #16
 /// <summary>
 ///   Creates a new instance of the Augmented Lagrangian algorithm.
 /// </summary>
 ///
 /// <param name="function">The objective function to be optimized.</param>
 /// <param name="constraints">
 ///   The <see cref="NonlinearConstraint"/>s to which the solution must be subjected.</param>
 ///
 public AugmentedLagrangian(NonlinearObjectiveFunction function, IEnumerable <NonlinearConstraint> constraints)
     : base(function.NumberOfVariables)
 {
     init(function, constraints, null);
 }
Example #17
        public void ConstructorTest5()
        {
            var function = new NonlinearObjectiveFunction(2, x =>
                10.0 * Math.Pow(x[0] * x[0] - x[1], 2.0) + Math.Pow(1.0 + x[0], 2.0));

            Subplex solver = new Subplex(function);

            Assert.IsTrue(solver.Minimize());
            double minimum = solver.Value;
            double[] solution = solver.Solution;

            Assert.AreEqual(-0, minimum, 1e-6);
            Assert.AreEqual(-1, solution[0], 1e-3);
            Assert.AreEqual(+1, solution[1], 1e-3);

            double expectedMinimum = function.Function(solver.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }
Example #18
        public void ConstructorTest8()
        {
            // This problem is taken from page 66 of Hock and Schittkowski's book Test
            // Examples for Nonlinear Programming Codes. It is their test problem Number
            // 43, and has the name Rosen-Suzuki.
            var function = new NonlinearObjectiveFunction(4, x => x[0] * x[0]
                + x[1] * x[1] + 2.0 * x[2] * x[2]
                + x[3] * x[3] - 5.0 * x[0] - 5.0 * x[1]
                - 21.0 * x[2] + 7.0 * x[3]);

            NonlinearConstraint[] constraints = 
            {
                new NonlinearConstraint(4, x=> 8.0 - x[0] * x[0] 
                    - x[1] * x[1] - x[2] * x[2] - x[3] * x[3] - x[0] + x[1] - x[2] + x[3]),

                new NonlinearConstraint(4, x => 10.0 - x[0] * x[0] 
                    - 2.0 * x[1] * x[1] - x[2] * x[2] - 2.0 * x[3] * x[3] + x[0] + x[3]),

                new NonlinearConstraint(4, x => 5.0 - 2.0 * x[0] * x[0] 
                    - x[1] * x[1] - x[2] * x[2] - 2.0 * x[0] + x[1] + x[3])
            };

            Cobyla cobyla = new Cobyla(function, constraints);

            Assert.IsTrue(cobyla.Minimize());
            double minimum = cobyla.Value;
            double[] solution = cobyla.Solution;

            double[] expected = 
            {
                0.0, 1.0, 2.0, -1.0
            };

            for (int i = 0; i < expected.Length; i++)
                Assert.AreEqual(expected[i], cobyla.Solution[i], 1e-4);
            Assert.AreEqual(-44, minimum, 1e-10);

            double expectedMinimum = function.Function(cobyla.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }
 /// <summary>
 ///   Creates a new <see cref="ResilientBackpropagation"/> function optimizer.
 /// </summary>
 /// 
 /// <param name="function">The function to be optimized.</param>
 /// 
 public ResilientBackpropagation(NonlinearObjectiveFunction function)
     : this(function.NumberOfVariables, function.Function, function.Gradient)
 {
 }
Example #20
        public void ConstructorTest9()
        {
            // This problem is taken from page 111 of Hock and Schittkowski's
            // book Test Examples for Nonlinear Programming Codes. It is their
            // test problem Number 100.
            var function = new NonlinearObjectiveFunction(7, x =>
                Math.Pow(x[0] - 10.0, 2.0) + 5.0 * Math.Pow(x[1] - 12.0, 2.0) + Math.Pow(x[2], 4.0) +
                3.0 * Math.Pow(x[3] - 11.0, 2.0) + 10.0 * Math.Pow(x[4], 6.0) + 7.0 * x[5] * x[5] + Math.Pow(x[6], 4.0) -
                4.0 * x[5] * x[6] - 10.0 * x[5] - 8.0 * x[6]);

            NonlinearConstraint[] constraints = 
            {
                new NonlinearConstraint(7, x => 127.0 - 2.0 * x[0] * x[0] - 3.0 * Math.Pow(x[1], 4.0)
                    - x[2] - 4.0 * x[3] * x[3] - 5.0 * x[4]),
                new NonlinearConstraint(7, x => 282.0 - 7.0 * x[0] - 3.0 * x[1] - 10.0 * x[2] * x[2] - x[3] + x[4]),
                new NonlinearConstraint(7, x => 196.0 - 23.0 * x[0] - x[1] * x[1] - 6.0 * x[5] * x[5] + 8.0 * x[6]),
                new NonlinearConstraint(7, x => -4.0 * x[0] * x[0] - x[1] * x[1] + 3.0 * x[0] * x[1] 
                    - 2.0 * x[2] * x[2] - 5.0 * x[5] + 11.0 * x[6])
            };

            Cobyla cobyla = new Cobyla(function, constraints);

            Assert.IsTrue(cobyla.Minimize());
            double minimum = cobyla.Value;
            double[] solution = cobyla.Solution;

            double[] expected = 
            {
                2.330499, 1.951372, -0.4775414, 4.365726, -0.624487, 1.038131, 1.594227
            };

            for (int i = 0; i < expected.Length; i++)
                Assert.AreEqual(expected[i], cobyla.Solution[i], 1e-4);
            Assert.AreEqual(680.63005737443393, minimum, 1e-6);

            double expectedMinimum = function.Function(cobyla.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }
        public void ConstructorTest3()
        {
            // minimize f(x) = x*y*z, 
            // s.t. 
            //   
            //    1 - x² - 2y² - 3z² > 0
            //    x > 0,
            //    y > 0
            //

            // Easy three dimensional minimization in ellipsoid.
            var function = new NonlinearObjectiveFunction(3,
                function: x => x[0] * x[1] * x[2], 
                gradient: x => new[] { x[1] * x[2], x[0] * x[2], x[0] * x[1] });

            NonlinearConstraint[] constraints = 
            {
                new NonlinearConstraint(3,
                    function: x =>  1.0 - x[0] * x[0] - 2.0 * x[1] * x[1] - 3.0 * x[2] * x[2],
                    gradient: x =>  new[] { -2.0 * x[0],  -4.0 * x[1], -6.0 * x[2] }),
                new NonlinearConstraint(3,
                    function: x =>  x[0],
                    gradient: x =>  new[] { 1.0, 0, 0 }),
                new NonlinearConstraint(3,
                    function: x =>  x[1],
                    gradient: x =>  new[] { 0, 1.0, 0 }),
                new NonlinearConstraint(3,
                    function: x =>  -x[2],
                    gradient: x =>  new[] { 0, 0, -1.0 }),
            };

            for (int i = 0; i < constraints.Length; i++)
            {
                Assert.AreEqual(ConstraintType.GreaterThanOrEqualTo, constraints[i].ShouldBe);
                Assert.AreEqual(0, constraints[i].Value);
            }

            var inner = new BroydenFletcherGoldfarbShanno(3);
            inner.LineSearch = LineSearch.BacktrackingArmijo;
            inner.Corrections = 10;

            var solver = new AugmentedLagrangian(inner, function, constraints);

            Assert.AreEqual(inner, solver.Optimizer);

            Assert.IsTrue(solver.Minimize());
            double minimum = solver.Value;
            double[] solution = solver.Solution;

            double[] expected = 
            {
                1.0 / Math.Sqrt(3.0), 1.0 / Math.Sqrt(6.0), -1.0 / 3.0
            };


            for (int i = 0; i < expected.Length; i++)
                Assert.AreEqual(expected[i], solver.Solution[i], 1e-3);
            Assert.AreEqual(-0.078567420132031968, minimum, 1e-4);

            double expectedMinimum = function.Function(solver.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }
Example #22
        private void init(NonlinearObjectiveFunction function,
            IEnumerable<NonlinearConstraint> constraints, IGradientOptimizationMethod innerSolver)
        {
            if (function != null)
            {
                if (function.NumberOfVariables != NumberOfVariables)
                {
                    throw new ArgumentOutOfRangeException("function",
                        "Incorrect number of variables in the objective function. " +
                        "The number of variables must match the number of variables set in the solver.");
                }

                this.Function = function.Function;
                this.Gradient = function.Gradient;
            }

            if (innerSolver == null)
            {
                innerSolver = new BroydenFletcherGoldfarbShanno(NumberOfVariables)
                {
                    LineSearch = Optimization.LineSearch.BacktrackingArmijo,
                    Corrections = 10
                };
            }

            List<NonlinearConstraint> equality = new List<NonlinearConstraint>();
            List<NonlinearConstraint> lesserThan = new List<NonlinearConstraint>();
            List<NonlinearConstraint> greaterThan = new List<NonlinearConstraint>();

            foreach (var c in constraints)
            {
                switch (c.ShouldBe)
                {
                    case ConstraintType.EqualTo:
                        equality.Add(c); break;

                    case ConstraintType.GreaterThanOrEqualTo:
                        greaterThan.Add(c); break;

                    case ConstraintType.LesserThanOrEqualTo:
                        lesserThan.Add(c); break;

                    default:
                        throw new ArgumentException("Unknown constraint type.", "constraints");
                }
            }

            this.lesserThanConstraints = lesserThan.ToArray();
            this.greaterThanConstraints = greaterThan.ToArray();
            this.equalityConstraints = equality.ToArray();

            this.lambda = new double[equalityConstraints.Length];
            this.mu = new double[lesserThanConstraints.Length];
            this.nu = new double[greaterThanConstraints.Length];

            this.dualSolver = innerSolver;
            dualSolver.Function = objectiveFunction;
            dualSolver.Gradient = objectiveGradient;
        }
Example #23
        public void ConstructorTest10()
        {
            // This problem is taken from page 415 of Luenberger's book Applied
            // Nonlinear Programming. It is to maximize the area of a hexagon of
            // unit diameter.
            var function = new NonlinearObjectiveFunction(9, x =>
                -0.5 * (x[0] * x[3] - x[1] * x[2] + x[2] * x[8]
                - x[4] * x[8] + x[4] * x[7] - x[5] * x[6]));

            NonlinearConstraint[] constraints = 
            {
                new NonlinearConstraint(9, x => 1.0 - x[2] * x[2] - x[3] * x[3]),
                new NonlinearConstraint(9, x =>  1.0 - x[8] * x[8]),
                new NonlinearConstraint(9, x =>  1.0 - x[4] * x[4] - x[5] * x[5]),
                new NonlinearConstraint(9, x =>  1.0 - x[0] * x[0] - Math.Pow(x[1] - x[8], 2.0)),
                new NonlinearConstraint(9, x =>  1.0 - Math.Pow(x[0] - x[4], 2.0) - Math.Pow(x[1] - x[5], 2.0)),
                new NonlinearConstraint(9, x =>  1.0 - Math.Pow(x[0] - x[6], 2.0) - Math.Pow(x[1] - x[7], 2.0)),
                new NonlinearConstraint(9, x =>  1.0 - Math.Pow(x[2] - x[4], 2.0) - Math.Pow(x[3] - x[5], 2.0)),
                new NonlinearConstraint(9, x =>  1.0 - Math.Pow(x[2] - x[6], 2.0) - Math.Pow(x[3] - x[7], 2.0)),
                new NonlinearConstraint(9, x =>  1.0 - x[6] * x[6] - Math.Pow(x[7] - x[8], 2.0)),
                new NonlinearConstraint(9, x =>  x[0] * x[3] - x[1] * x[2]),
                new NonlinearConstraint(9, x =>  x[2] * x[8]),
                new NonlinearConstraint(9, x =>  -x[4] * x[8]),
                new NonlinearConstraint(9, x =>  x[4] * x[7] - x[5] * x[6]),
                new NonlinearConstraint(9, x =>  x[8]),
            };

            Cobyla cobyla = new Cobyla(function, constraints);

            for (int i = 0; i < cobyla.Solution.Length; i++)
                cobyla.Solution[i] = 1;

            Assert.IsTrue(cobyla.Minimize());
            double minimum = cobyla.Value;
            double[] solution = cobyla.Solution;

            double[] expected = 
            {
                0.688341, 0.725387, -0.284033, 0.958814, 0.688341, 0.725387, -0.284033, 0.958814, 0.0
            };

            for (int i = 0; i < expected.Length; i++)
                Assert.AreEqual(expected[i], cobyla.Solution[i], 1e-2);
            Assert.AreEqual(-0.86602540378486847, minimum, 1e-10);

            double expectedMinimum = function.Function(cobyla.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }
        /// <summary>
        ///   Maximizes the given function. 
        /// </summary>
        /// 
        /// <param name="function">The function to be maximized.</param>
        /// 
        /// <returns>The maximum value found at the <see cref="Solution"/>.</returns>
        /// 
        public double Maximize(NonlinearObjectiveFunction function)
        {
            if (function.NumberOfVariables != numberOfVariables)
                throw new ArgumentOutOfRangeException("function",
                    "Incorrect number of variables in the objective function. " +
                    "The number of variables must match the number of variables set in the solver.");

            this.Function = x => -function.Function(x);
            this.Gradient = x => function.Gradient(x).Multiply(-1);

            minimize();

            return -Function(Solution);
        }
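A usage sketch for this Maximize overload, assuming it is the same call made on AugmentedLagrangianSolver in a later example on this page; the problem data mirror that example, and only the quadratic constraint is included for brevity.

    // Sketch: maximize 2x + 3y subject to 2x² + 2y² <= 50.
    // Maximize negates the function and gradient internally, so the
    // objective must provide a gradient.
    var objective = new NonlinearObjectiveFunction(2,
        function: x => 2 * x[0] + 3 * x[1],
        gradient: x => new double[] { 2, 3 });

    var constraints = new List<NonlinearConstraint>
    {
        new QuadraticConstraint(objective,
            quadraticTerms: new double[,] { { 2, 0 }, { 0, 2 } },
            shouldBe: ConstraintType.LesserThanOrEqualTo, value: 50)
    };

    var solver = new AugmentedLagrangianSolver(2, constraints);
    double maximum = solver.Maximize(objective);   // ≈ 18.03 at (2.77, 4.16)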
        public void ConstructorTest4()
        {
            // Example code from 
            // https://groups.google.com/forum/#!topic/accord-net/an0sJGGrOuU

            int nVariablesTest = 4; // number of variables
            int nConstraintsTest = 2; // number of constraints
            double constraintsTolerance = 1e-100;
            double[,] ATest = new double[,] { { 1, 2, 3, 4 }, { 0, 4, 3, 1 } }; // arbitrary A matrix.  A*X = b
            double[,] bTest = new double[,] { { 0 }, { 2 } }; // arbitrary b vector.  A*X = b

            double[,] XSolve = ATest.Solve(bTest);  // uses the pseudoinverse to minimise norm(X) subject to A*X =  b

            // recreate Solve function using AugmentedLagrangian
            var fTest = new NonlinearObjectiveFunction(nVariablesTest, ds => ds.InnerProduct(ds), ds => ds.Multiply(2.0)); // minimise norm(ds)

            var nonlinearConstraintsTest = new List<NonlinearConstraint>(nConstraintsTest);  // linear constraints A*X = b
            for (int i = 0; i < nConstraintsTest; i++)
            {
                int j = i; // http://blogs.msdn.com/b/ericlippert/archive/2009/11/12/closing-over-the-loop-variable-considered-harmful.aspx
                nonlinearConstraintsTest.Add(new NonlinearConstraint(fTest, ds => ATest.GetRow(j).InnerProduct(ds) - (double)bTest.GetValue(j, 0), ConstraintType.EqualTo, 0.0, ds => ATest.GetRow(j), constraintsTolerance));
            }

            var innerSolverTest = new ResilientBackpropagation(nVariablesTest);
            innerSolverTest.Tolerance = constraintsTolerance;
            innerSolverTest.Iterations = 1000;
            var solverTest = new Accord.Math.Optimization.AugmentedLagrangian(innerSolverTest, fTest, nonlinearConstraintsTest);
            solverTest.MaxEvaluations = 0;
            bool didMinimise = solverTest.Minimize();

            var errorConstraintRelative = XSolve.Subtract(solverTest.Solution, 1).ElementwiseDivide(XSolve); // relative error between .Solve and .Minimize
            var errorConstraintAbsolute = XSolve.Subtract(solverTest.Solution, 1); // absolute error between .Solve and .Minimize

            double[] errorConstraintsTest = new double[nConstraintsTest];
            for (int i = 0; i < nConstraintsTest; i++)
            {
                errorConstraintsTest[i] = nonlinearConstraintsTest[i].Function(solverTest.Solution);
            }
        }
Example #26
        private void init(NonlinearObjectiveFunction function,
                          IEnumerable <NonlinearConstraint> constraints, IGradientOptimizationMethod innerSolver)
        {
            if (function != null)
            {
                if (function.NumberOfVariables != NumberOfVariables)
                {
                    throw new ArgumentOutOfRangeException("function",
                                                          "Incorrect number of variables in the objective function. " +
                                                          "The number of variables must match the number of variables set in the solver.");
                }

                this.Function = function.Function;
                this.Gradient = function.Gradient;
            }

            if (innerSolver == null)
            {
                innerSolver = new BroydenFletcherGoldfarbShanno(NumberOfVariables)
                {
                    LineSearch    = Optimization.LineSearch.BacktrackingArmijo,
                    Corrections   = 10,
                    Epsilon       = 1e-10,
                    MaxIterations = 100000
                };
            }

            var equality    = new List <NonlinearConstraint>();
            var lesserThan  = new List <NonlinearConstraint>();
            var greaterThan = new List <NonlinearConstraint>();

            foreach (var c in constraints)
            {
                switch (c.ShouldBe)
                {
                case ConstraintType.EqualTo:
                    equality.Add(c); break;

                case ConstraintType.GreaterThanOrEqualTo:
                    greaterThan.Add(c); break;

                case ConstraintType.LesserThanOrEqualTo:
                    lesserThan.Add(c); break;

                default:
                    throw new ArgumentException("Unknown constraint type.", "constraints");
                }
            }

            this.lesserThanConstraints  = lesserThan.ToArray();
            this.greaterThanConstraints = greaterThan.ToArray();
            this.equalityConstraints    = equality.ToArray();

            this.lambda = new double[equalityConstraints.Length];
            this.mu     = new double[lesserThanConstraints.Length];
            this.nu     = new double[greaterThanConstraints.Length];

            this.g = new double[NumberOfVariables];

            this.dualSolver     = innerSolver;
            dualSolver.Function = objectiveFunction;
            dualSolver.Gradient = objectiveGradient;
        }
        public void AugmentedLagrangianSolverConstructorTest2()
        {
            // min 100(y-x*x)²+(1-x)²
            //
            // s.t.  x >= 0
            //       y >= 0
            //

            var f = new NonlinearObjectiveFunction(2,

                function: (x) => 100 * Math.Pow(x[1] - x[0] * x[0], 2) + Math.Pow(1 - x[0], 2),

                gradient: (x) => new[] 
                {
                    2.0 * (200.0 * Math.Pow(x[0], 3) - 200.0 * x[0] * x[1] + x[0] - 1), // df/dx
                    200 * (x[1] - x[0]*x[0])                                            // df/dy
                }

            );


            var constraints = new List<NonlinearConstraint>();

            constraints.Add(new NonlinearConstraint(f,

                function: (x) => x[0],
                gradient: (x) => new[] { 1.0, 0.0 },

                shouldBe: ConstraintType.GreaterThanOrEqualTo, value: 0
            ));

            constraints.Add(new NonlinearConstraint(f,

                function: (x) => x[1],
                gradient: (x) => new[] { 0.0, 1.0 },

                shouldBe: ConstraintType.GreaterThanOrEqualTo, value: 0
            ));

            var solver = new AugmentedLagrangianSolver(2, constraints);

            double minValue = solver.Minimize(f);

            Assert.AreEqual(0, minValue, 1e-10);
            Assert.AreEqual(1, solver.Solution[0], 1e-10);
            Assert.AreEqual(1, solver.Solution[1], 1e-10);

            Assert.IsFalse(Double.IsNaN(minValue));
            Assert.IsFalse(Double.IsNaN(solver.Solution[0]));
            Assert.IsFalse(Double.IsNaN(solver.Solution[1]));

        }
Example #28
        /// <summary>
        ///   Creates a new instance of the Augmented Lagrangian algorithm.
        /// </summary>
        /// 
        /// <param name="innerSolver">The <see cref="IGradientOptimizationMethod">unconstrained 
        ///   optimization method</see> used internally to solve the dual of this optimization 
        ///   problem.</param>
        /// <param name="function">The objective function to be optimized.</param>
        /// <param name="constraints">
        ///   The <see cref="NonlinearConstraint"/>s to which the solution must be subjected.</param>
        /// 
        public AugmentedLagrangian(IGradientOptimizationMethod innerSolver,
            NonlinearObjectiveFunction function, IEnumerable<NonlinearConstraint> constraints)
            : base(innerSolver.NumberOfVariables)
        {
            if (innerSolver.NumberOfVariables != function.NumberOfVariables)
                throw new ArgumentException("The inner unconstrained optimization algorithm and the "
                    + "objective function should have the same number of variables.", "function");

            init(function, constraints, innerSolver);
        }
        public void AugmentedLagrangianSolverConstructorTest3()
        {
            // min x*y+ y*z
            //
            // s.t.  x^2 - y^2 + z^2 - 2  >= 0
            //       x^2 + y^2 + z^2 - 10 <= 0
            //

            double x = 0, y = 0, z = 0;

            var f = new NonlinearObjectiveFunction(

                function: () => x * y + y * z,

                gradient: () => new[] 
                {
                    y,     // df/dx
                    x + z, // df/dy
                    y,     // df/dz
                }

            );


            var constraints = new List<NonlinearConstraint>();

            constraints.Add(new NonlinearConstraint(f,

                function: () => x * x - y * y + z * z,
                gradient: () => new[] { 2 * x, -2 * y, 2 * z },

                shouldBe: ConstraintType.GreaterThanOrEqualTo, value: 2
            ));

            constraints.Add(new NonlinearConstraint(f,

                function: () => x * x + y * y + z * z,
                gradient: () => new[] { 2 * x, 2 * y, 2 * z },

                shouldBe: ConstraintType.LesserThanOrEqualTo, value: 10
            ));

            var solver = new AugmentedLagrangianSolver(3, constraints);

            solver.Solution[0] = 1;
            solver.Solution[1] = 1;
            solver.Solution[2] = 1;

            double minValue = solver.Minimize(f);

            Assert.AreEqual(-6.9, minValue, 1e-1);
            Assert.AreEqual(+1.73, solver.Solution[0], 1e-2);
            Assert.AreEqual(-2.00, solver.Solution[1], 1e-2);
            Assert.AreEqual(+1.73, solver.Solution[2], 1e-2);

            Assert.IsFalse(Double.IsNaN(minValue));
            Assert.IsFalse(Double.IsNaN(solver.Solution[0]));
            Assert.IsFalse(Double.IsNaN(solver.Solution[1]));
            Assert.IsFalse(Double.IsNaN(solver.Solution[2]));

        }
Example #30
 /// <summary>
 ///   Creates a new <see cref="Subplex"/> optimization algorithm.
 /// </summary>
 ///
 /// <param name="function">The objective function whose optimum values should be found.</param>
 ///
 public Subplex(NonlinearObjectiveFunction function)
     : base(function)
 {
     init(function.NumberOfVariables);
 }
        public void AugmentedLagrangianSolverConstructorTest7()
        {
            // maximize 2x + 3y, s.t. 2x² + 2y² <= 50

            // Max x' * c
            //  x

            // s.t. x' * A * x <= k
            //      x' * i     = 1
            // lower_bound < x < upper_bound

            double[] c = { 2, 3 };
            double[,] A = { { 2, 0 }, { 0, 2 } };
            double k = 50;

            // Create the objective function
            var objective = new NonlinearObjectiveFunction(2,
                function: (x) => x.InnerProduct(c),
                gradient: (x) => c
            );

            // Test objective
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 10; j++)
                {
                    double expected = i * 2 + j * 3;
                    double actual = objective.Function(new double[] { i, j });
                    Assert.AreEqual(expected, actual);
                }
            }


            // Create the optimization constraints
            var constraints = new List<NonlinearConstraint>();

            constraints.Add(new QuadraticConstraint(objective,
                quadraticTerms: A,
                shouldBe: ConstraintType.LesserThanOrEqualTo, value: k
            ));


            // Test first constraint
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 10; j++)
                {
                    double expected = i * (2 * i + 0 * j) + j * (0 * i + 2 * j);
                    double actual = constraints[0].Function(new double[] { i, j });
                    Assert.AreEqual(expected, actual);
                }
            }


            // Create the solver algorithm
            AugmentedLagrangianSolver solver =
                new AugmentedLagrangianSolver(2, constraints);

            double maxValue = solver.Maximize(objective);

            Assert.AreEqual(18.02, maxValue, 0.01);
            Assert.AreEqual(2.77, solver.Solution[0], 1e-2);
            Assert.AreEqual(4.16, solver.Solution[1], 1e-2);
        }
Example #32
        /// <summary>
        ///   Implements the actual optimization algorithm. This
        ///   method should try to minimize the objective function.
        /// </summary>
        protected override bool Optimize()
        {
            if (Function == null)
            {
                throw new InvalidOperationException("function");
            }

            if (Gradient == null)
            {
                throw new InvalidOperationException("gradient");
            }

            NonlinearObjectiveFunction.CheckGradient(Gradient, Solution);


            int n = NumberOfVariables;
            int m = corrections;

            String task  = "";
            String csave = "";

            bool[] lsave  = new bool[4];
            int    iprint = 101;

            int[]  nbd   = new int[n];
            int[]  iwa   = new int[3 * n];
            int[]  isave = new int[44];
            double f     = 0.0d;

            double[] x     = new double[n];
            double[] l     = new double[n];
            double[] u     = new double[n];
            double[] g     = new double[n];
            double[] dsave = new double[29];

            int totalSize = 2 * m * n + 11 * m * m + 5 * n + 8 * m;

            if (work == null || work.Length < totalSize)
            {
                work = new double[totalSize];
            }

            int i = 0;

            {
                for (i = 0; i < UpperBounds.Length; i++)
                {
                    bool hasUpper = !Double.IsInfinity(UpperBounds[i]);
                    bool hasLower = !Double.IsInfinity(LowerBounds[i]);

                    if (hasUpper && hasLower)
                    {
                        nbd[i] = 2;
                    }
                    else if (hasUpper)
                    {
                        nbd[i] = 3;
                    }
                    else if (hasLower)
                    {
                        nbd[i] = 1;
                    }
                    else
                    {
                        nbd[i] = 0;  // unbounded
                    }
                    if (hasLower)
                    {
                        l[i] = LowerBounds[i];
                    }
                    if (hasUpper)
                    {
                        u[i] = UpperBounds[i];
                    }
                }
            }


            // We now define the starting point.
            {
                for (i = 0; i < n; i++)
                {
                    x[i] = Solution[i];
                }
            }

            double newF = 0;

            double[] newG = null;

            // We start the iteration by initializing task.
            task = "START";

            iterations = 0;

            //
            // c        ------- the beginning of the loop ----------
            //
L111:
            if (Token.IsCancellationRequested)
            {
                return(false);
            }

            iterations++;

            //
            // c     This is the call to the L-BFGS-B code.
            //
            setulb(n, m, x, 0, l, 0, u, 0, nbd, 0, ref f, g, 0,
                   factr, pgtol, work, 0, iwa, 0, ref task, iprint, ref csave,
                   lsave, 0, isave, 0, dsave, 0);


            //
            if ((task.StartsWith("FG", StringComparison.OrdinalIgnoreCase)))
            {
                newF = Function(x);
                newG = Gradient(x);
                evaluations++;

                f = newF;

                for (int j = 0; j < newG.Length; j++)
                {
                    g[j] = newG[j];
                }
            }

            // c
            else if ((task.StartsWith("NEW_X", StringComparison.OrdinalIgnoreCase)))
            {
            }
            else
            {
                if (task == "ABNORMAL_TERMINATION_IN_LNSRCH")
                {
                    Status = BoundedBroydenFletcherGoldfarbShannoStatus.LineSearchFailed;
                }
                else if (task == "CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH")
                {
                    Status = BoundedBroydenFletcherGoldfarbShannoStatus.FunctionConvergence;
                }
                else if (task == "CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL")
                {
                    Status = BoundedBroydenFletcherGoldfarbShannoStatus.GradientConvergence;
                }
                else
                {
                    throw OperationException(task, task);
                }

                for (int j = 0; j < Solution.Length; j++)
                {
                    Solution[j] = x[j];
                }

                newF = Function(x);
                newG = Gradient(x);
                evaluations++;

                if (Progress != null)
                {
                    Progress(this, new OptimizationProgressEventArgs(iterations, 0, newG, 0, null, 0, newF, 0, true)
                    {
                        Tag = new BoundedBroydenFletcherGoldfarbShannoInnerStatus(
                            isave, dsave, lsave, csave, work)
                    });
                }

                return(true);
            }


            if (Progress != null)
            {
                Progress(this, new OptimizationProgressEventArgs(iterations, 0, newG, 0, null, 0, f, 0, false)
                {
                    Tag = new BoundedBroydenFletcherGoldfarbShannoInnerStatus(
                        isave, dsave, lsave, csave, work)
                });
            }

            goto L111;
        }
        public void AugmentedLagrangianSolverConstructorTest4()
        {
            // min x*y+ y*z
            //
            // s.t.  x^2 - y^2 + z^2 - 2  >= 0
            //       x^2 + y^2 + z^2 - 10 <= 0
            //       x   + y               = 1
            //

            double x = 0, y = 0, z = 0;

            var f = new NonlinearObjectiveFunction(

                function: () => x * y + y * z,

                gradient: () => new[] 
                {
                    y,     // df/dx
                    x + z, // df/dy
                    y,     // df/dz
                }

            );


            var constraints = new List<NonlinearConstraint>();

            constraints.Add(new NonlinearConstraint(f,

                function: () => x * x - y * y + z * z,
                gradient: () => new[] { 2 * x, -2 * y, 2 * z },

                shouldBe: ConstraintType.GreaterThanOrEqualTo, value: 2
            ));

            constraints.Add(new NonlinearConstraint(f,

                function: () => x * x + y * y + z * z,
                gradient: () => new[] { 2 * x, 2 * y, 2 * z },

                shouldBe: ConstraintType.LesserThanOrEqualTo, value: 10
            ));

            constraints.Add(new NonlinearConstraint(f,

                function: () => x + y,
                gradient: () => new[] { 1.0, 1.0, 0.0 },

                shouldBe: ConstraintType.EqualTo, value: 1
            )
            {
                Tolerance = 1e-5
            });

            var solver = new AugmentedLagrangian(f, constraints);

            solver.Solution[0] = 1;
            solver.Solution[1] = 1;
            solver.Solution[2] = 1;

            Assert.IsTrue(solver.Minimize());
            double minValue = solver.Value;

            Assert.AreEqual(1, solver.Solution[0] + solver.Solution[1], 1e-4);

            Assert.IsFalse(Double.IsNaN(minValue));
            Assert.IsFalse(Double.IsNaN(solver.Solution[0]));
            Assert.IsFalse(Double.IsNaN(solver.Solution[1]));
            Assert.IsFalse(Double.IsNaN(solver.Solution[2]));
        }
Example #34
 /// <summary>
 ///   Creates a new <see cref="NelderMead"/> non-linear optimization algorithm.
 /// </summary>
 ///
 /// <param name="function">The objective function whose optimum values should be found.</param>
 ///
 public NelderMead(NonlinearObjectiveFunction function)
     : base(function)
 {
     init(function.NumberOfVariables);
 }
        public void AugmentedLagrangianSolverConstructorTest5()
        {
            // Suppose we would like to minimize the following function:
            //
            //    f(x,y) = min 100(y-x²)²+(1-x)²
            //
            // Subject to the constraints
            //
            //    x >= 0  (x must be positive)
            //    y >= 0  (y must be positive)
            //

            double x = 0, y = 0;


            // First, we create our objective function
            var f = new NonlinearObjectiveFunction(

                // This is the objective function:  f(x,y) = min 100(y-x²)²+(1-x)²
                function: () => 100 * Math.Pow(y - x * x, 2) + Math.Pow(1 - x, 2),

                // The gradient vector:
                gradient: () => new[] 
                {
                    2 * (200 * Math.Pow(x, 3) - 200 * x * y + x - 1), // df/dx = 2(200x³-200xy+x-1)
                    200 * (y - x*x)                                   // df/dy = 200(y-x²)
                }

            );


            // Now we can start stating the constraints
            var constraints = new List<NonlinearConstraint>();

            // Add the non-negativity constraint for x
            constraints.Add(new NonlinearConstraint(f,

                // 1st constraint: x should be greater than or equal to 0
                function: () => x, shouldBe: ConstraintType.GreaterThanOrEqualTo, value: 0,

                gradient: () => new[] { 1.0, 0.0 }
            ));

            // Add the non-negativity constraint for y
            constraints.Add(new NonlinearConstraint(f,

                // 2nd constraint: y should be greater than or equal to 0
                function: () => y, shouldBe: ConstraintType.GreaterThanOrEqualTo, value: 0,

                gradient: () => new[] { 0.0, 1.0 }
            ));


            // Finally, we create the non-linear programming solver
            var solver = new AugmentedLagrangian(f, constraints);

            // And attempt to solve the problem
            Assert.IsTrue(solver.Minimize());
            double minValue = solver.Value;

            Assert.AreEqual(0, minValue, 1e-10);
            Assert.AreEqual(1, solver.Solution[0], 1e-6);
            Assert.AreEqual(1, solver.Solution[1], 1e-6);

            Assert.IsFalse(Double.IsNaN(minValue));
            Assert.IsFalse(Double.IsNaN(solver.Solution[0]));
            Assert.IsFalse(Double.IsNaN(solver.Solution[1]));
        }
        private static void test1(IGradientOptimizationMethod inner, double tol)
        {

            // maximize 2x + 3y, s.t. 2x² + 2y² <= 50 and x+y = 1

            // Max x' * c
            //  x

            // s.t. x' * A * x <= k
            //      x' * i     = 1
            // lower_bound < x < upper_bound

            double[] c = { 2, 3 };
            double[,] A = { { 2, 0 }, { 0, 2 } };
            double k = 50;

            // Create the objective function
            var objective = new NonlinearObjectiveFunction(2,
                function: (x) => x.InnerProduct(c),
                gradient: (x) => c
            );

            // Test objective
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 10; j++)
                {
                    double expected = i * 2 + j * 3;
                    double actual = objective.Function(new double[] { i, j });
                    Assert.AreEqual(expected, actual);
                }
            }


            // Create the optimization constraints
            var constraints = new List<NonlinearConstraint>();

            constraints.Add(new QuadraticConstraint(objective,
                quadraticTerms: A,
                shouldBe: ConstraintType.LesserThanOrEqualTo, value: k
            ));

            constraints.Add(new NonlinearConstraint(objective,
                function: (x) => x.Sum(),
                gradient: (x) => new[] { 1.0, 1.0 },
                shouldBe: ConstraintType.EqualTo, value: 1,
                withinTolerance: 1e-10
            ));


            // Test first constraint
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 10; j++)
                {
                    double expected = i * (2 * i + 0 * j) + j * (0 * i + 2 * j);
                    double actual = constraints[0].Function(new double[] { i, j });
                    Assert.AreEqual(expected, actual);
                }
            }


            // Test second constraint
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 10; j++)
                {
                    double expected = i + j;
                    double actual = constraints[1].Function(new double[] { i, j });
                    Assert.AreEqual(expected, actual);
                }
            }



            AugmentedLagrangian solver =
                new AugmentedLagrangian(inner, objective, constraints);

            Assert.AreEqual(inner, solver.Optimizer);

            Assert.IsTrue(solver.Maximize());
            double maxValue = solver.Value;

            Assert.AreEqual(6, maxValue, tol);
            Assert.AreEqual(-3, solver.Solution[0], tol);
            Assert.AreEqual(4, solver.Solution[1], tol);
        }
Example #37
        public void ConstructorTest6_2()
        {
            // This problem is taken from Fletcher's book Practical Methods of
            // Optimization and has the equation number (9.1.15).
            var function = new NonlinearObjectiveFunction(2, x => -x[0] - x[1]);

            NonlinearConstraint[] constraints = 
            {
                new NonlinearConstraint(2, x =>  -(x[1] - x[0] * x[0]) <= 0),
                new NonlinearConstraint(2, x =>  -(-x[0] * x[0] - x[1] * x[1]) <= 1.0),
            };

            Cobyla cobyla = new Cobyla(function, constraints);

            Assert.IsTrue(cobyla.Minimize());
            double minimum = cobyla.Value;
            double[] solution = cobyla.Solution;

            double sqrthalf = Math.Sqrt(0.5);
            Assert.AreEqual(-sqrthalf * 2, minimum, 1e-10);
            Assert.AreEqual(sqrthalf, solution[0], 1e-5);
            Assert.AreEqual(sqrthalf, solution[1], 1e-5);

            double expectedMinimum = function.Function(cobyla.Solution);
            Assert.AreEqual(expectedMinimum, minimum);
        }