Example #1
        /// <summary>
        /// Find vector x that minimizes the function f(x) using the Newton algorithm.
        /// For more options and diagnostics consider using <see cref="NewtonMinimizer"/> directly.
        /// </summary>
        public static Vector<double> OfFunctionGradientHessian(Func<Vector<double>, Tuple<double, Vector<double>, Matrix<double>>> functionGradientHessian, Vector<double> initialGuess, double gradientTolerance = 1e-8, int maxIterations = 1000)
        {
            var objective = ObjectiveFunction.GradientHessian(functionGradientHessian);
            var result    = NewtonMinimizer.Minimum(objective, initialGuess, gradientTolerance, maxIterations);

            return result.MinimizingPoint;
        }
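
A minimal usage sketch of the helper above, assuming it is the OfFunctionGradientHessian method exposed on MathNet's static FindMinimum class; the quadratic objective, its derivatives, and the NewtonDemo class name are purely illustrative:

    using System;
    using MathNet.Numerics.LinearAlgebra;
    using MathNet.Numerics.LinearAlgebra.Double;
    using MathNet.Numerics.Optimization;

    public static class NewtonDemo
    {
        public static void Main()
        {
            // Minimize f(x, y) = (x - 3)^2 + (y + 1)^2; the analytic gradient is
            // (2(x - 3), 2(y + 1)) and the Hessian is the constant matrix 2*I.
            Vector<double> minimum = FindMinimum.OfFunctionGradientHessian(
                v => Tuple.Create(
                    (v[0] - 3) * (v[0] - 3) + (v[1] + 1) * (v[1] + 1),
                    (Vector<double>)new DenseVector(new[] { 2 * (v[0] - 3), 2 * (v[1] + 1) }),
                    (Matrix<double>)DenseMatrix.OfArray(new double[,] { { 2, 0 }, { 0, 2 } })),
                initialGuess: new DenseVector(new[] { 0.0, 0.0 }));

            Console.WriteLine(minimum); // expected to converge to approximately (3, -1)
        }
    }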
        public void FindMinimum_Rosenbrock_Easy()
        {
            var obj    = ObjectiveFunction.GradientHessian(RosenbrockFunction.Value, RosenbrockFunction.Gradient, RosenbrockFunction.Hessian);
            var solver = new NewtonMinimizer(1e-5, 1000);
            var result = solver.FindMinimum(obj, new DenseVector(new[] { 1.2, 1.2 }));

            Assert.That(Math.Abs(result.MinimizingPoint[0] - 1.0), Is.LessThan(1e-3));
            Assert.That(Math.Abs(result.MinimizingPoint[1] - 1.0), Is.LessThan(1e-3));
        }
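
The RosenbrockFunction helper referenced in these tests is not shown on this page. For orientation, a sketch of analytic derivatives for the standard two-dimensional Rosenbrock function f(x, y) = (1 - x)^2 + 100(y - x^2)^2 (an assumption about what the helper computes) could look like this:

    using MathNet.Numerics.LinearAlgebra;
    using MathNet.Numerics.LinearAlgebra.Double;

    // Illustrative sketch, not necessarily identical to the RosenbrockFunction test helper.
    static class Rosenbrock2D
    {
        public static double Value(Vector<double> v)
        {
            double x = v[0], y = v[1];
            return (1 - x) * (1 - x) + 100 * (y - x * x) * (y - x * x);
        }

        public static Vector<double> Gradient(Vector<double> v)
        {
            double x = v[0], y = v[1];
            return new DenseVector(new[]
            {
                -2 * (1 - x) - 400 * x * (y - x * x),  // df/dx
                200 * (y - x * x)                      // df/dy
            });
        }

        public static Matrix<double> Hessian(Vector<double> v)
        {
            double x = v[0], y = v[1];
            return DenseMatrix.OfArray(new[,]
            {
                { 2 - 400 * (y - 3 * x * x), -400 * x },  // d2f/dx2,  d2f/dxdy
                { -400 * x,                  200.0     }  // d2f/dydx, d2f/dy2
            });
        }
    }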
        public void FindMinimum_Linesearch_Rosenbrock_Overton()
        {
            var obj    = new LazyRosenbrockObjectiveFunction();
            var solver = new NewtonMinimizer(1e-5, 1000, true);
            var result = solver.FindMinimum(obj, new DenseVector(new[] { -0.9, -0.5 }));

            Assert.That(Math.Abs(result.MinimizingPoint[0] - 1.0), Is.LessThan(1e-3));
            Assert.That(Math.Abs(result.MinimizingPoint[1] - 1.0), Is.LessThan(1e-3));
        }
        public void FindMinimum_Rosenbrock_Hard()
        {
            var obj    = ObjectiveFunction.GradientHessian(point => Tuple.Create(RosenbrockFunction.Value(point), RosenbrockFunction.Gradient(point), RosenbrockFunction.Hessian(point)));
            var solver = new NewtonMinimizer(1e-5, 1000);
            var result = solver.FindMinimum(obj, new DenseVector(new[] { -1.2, 1.0 }));

            Assert.That(Math.Abs(result.MinimizingPoint[0] - 1.0), Is.LessThan(1e-3));
            Assert.That(Math.Abs(result.MinimizingPoint[1] - 1.0), Is.LessThan(1e-3));
        }
Example #5
        public void BoxBod_Newton_Der()
        {
            var obj    = ObjectiveFunction.NonlinearFunction(BoxBodModel, BoxBodPrime, BoxBodX, BoxBodY);
            var solver = new NewtonMinimizer(1e-10, 100);
            var result = solver.FindMinimum(obj, BoxBodStart2);

            for (int i = 0; i < result.MinimizingPoint.Count; i++)
            {
                AssertHelpers.AlmostEqualRelative(BoxBodPbest[i], result.MinimizingPoint[i], 6);
            }
        }
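
The BoxBod helpers used here are not included in this excerpt. Assuming they follow the NIST BoxBOD benchmark, the model is y = b1 * (1 - exp(-b2 * x)); a sketch of the model function and its Jacobian with respect to the parameters (b1, b2), with hypothetical names and signatures, could look like this:

    using System;
    using MathNet.Numerics.LinearAlgebra;
    using MathNet.Numerics.LinearAlgebra.Double;

    // Illustrative sketch of the NIST BoxBOD model y = b1 * (1 - exp(-b2 * x)),
    // evaluated for a vector of x values, plus its Jacobian with respect to (b1, b2).
    static class BoxBod
    {
        public static Vector<double> Model(Vector<double> p, Vector<double> x)
        {
            // y_i = b1 * (1 - exp(-b2 * x_i))
            return x.Map(xi => p[0] * (1 - Math.Exp(-p[1] * xi)));
        }

        public static Matrix<double> Prime(Vector<double> p, Vector<double> x)
        {
            var jacobian = new DenseMatrix(x.Count, 2);
            for (int i = 0; i < x.Count; i++)
            {
                jacobian[i, 0] = 1 - Math.Exp(-p[1] * x[i]);           // dy/db1
                jacobian[i, 1] = p[0] * x[i] * Math.Exp(-p[1] * x[i]); // dy/db2
            }
            return jacobian;
        }
    }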
Example #6
 public double NewtonPerpendicular(GeoPoint2D fromHere, double position)
 {
     NewtonMinimizer    nm  = new NewtonMinimizer(1e-6, 10);
     IObjectiveFunction iof = ObjectiveFunction.GradientHessian(
         new Func<Vector<double>, Tuple<double, Vector<double>, Matrix<double>>>(delegate(Vector<double> vd)
     {
         // vd[0] is the 0..1 parameter of the curve
         TryPointDeriv3At(vd[0], out GeoPoint2D point, out GeoVector2D deriv1, out GeoVector2D deriv2, out GeoVector2D deriv3);
         GeoVector2D toPoint      = fromHere - point;
         double s1                = toPoint * deriv1;
         double s2                = (toPoint * deriv2 - deriv1 * deriv1);
         double val               = sqr(s1);
         Vector<double> gradient = new DenseVector(new double[] { 2 * s1 * s2 });
         Matrix<double> hessian  = new DenseMatrix(1, 1);
         hessian[0, 0]            = 2 * s1 * (toPoint * deriv3 - 3 * deriv1 * deriv2) + 2 * sqr(s2);
         return new Tuple<double, Vector<double>, Matrix<double>>(val, gradient, hessian);
     }));
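
For reference on the derivatives used above: with C(t) the curve point at parameter t and toPoint = fromHere - C(t), the objective is val = s1^2 where s1 = toPoint * deriv1 is the dot product of toPoint with the curve tangent. Since d(toPoint)/dt = -deriv1, the chain rule gives ds1/dt = toPoint * deriv2 - deriv1 * deriv1 = s2, hence the gradient 2 * s1 * s2, and differentiating once more gives the 1x1 Hessian 2 * s2^2 + 2 * s1 * (toPoint * deriv3 - 3 * deriv1 * deriv2). At the minimum s1 = 0, i.e. fromHere - C(t) is perpendicular to the tangent, which is exactly the foot point NewtonPerpendicular looks for.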
Example #7
 /// <summary>
 /// Finds the (u, v) position of the provided point on a surface, using the NewtonMinimizer from MathNet.
 /// Faster than the LevenbergMarquardtMinimizer, but the surface must provide second derivatives.
 /// </summary>
 /// <param name="surface">The surface on which to locate the point</param>
 /// <param name="p3d">The point to find on the surface</param>
 /// <param name="res">The resulting (u, v) position</param>
 /// <param name="mindist">The minimal distance found between the point and the surface</param>
 /// <returns></returns>
 public static bool PositionOfMN(ISurface surface, GeoPoint p3d, ref GeoPoint2D res, out double mindist)
 {
     NewtonMinimizer    nm  = new NewtonMinimizer(1e-12, 30);
     IObjectiveFunction iof = ObjectiveFunction.GradientHessian(
         new Func<Vector<double>, Tuple<double, Vector<double>, Matrix<double>>>(delegate(Vector<double> vd)
     {
         GeoPoint2D uv = new GeoPoint2D(vd[0], vd[1]);
         surface.Derivation2At(uv, out GeoPoint loc, out GeoVector du, out GeoVector dv, out GeoVector duu, out GeoVector dvv, out GeoVector duv);
         double val = (p3d.x - loc.x) * (p3d.x - loc.x) + (p3d.y - loc.y) * (p3d.y - loc.y) + (p3d.z - loc.z) * (p3d.z - loc.z);
         double u   = -2 * du.x * (p3d.x - loc.x) - 2 * du.y * (p3d.y - loc.y) - 2 * du.z * (p3d.z - loc.z);
         double v   = -2 * dv.x * (p3d.x - loc.x) - 2 * dv.y * (p3d.y - loc.y) - 2 * dv.z * (p3d.z - loc.z);
         Vector<double> gradient = new DenseVector(new double[] { u, v });
         Matrix<double> hessian  = new DenseMatrix(2, 2);
         hessian[0, 0]            = -2 * duu.z * (p3d.z - loc.z) - 2 * duu.y * (p3d.y - loc.y) - 2 * duu.x * (p3d.x - loc.x) + 2 * du.z * du.z + 2 * du.y * du.y + 2 * du.x * du.x;
         hessian[1, 1]            = -2 * dvv.z * (p3d.z - loc.z) - 2 * dvv.y * (p3d.y - loc.y) - 2 * dvv.x * (p3d.x - loc.x) + 2 * dv.z * dv.z + 2 * dv.y * dv.y + 2 * dv.x * dv.x;
         hessian[0, 1]            = hessian[1, 0] = -2 * duv.z * (p3d.z - loc.z) - 2 * duv.y * (p3d.y - loc.y) - 2 * duv.x * (p3d.x - loc.x) + 2 * du.z * dv.z + 2 * du.y * dv.y + 2 * du.x * dv.x;
         return new Tuple<double, Vector<double>, Matrix<double>>(val, gradient, hessian);
     }));
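
Here the objective is the squared distance val = |p3d - loc|^2 to the surface point loc = S(u, v) returned by Derivation2At. Its gradient components are -2 (p3d - loc) * du and -2 (p3d - loc) * dv, matching the u and v values above, and one more differentiation gives the Hessian entries, e.g. the [0, 0] entry is 2 du * du - 2 (p3d - loc) * duu, written out per coordinate in the code. This second differentiation is why the surface must supply second derivatives for the Newton iteration.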
        public void Mgh_Tests(TestFunctions.TestCase test_case)
        {
            var obj = new MghObjectiveFunction(test_case.Function, true, true);

            var result = NewtonMinimizer.Minimum(obj, test_case.InitialGuess, 1e-8, 1000, useLineSearch: false);

            if (test_case.MinimizingPoint != null)
            {
                Assert.That((result.MinimizingPoint - test_case.MinimizingPoint).L2Norm(), Is.LessThan(1e-3));
            }

            var val1    = result.FunctionInfoAtMinimum.Value;
            var val2    = test_case.MinimalValue;
            var abs_min = Math.Min(Math.Abs(val1), Math.Abs(val2));
            var abs_err = Math.Abs(val1 - val2);
            var rel_err = abs_err / abs_min;
            var success = (abs_min <= 1 && abs_err < 1e-3) || (abs_min > 1 && rel_err < 1e-3);

            Assert.That(success, "Minimal function value is not as expected.");
        }
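
The acceptance check at the end mixes absolute and relative error: with m = min(|val1|, |val2|), the run counts as successful when m <= 1 and |val1 - val2| < 1e-3, or when m > 1 and |val1 - val2| / m < 1e-3, so small minimal values are compared absolutely and large ones relatively.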