Example #1
        protected int DoBfgsUpdate(ref ExitCondition currentExitCondition, WolfeLineSearch lineSearcher, ref Matrix <double> inversePseudoHessian, ref Vector <double> lineSearchDirection, ref IObjectiveFunction previousPoint, ref LineSearchResult lineSearchResult, ref IObjectiveFunction candidate, ref Vector <double> step, ref int totalLineSearchSteps, ref int iterationsWithNontrivialLineSearch)
        {
            int iterations;

            for (iterations = 1; iterations < MaximumIterations; ++iterations)
            {
                double startingStepSize;
                double maxLineSearchStep;
                lineSearchDirection = CalculateSearchDirection(ref inversePseudoHessian, out maxLineSearchStep, out startingStepSize, previousPoint, candidate, step);

                try
                {
                    lineSearchResult = lineSearcher.FindConformingStep(candidate, lineSearchDirection, startingStepSize, maxLineSearchStep);
                }
                catch (Exception e)
                {
                    throw new InnerOptimizationException("Line search failed.", e);
                }

                iterationsWithNontrivialLineSearch += lineSearchResult.Iterations > 0 ? 1 : 0;
                totalLineSearchSteps += lineSearchResult.Iterations;

                step          = lineSearchResult.FunctionInfoAtMinimum.Point - candidate.Point;
                previousPoint = candidate;
                candidate     = lineSearchResult.FunctionInfoAtMinimum;

                currentExitCondition = ExitCriteriaSatisfied(candidate, previousPoint, iterations);
                if (currentExitCondition != ExitCondition.None)
                {
                    break;
                }
            }

            return(iterations);
        }
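A minimal standalone sketch of the rank-two BFGS inverse-Hessian update that a CalculateSearchDirection helper like the one above typically maintains between iterations. The helper name and wiring are illustrative assumptions, not the library's actual API; Math.NET vector/matrix types are assumed.

        using MathNet.Numerics.LinearAlgebra;

        static Matrix<double> BfgsInverseUpdate(Matrix<double> hInv, Vector<double> step, Vector<double> y)
        {
            // Standard BFGS update: H⁻¹ ← (I − ρ·s·yᵀ)·H⁻¹·(I − ρ·y·sᵀ) + ρ·s·sᵀ, with ρ = 1 / (yᵀs)
            double sy = y.DotProduct(step);
            if (sy <= 0.0)
            {
                return hInv; // curvature condition failed; skip the update to preserve positive definiteness
            }

            double rho      = 1.0 / sy;
            var    identity = Matrix<double>.Build.DenseIdentity(step.Count);
            var    left     = identity - rho * step.OuterProduct(y);
            var    right    = identity - rho * y.OuterProduct(step);
            return left * hInv * right + rho * step.OuterProduct(step);
        }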
Example #2
        public NonlinearMinimizationResult(IObjectiveModel modelInfo, int iterations, ExitCondition reasonForExit)
        {
            ModelInfoAtMinimum = modelInfo;
            Iterations         = iterations;
            ReasonForExit      = reasonForExit;

            EvaluateCovariance(modelInfo);
        }
Example #3
        private void InterpreterOnExecutionComplete(object sender, ExitCondition exitCondition)
        {
            void MethodInvokerDelegate()
            {
                statusRunTime.Text = string.Format(Resources.LogExitedAfter, _stopwatch.Elapsed);
                tsbHalt.Enabled    = false;
            }

            Invoke((MethodInvoker)MethodInvokerDelegate);

            _stopwatch.Stop();
        }
Example #4
        public void RunSteps()
        {
            st.Start();

            while (exitCondition == ExitCondition.Running)
            {
                Actions.Clear();
                DoorActions.Clear();
                SaveState();
                actualCost       = CostCalculationService.CalculateCostNew(Model);
                actualCost.Index = CurrentIndex;
                CalculateCostsForState();

                bool isInInvalidState = Model.IsInInvalidState; // note: this flag is currently not used below

                MakeAStepByTheCalculatedCosts();

                //CalculateDoorCosts();
                //MakeStepByDoorChanges();

                HandleModelChangeUpdate();

                if (CurrentIndex >= MaxIndex)
                {
                    exitCondition = ExitCondition.isFinished;
                }
                if (st.ElapsedMilliseconds > maxSeconds * 1000)
                {
                    exitCondition = ExitCondition.isTimeout;
                }
                if (ActualTreshold >= MaxTreshold)
                {
                    exitCondition = ExitCondition.isTreshold;
                }
                CurrentIndex++;
                Thread.Sleep(5);
            }

            Logger.WriteLog($"Run Ended.\nExitCondition : {exitCondition}");
            exitCondition  = ExitCondition.Running;
            ActualTreshold = 0;
            MaxIndex      += MaxIndex;

            st.Reset();
        }
Example #5
 public LineSearchResult(IObjectiveFunction functionInfo, int iterations, double finalStep, ExitCondition reasonForExit)
     : base(functionInfo, iterations, reasonForExit)
 {
     FinalStep = finalStep;
 }
        /// <summary>
        /// Non-linear least squares fitting by the Levenberg-Marquardt algorithm.
        /// </summary>
        /// <param name="objective">The objective function, including model, observations, and parameter bounds.</param>
        /// <param name="initialGuess">The initial guess values.</param>
        /// <param name="initialMu">The initial damping parameter of mu.</param>
        /// <param name="gradientTolerance">The stopping threshold for infinity norm of the gradient vector.</param>
        /// <param name="stepTolerance">The stopping threshold for L2 norm of the change of parameters.</param>
        /// <param name="functionTolerance">The stopping threshold for L2 norm of the residuals.</param>
        /// <param name="maximumIterations">The max iterations.</param>
        /// <returns>The result of the Levenberg-Marquardt minimization</returns>
        public static NonlinearMinimizationResult Minimum(IObjectiveModel objective, Vector <double> initialGuess,
                                                          Vector <double> lowerBound = null, Vector <double> upperBound = null, Vector <double> scales = null, List <bool> isFixed      = null,
                                                          double initialMu           = 1E-3, double gradientTolerance   = 1E-15, double stepTolerance = 1E-15, double functionTolerance = 1E-15, int maximumIterations = -1)
        {
            // Non-linear least squares fitting by the Levenberg-Marquardt algorithm.
            //
            // Levenberg-Marquardt finds the minimum of a function F(p) that is a sum of squares of nonlinear functions.
            //
            // For a given datum pair (x, y), uncertainties σ (or weighting W  =  1 / σ^2) and model function f = f(x; p),
            // let's find the parameters of the model so that the sum of the squares of the deviations is minimized.
            //
            //    F(p) = 1/2 * ∑{ Wi * (yi - f(xi; p))^2 }
            //    pbest = argmin F(p)
            //
            // We will use the following terms:
            //    Weighting W is a diagonal matrix and can be decomposed as LL', so L = 1/σ
            //    Residuals, R = L(y - f(x; p))
            //    Residual sum of squares, RSS = ||R||^2 = R.DotProduct(R)
            //    Jacobian J = df(x; p)/dp
            //    Gradient g = -J'W(y − f(x; p)) = -J'LR
            //    Approximated Hessian H = J'WJ
            //
            // The Levenberg-Marquardt algorithm is summarized as follows:
            //    initially let μ = τ * max(diag(H)).
            //    repeat
            //       solve linear equations: (H + μI)ΔP = -g
            //       let ρ = (||R||^2 - ||Rnew||^2) / (Δp'(μΔp - g)).
            //       if ρ > ε, P = P + ΔP; μ = μ * max(1/3, 1 - (2ρ - 1)^3); ν = 2;
            //       otherwise μ = μ*ν; ν = 2*ν;
            //
            // References:
            // [1]. Madsen, K., H. B. Nielsen, and O. Tingleff.
            //    "Methods for Non-Linear Least Squares Problems. Technical University of Denmark, 2004. Lecture notes." (2004).
            //    Available Online from: http://orbit.dtu.dk/files/2721358/imm3215.pdf
            // [2]. Gavin, Henri.
            //    "The Levenberg-Marquardt method for nonlinear least squares curve-fitting problems."
            //    Department of Civil and Environmental Engineering, Duke University (2017): 1-19.
            //    Available Online from: http://people.duke.edu/~hpgavin/ce281/lm.pdf

            if (objective == null)
            {
                throw new ArgumentNullException("objective");
            }

            ValidateBounds(initialGuess, lowerBound, upperBound, scales);

            objective.SetParameters(initialGuess, isFixed);

            ExitCondition exitCondition = ExitCondition.None;

            // First, calculate function values and setup variables
            var P     = ProjectToInternalParameters(initialGuess); // current internal parameters
            var Pstep = Vector <double> .Build.Dense(P.Count);     // the change of parameters

            var RSS = EvaluateFunction(objective, P);              // Residual Sum of Squares = R'R

            if (maximumIterations < 0)
            {
                maximumIterations = 200 * (initialGuess.Count + 1);
            }

            // if RSS == NaN, stop
            if (double.IsNaN(RSS))
            {
                exitCondition = ExitCondition.InvalidValues;
                return(new NonlinearMinimizationResult(objective, -1, exitCondition));
            }

            // When only a function evaluation is needed, the caller sets maximumIterations to zero.
            if (maximumIterations == 0)
            {
                exitCondition = ExitCondition.ManuallyStopped;
            }

            // if RSS <= fTol, stop
            if (RSS <= functionTolerance)
            {
                exitCondition = ExitCondition.Converged; // SmallRSS
            }

            // Evaluate gradient and Hessian
            var jac               = EvaluateJacobian(objective, P);
            var Gradient          = jac.Item1;          // objective.Gradient;
            var Hessian           = jac.Item2;          // objective.Hessian;
            var diagonalOfHessian = Hessian.Diagonal(); // diag(H)

            // if ||g||oo <= gtol, found and stop
            if (Gradient.InfinityNorm() <= gradientTolerance)
            {
                exitCondition = ExitCondition.RelativeGradient;
            }

            if (exitCondition != ExitCondition.None)
            {
                return(new NonlinearMinimizationResult(objective, -1, exitCondition));
            }

            double mu         = initialMu * diagonalOfHessian.Max(); // μ
            double nu         = 2;                                   //  ν
            int    iterations = 0;

            while (iterations < maximumIterations && exitCondition == ExitCondition.None)
            {
                iterations++;

                while (true)
                {
                    Hessian.SetDiagonal(Hessian.Diagonal() + mu); // hessian[i, i] = hessian[i, i] + mu;

                    // solve normal equations
                    Pstep = Hessian.Solve(-Gradient);

                    // if ||ΔP|| <= xTol * (||P|| + xTol), found and stop
                    if (Pstep.L2Norm() <= stepTolerance * (stepTolerance + P.DotProduct(P)))
                    {
                        exitCondition = ExitCondition.RelativePoints;
                        break;
                    }

                    var Pnew = P + Pstep; // new parameters to test
                    // evaluate function at Pnew
                    var RSSnew = EvaluateFunction(objective, Pnew);

                    if (double.IsNaN(RSSnew))
                    {
                        exitCondition = ExitCondition.InvalidValues;
                        break;
                    }

                    // calculate the ratio of the actual to the predicted reduction.
                    // ρ = (RSS - RSSnew) / (Δp'(μΔp - g))
                    var predictedReduction = Pstep.DotProduct(mu * Pstep - Gradient);
                    var rho = (predictedReduction != 0)
                            ? (RSS - RSSnew) / predictedReduction
                            : 0;

                    if (rho > 0.0)
                    {
                        // accepted
                        Pnew.CopyTo(P);
                        RSS = RSSnew;

                        // update gradient and Hessian
                        jac               = EvaluateJacobian(objective, P);
                        Gradient          = jac.Item1; // objective.Gradient;
                        Hessian           = jac.Item2; // objective.Hessian;
                        diagonalOfHessian = Hessian.Diagonal();

                        // if ||g||_oo <= gtol, found and stop
                        if (Gradient.InfinityNorm() <= gradientTolerance)
                        {
                            exitCondition = ExitCondition.RelativeGradient;
                        }

                        // if ||R||^2 < fTol, found and stop
                        if (RSS <= functionTolerance)
                        {
                            exitCondition = ExitCondition.Converged; // SmallRSS
                        }

                        mu = mu * Math.Max(1.0 / 3.0, 1.0 - Math.Pow(2.0 * rho - 1.0, 3));
                        nu = 2;

                        break;
                    }
                    else
                    {
                        // rejected, increase μ
                        mu = mu * nu;
                        nu = 2 * nu;

                        Hessian.SetDiagonal(diagonalOfHessian);
                    }
                }
            }

            if (iterations >= maximumIterations)
            {
                exitCondition = ExitCondition.ExceedIterations;
            }

            return(new NonlinearMinimizationResult(objective, iterations, exitCondition));
        }
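As a standalone illustration (hypothetical names, not library code), the μ/ν damping schedule summarized in the comment block at the top of the method above can be written as:

        using System;

        static (double mu, double nu) UpdateDamping(double mu, double nu, double rho)
        {
            if (rho > 0.0)
            {
                // accepted step: shrink μ according to how well the quadratic model predicted the gain
                return (mu * Math.Max(1.0 / 3.0, 1.0 - Math.Pow(2.0 * rho - 1.0, 3)), 2.0);
            }

            // rejected step: increase μ, and double ν so that repeated rejections back off quickly
            return (mu * nu, 2.0 * nu);
        }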
Example #7
 public MinimizationWithLineSearchResult(IObjectiveFunction functionInfo, int iterations, ExitCondition reasonForExit, int totalLineSearchIterations, int iterationsWithNonTrivialLineSearch)
     : base(functionInfo, iterations, reasonForExit)
 {
     TotalLineSearchIterations          = totalLineSearchIterations;
     IterationsWithNonTrivialLineSearch = iterationsWithNonTrivialLineSearch;
 }
        /// <summary>
        /// Non-linear least square fitting by the trust-region algorithm.
        /// </summary>
        /// <param name="objective">The objective model, including function, jacobian, observations, and parameter bounds.</param>
        /// <param name="subproblem">The subproblem</param>
        /// <param name="initialGuess">The initial guess values.</param>
        /// <param name="functionTolerance">The stopping threshold for L2 norm of the residuals.</param>
        /// <param name="gradientTolerance">The stopping threshold for infinity norm of the gradient vector.</param>
        /// <param name="stepTolerance">The stopping threshold for L2 norm of the change of parameters.</param>
        /// <param name="radiusTolerance">The stopping threshold for trust region radius</param>
        /// <param name="maximumIterations">The max iterations.</param>
        /// <returns>The result of the trust-region minimization.</returns>
        public static NonlinearMinimizationResult Minimum(ITrustRegionSubproblem subproblem, IObjectiveModel objective, Vector <double> initialGuess,
                                                          Vector <double> lowerBound = null, Vector <double> upperBound = null, Vector <double> scales   = null, List <bool> isFixed    = null,
                                                          double gradientTolerance   = 1E-8, double stepTolerance       = 1E-8, double functionTolerance = 1E-8, double radiusTolerance = 1E-18, int maximumIterations = -1)
        {
            // Non-linear least square fitting by the trust-region algorithm.
            //
            // For a given datum pair (x, y), uncertainties σ (or weighting W  =  1 / σ^2) and model function f = f(x; p),
            // let's find the parameters of the model so that the sum of the squares of the deviations is minimized.
            //
            //    F(p) = 1/2 * ∑{ Wi * (yi - f(xi; p))^2 }
            //    pbest = argmin F(p)
            //
            // Here, we will use the following terms:
            //    Weighting W is a diagonal matrix and can be decomposed as LL', so L = 1/σ
            //    Residuals, R = L(y - f(x; p))
            //    Residual sum of squares, RSS = ||R||^2 = R.DotProduct(R)
            //    Jacobian J = df(x; p)/dp
            //    Gradient g = -J'W(y − f(x; p)) = -J'LR
            //    Approximated Hessian H = J'WJ
            //
            // The trust region algorithm is summarized as follows:
            //    initially set trust-region radius, Δ
            //    repeat
            //       solve subproblem
            //       update Δ:
            //          let ρ = (RSS - RSSnew) / predRed
            //          if ρ > 0.75, Δ = 2Δ
            //          if ρ < 0.25, Δ = Δ/4
            //          if ρ > eta, P = P + ΔP
            //
            // References:
            // [1]. Madsen, K., H. B. Nielsen, and O. Tingleff.
            //    "Methods for Non-Linear Least Squares Problems. Technical University of Denmark, 2004. Lecture notes." (2004).
            //    Available Online from: http://orbit.dtu.dk/files/2721358/imm3215.pdf
            // [2]. Nocedal, Jorge, and Stephen J. Wright.
            //    Numerical optimization (2006): 101-134.
            // [3]. SciPy
            //    Available Online from: https://github.com/scipy/scipy/blob/master/scipy/optimize/_trustregion.py

            double maxDelta = 1000;
            double eta      = 0;

            if (objective == null)
            {
                throw new ArgumentNullException("objective");
            }

            ValidateBounds(initialGuess, lowerBound, upperBound, scales);

            objective.SetParameters(initialGuess, isFixed);

            ExitCondition exitCondition = ExitCondition.None;

            // First, calculate function values and setup variables
            var P     = ProjectToInternalParameters(initialGuess); // current internal parameters
            var Pstep = Vector <double> .Build.Dense(P.Count);     // the change of parameters

            var RSS = EvaluateFunction(objective, P);              // Residual Sum of Squares, evaluated at the internal parameters as in the Levenberg-Marquardt version above

            if (maximumIterations < 0)
            {
                maximumIterations = 200 * (initialGuess.Count + 1);
            }

            // if RSS == NaN, stop
            if (double.IsNaN(RSS))
            {
                exitCondition = ExitCondition.InvalidValues;
                return(new NonlinearMinimizationResult(objective, -1, exitCondition));
            }

            // When only a function evaluation is needed, the caller sets maximumIterations to zero.
            if (maximumIterations == 0)
            {
                exitCondition = ExitCondition.ManuallyStopped;
            }

            // if ||R||^2 <= fTol, stop
            if (RSS <= functionTolerance)
            {
                exitCondition = ExitCondition.Converged; // SmallRSS
            }

            // evaluate projected gradient and Hessian
            var jac      = EvaluateJacobian(objective, P);
            var Gradient = jac.Item1; // objective.Gradient;
            var Hessian  = jac.Item2; // objective.Hessian;

            // if ||g||_oo <= gtol, found and stop
            if (Gradient.InfinityNorm() <= gradientTolerance)
            {
                exitCondition = ExitCondition.RelativeGradient; // SmallGradient
            }

            if (exitCondition != ExitCondition.None)
            {
                return(new NonlinearMinimizationResult(objective, -1, exitCondition));
            }

            // initialize trust-region radius, Δ
            double delta = Gradient.DotProduct(Gradient) / (Hessian * Gradient).DotProduct(Gradient);

            delta = Math.Max(1.0, Math.Min(delta, maxDelta));

            int  iterations  = 0;
            bool hitBoundary = false;

            while (iterations < maximumIterations && exitCondition == ExitCondition.None)
            {
                iterations++;

                // solve the subproblem
                subproblem.Solve(objective, delta);
                Pstep       = subproblem.Pstep;
                hitBoundary = subproblem.HitBoundary;

                // predicted reduction = L(0) - L(Δp) = -Δp'g - 1/2 * Δp'HΔp
                var predictedReduction = -Gradient.DotProduct(Pstep) - 0.5 * Pstep.DotProduct(Hessian * Pstep);

                if (Pstep.L2Norm() <= stepTolerance * (stepTolerance + P.L2Norm()))
                {
                    exitCondition = ExitCondition.RelativePoints; // SmallRelativeParameters
                    break;
                }

                var Pnew = P + Pstep; // parameters to test
                // evaluate function at Pnew
                var RSSnew = EvaluateFunction(objective, Pnew);

                // if RSS == NaN, stop
                if (double.IsNaN(RSSnew))
                {
                    exitCondition = ExitCondition.InvalidValues;
                    break;
                }

                // calculate the ratio of the actual to the predicted reduction.
                double rho = (predictedReduction != 0)
                        ? (RSS - RSSnew) / predictedReduction
                        : 0.0;

                if (rho > 0.75 && hitBoundary)
                {
                    delta = Math.Min(2.0 * delta, maxDelta);
                }
                else if (rho < 0.25)
                {
                    delta = delta * 0.25;
                    if (delta <= radiusTolerance * (radiusTolerance + P.DotProduct(P)))
                    {
                        exitCondition = ExitCondition.LackOfProgress;
                        break;
                    }
                }

                if (rho > eta)
                {
                    // accepted
                    Pnew.CopyTo(P);
                    RSS = RSSnew;

                    // evaluate projected gradient and Hessian
                    jac      = EvaluateJacobian(objective, P);
                    Gradient = jac.Item1; // objective.Gradient;
                    Hessian  = jac.Item2; // objective.Hessian;

                    // if ||g||_oo <= gtol, found and stop
                    if (Gradient.InfinityNorm() <= gradientTolerance)
                    {
                        exitCondition = ExitCondition.RelativeGradient;
                    }

                    // if ||R||^2 < fTol, found and stop
                    if (RSS <= functionTolerance)
                    {
                        exitCondition = ExitCondition.Converged; // SmallRSS
                    }
                }
            }

            if (iterations >= maximumIterations)
            {
                exitCondition = ExitCondition.ExceedIterations;
            }

            return(new NonlinearMinimizationResult(objective, iterations, exitCondition));
        }
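The Δ update rule from the comment block above, written out as a standalone sketch (hypothetical helper, not the library's code):

        using System;

        static double UpdateTrustRadius(double delta, double rho, bool hitBoundary, double maxDelta)
        {
            if (rho > 0.75 && hitBoundary)
            {
                return Math.Min(2.0 * delta, maxDelta); // good model agreement and the step hit the boundary: grow Δ
            }

            if (rho < 0.25)
            {
                return 0.25 * delta; // poor agreement between model and function: shrink Δ
            }

            return delta;
        }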
Example #9
        // Implemented following http://www.math.washington.edu/~burke/crs/408/lectures/L9-weak-Wolfe.pdf
        public LineSearchOutput FindConformingStep(IObjectiveFunction objective, IEvaluation starting_point, Vector <double> search_direction, double initial_step, double upper_bound = Double.PositiveInfinity)
        {
            if (!(objective is ObjectiveChecker))
            {
                objective = new ObjectiveChecker(objective, this.ValidateValue, this.ValidateGradient, null);
            }

            double lower_bound = 0.0;
            double step        = initial_step;

            double          initial_value    = starting_point.Value;
            Vector <double> initial_gradient = starting_point.Gradient;

            double initial_dd = search_direction * initial_gradient;

            int           ii;
            IEvaluation   candidate_eval  = null;
            ExitCondition reason_for_exit = ExitCondition.None;

            for (ii = 0; ii < this.MaximumIterations; ++ii)
            {
                candidate_eval = objective.Evaluate(starting_point.Point + search_direction * step);

                double step_dd = search_direction * candidate_eval.Gradient;

                if (candidate_eval.Value > initial_value + this.C1 * step * initial_dd)
                {
                    upper_bound = step;
                    step        = 0.5 * (lower_bound + upper_bound);
                }
                else if (Math.Abs(step_dd) > this.C2 * Math.Abs(initial_dd))
                {
                    lower_bound = step;
                    step        = Double.IsPositiveInfinity(upper_bound) ? 2 * lower_bound : 0.5 * (lower_bound + upper_bound);
                }
                else
                {
                    reason_for_exit = ExitCondition.StrongWolfeCriteria;
                    break;
                }

                if (!Double.IsInfinity(upper_bound))
                {
                    double max_rel_change = 0.0;
                    for (int jj = 0; jj < candidate_eval.Point.Count; ++jj)
                    {
                        double tmp = Math.Abs(search_direction[jj] * (upper_bound - lower_bound)) / Math.Max(Math.Abs(candidate_eval.Point[jj]), 1.0);
                        max_rel_change = Math.Max(max_rel_change, tmp);
                    }
                    if (max_rel_change < this.ParameterTolerance)
                    {
                        reason_for_exit = ExitCondition.LackOfProgress;
                        break;
                    }
                }
            }

            if (ii == this.MaximumIterations && Double.IsPositiveInfinity(upper_bound))
            {
                throw new MaximumIterationsException(String.Format("Maximum iterations ({0}) reached. Function appears to be unbounded in search direction.", this.MaximumIterations));
            }
            else if (ii == this.MaximumIterations)
            {
                throw new MaximumIterationsException(String.Format("Maximum iterations ({0}) reached.", this.MaximumIterations));
            }
            else
            {
                return(new LineSearchOutput(candidate_eval, ii, step, reason_for_exit));
            }
        }
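The bisection loop above is driven by two tests: sufficient decrease (Armijo) and the strong curvature condition. A minimal sketch of the two predicates, with hypothetical names:

        using System;

        // Armijo / sufficient-decrease condition: f(x + αp) ≤ f(x) + c1·α·∇f(x)ᵀp
        static bool SufficientDecrease(double candidateValue, double initialValue, double c1, double step, double initialDd)
        {
            return candidateValue <= initialValue + c1 * step * initialDd;
        }

        // Strong Wolfe curvature condition: |∇f(x + αp)ᵀp| ≤ c2·|∇f(x)ᵀp|
        static bool StrongCurvature(double stepDd, double c2, double initialDd)
        {
            return Math.Abs(stepDd) <= c2 * Math.Abs(initialDd);
        }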
Example #10
        /// <summary>Finds a step size along the search direction that satisfies the Wolfe conditions.</summary>
        /// <param name="startingPoint">The objective function being optimized, evaluated at the starting point of the search</param>
        /// <param name="searchDirection">Search direction</param>
        /// <param name="initialStep">Initial size of the step in the search direction</param>
        /// <param name="upperBound">The upper bound</param>
        public LineSearchResult FindConformingStep(IObjectiveFunctionEvaluation startingPoint, Vector <double> searchDirection, double initialStep, double upperBound)
        {
            ValidateInputArguments(startingPoint, searchDirection, initialStep, upperBound);

            double lowerBound = 0.0;
            double step       = initialStep;

            double          initialValue    = startingPoint.Value;
            Vector <double> initialGradient = startingPoint.Gradient;

            double initialDd = searchDirection * initialGradient;

            IObjectiveFunction objective = startingPoint.CreateNew();
            int           ii;
            ExitCondition reasonForExit = ExitCondition.None;

            for (ii = 0; ii < MaximumIterations; ++ii)
            {
                objective.EvaluateAt(startingPoint.Point + searchDirection * step);
                ValidateGradient(objective);
                ValidateValue(objective);

                double stepDd = searchDirection * objective.Gradient;

                if (objective.Value > initialValue + C1 * step * initialDd)
                {
                    upperBound = step;
                    step       = 0.5 * (lowerBound + upperBound);
                }
                else if (WolfeCondition(stepDd, initialDd))
                {
                    lowerBound = step;
                    step       = double.IsPositiveInfinity(upperBound) ? 2 * lowerBound : 0.5 * (lowerBound + upperBound);
                }
                else
                {
                    reasonForExit = WolfeExitCondition;
                    break;
                }

                if (!double.IsInfinity(upperBound))
                {
                    double maxRelChange = 0.0;
                    for (int jj = 0; jj < objective.Point.Count; ++jj)
                    {
                        double tmp = Math.Abs(searchDirection[jj] * (upperBound - lowerBound)) / Math.Max(Math.Abs(objective.Point[jj]), 1.0);
                        maxRelChange = Math.Max(maxRelChange, tmp);
                    }
                    if (maxRelChange < ParameterTolerance)
                    {
                        reasonForExit = ExitCondition.LackOfProgress;
                        break;
                    }
                }
            }

            if (ii == MaximumIterations && Double.IsPositiveInfinity(upperBound))
            {
                throw new MaximumIterationsException(String.Format("Maximum iterations ({0}) reached. Function appears to be unbounded in search direction.", MaximumIterations));
            }

            if (ii == MaximumIterations)
            {
                throw new MaximumIterationsException(String.Format("Maximum iterations ({0}) reached.", MaximumIterations));
            }

            return(new LineSearchResult(objective, ii, step, reasonForExit));
        }
Example #11
 public MinimizationWithLineSearchOutput(IEvaluation function_info, int iterations, ExitCondition reason_for_exit, int total_line_search_iterations, int iterations_with_non_trivial_line_search)
     : base(function_info, iterations, reason_for_exit)
 {
     this.TotalLineSearchIterations          = total_line_search_iterations;
     this.IterationsWithNonTrivialLineSearch = iterations_with_non_trivial_line_search;
 }
Example #12
        public MinimizationOutput FindMinimum(IObjectiveFunction objective, Vector <double> lower_bound, Vector <double> upper_bound, Vector <double> initial_guess)
        {
            if (!objective.GradientSupported)
            {
                throw new IncompatibleObjectiveException("Gradient not supported in objective function, but required for BFGS minimization.");
            }

            if (!(objective is ObjectiveChecker))
            {
                objective = new ObjectiveChecker(objective, this.ValidateObjective, this.ValidateGradient, null);
            }

            // Check that dimensions match
            if (lower_bound.Count != upper_bound.Count || lower_bound.Count != initial_guess.Count)
            {
                throw new ArgumentException("Dimensions of bounds and/or initial guess do not match.");
            }

            // Check that initial guess is feasible
            for (int ii = 0; ii < initial_guess.Count; ++ii)
            {
                if (initial_guess[ii] < lower_bound[ii] || initial_guess[ii] > upper_bound[ii])
                {
                    throw new ArgumentException("Initial guess is not in the feasible region");
                }
            }

            IEvaluation initial_eval = objective.Evaluate(initial_guess);

            // Check that we're not already done
            ExitCondition current_exit_condition = this.ExitCriteriaSatisfied(initial_eval, null, lower_bound, upper_bound, 0);

            if (current_exit_condition != ExitCondition.None)
            {
                return(new MinimizationOutput(initial_eval, 0, current_exit_condition));
            }

            // Set up line search algorithm
            var line_searcher = new StrongWolfeLineSearch(1e-4, 0.9, Math.Max(this.ParameterTolerance, 1e-5), max_iterations: 1000);

            // Declare state variables
            IEvaluation     candidate_point, previous_point;
            double          step_size;
            Vector <double> gradient, step, line_search_direction, reduced_solution1, reduced_gradient, reduced_initial_point, reduced_cauchy_point, solution1;
            Matrix <double> pseudo_hessian, reduced_hessian;
            List <int>      reduced_map;

            // First step
            pseudo_hessian = DiagonalMatrix.CreateIdentity(initial_guess.Count);

            // Determine active set
            var gradient_projection_result = QuadraticGradientProjectionSearch.search(initial_eval.Point, initial_eval.Gradient, pseudo_hessian, lower_bound, upper_bound);
            var cauchy_point = gradient_projection_result.Item1;
            var fixed_count  = gradient_projection_result.Item2;
            var is_fixed     = gradient_projection_result.Item3;
            var free_count   = lower_bound.Count - fixed_count;

            if (free_count > 0)
            {
                reduced_gradient      = new DenseVector(free_count);
                reduced_hessian       = new DenseMatrix(free_count, free_count);
                reduced_map           = new List <int>(free_count);
                reduced_initial_point = new DenseVector(free_count);
                reduced_cauchy_point  = new DenseVector(free_count);

                CreateReducedData(initial_eval.Point, cauchy_point, is_fixed, lower_bound, upper_bound, initial_eval.Gradient, pseudo_hessian, reduced_initial_point, reduced_cauchy_point, reduced_gradient, reduced_hessian, reduced_map);

                // Determine search direction and maximum step size
                reduced_solution1 = reduced_initial_point + reduced_hessian.Cholesky().Solve(-reduced_gradient);

                solution1 = reduced_to_full(reduced_map, reduced_solution1, cauchy_point);
            }
            else
            {
                solution1 = cauchy_point;
            }

            var direction_from_cauchy      = solution1 - cauchy_point;
            var max_step_from_cauchy_point = FindMaxStep(cauchy_point, direction_from_cauchy, lower_bound, upper_bound);

            var solution2 = cauchy_point + Math.Min(max_step_from_cauchy_point, 1.0) * direction_from_cauchy;

            line_search_direction = solution2 - initial_eval.Point;
            var max_line_search_step = FindMaxStep(initial_eval.Point, line_search_direction, lower_bound, upper_bound);
            var est_step_size        = -initial_eval.Gradient * line_search_direction / (line_search_direction * pseudo_hessian * line_search_direction);

            var starting_step_size = Math.Min(Math.Max(est_step_size, 1.0), max_line_search_step);

            // Line search
            LineSearchOutput result;

            try
            {
                result = line_searcher.FindConformingStep(objective, initial_eval, line_search_direction, starting_step_size, upper_bound: max_line_search_step);
            }
            catch (Exception e)
            {
                throw new InnerOptimizationException("Line search failed.", e);
            }

            previous_point  = initial_eval;
            candidate_point = result.FunctionInfoAtMinimum;
            gradient        = candidate_point.Gradient;
            step            = candidate_point.Point - initial_guess;
            step_size       = result.FinalStep;

            // Subsequent steps
            int iterations;
            int total_line_search_steps = result.Iterations;
            int iterations_with_nontrivial_line_search = result.Iterations > 0 ? 1 : 0; // count the first search if it took any steps, consistent with the accumulation below
            int steepest_descent_resets = 0;

            for (iterations = 1; iterations < this.MaximumIterations; ++iterations)
            {
                // Do BFGS update
                var y = candidate_point.Gradient - previous_point.Gradient;

                double sy = step * y;
                if (sy > 0.0) // only do update if it will create a positive definite matrix
                {
                    double sts = step * step;
                    //inverse_pseudo_hessian = inverse_pseudo_hessian + ((sy + y * inverse_pseudo_hessian * y) / Math.Pow(sy, 2.0)) * step.OuterProduct(step) - ((inverse_pseudo_hessian * y.ToColumnMatrix()) * step.ToRowMatrix() + step.ToColumnMatrix() * (y.ToRowMatrix() * inverse_pseudo_hessian)) * (1.0 / sy);
                    var Hs  = pseudo_hessian * step;
                    var sHs = step * pseudo_hessian * step;
                    pseudo_hessian = pseudo_hessian + y.OuterProduct(y) * (1.0 / sy) - Hs.OuterProduct(Hs) * (1.0 / sHs);
                }
                else
                {
                    steepest_descent_resets += 1;
                    //pseudo_hessian = LinearAlgebra.Double.DiagonalMatrix.Identity(initial_guess.Count);
                }

                // Determine active set
                gradient_projection_result = QuadraticGradientProjectionSearch.search(candidate_point.Point, candidate_point.Gradient, pseudo_hessian, lower_bound, upper_bound);
                cauchy_point = gradient_projection_result.Item1;
                fixed_count  = gradient_projection_result.Item2;
                is_fixed     = gradient_projection_result.Item3;
                free_count   = lower_bound.Count - fixed_count;

                if (free_count > 0)
                {
                    reduced_gradient      = new DenseVector(free_count);
                    reduced_hessian       = new DenseMatrix(free_count, free_count);
                    reduced_map           = new List <int>(free_count);
                    reduced_initial_point = new DenseVector(free_count);
                    reduced_cauchy_point  = new DenseVector(free_count);

                    CreateReducedData(candidate_point.Point, cauchy_point, is_fixed, lower_bound, upper_bound, candidate_point.Gradient, pseudo_hessian, reduced_initial_point, reduced_cauchy_point, reduced_gradient, reduced_hessian, reduced_map);

                    // Determine search direction and maximum step size
                    reduced_solution1 = reduced_initial_point + reduced_hessian.Cholesky().Solve(-reduced_gradient);

                    solution1 = reduced_to_full(reduced_map, reduced_solution1, cauchy_point);
                }
                else
                {
                    solution1 = cauchy_point;
                }

                direction_from_cauchy      = solution1 - cauchy_point;
                max_step_from_cauchy_point = FindMaxStep(cauchy_point, direction_from_cauchy, lower_bound, upper_bound);
                //var cauchy_eval = objective.Evaluate(cauchy_point);

                solution2 = cauchy_point + Math.Min(max_step_from_cauchy_point, 1.0) * direction_from_cauchy;

                line_search_direction = solution2 - candidate_point.Point;
                max_line_search_step  = FindMaxStep(candidate_point.Point, line_search_direction, lower_bound, upper_bound);

                //line_search_direction = solution1 - candidate_point.Point;
                //max_line_search_step = FindMaxStep(candidate_point.Point, line_search_direction, lower_bound, upper_bound);

                if (max_line_search_step == 0.0)
                {
                    line_search_direction = cauchy_point - candidate_point.Point;
                    max_line_search_step  = FindMaxStep(candidate_point.Point, line_search_direction, lower_bound, upper_bound);
                }

                est_step_size = -candidate_point.Gradient * line_search_direction / (line_search_direction * pseudo_hessian * line_search_direction);

                starting_step_size = Math.Min(Math.Max(est_step_size, 1.0), max_line_search_step);

                // Line search
                try
                {
                    result = line_searcher.FindConformingStep(objective, candidate_point, line_search_direction, starting_step_size, upper_bound: max_line_search_step);
                    //result = line_searcher.FindConformingStep(objective, cauchy_eval, direction_from_cauchy, Math.Min(1.0, max_step_from_cauchy_point), upper_bound: max_step_from_cauchy_point);
                }
                catch (Exception e)
                {
                    throw new InnerOptimizationException("Line search failed.", e);
                }

                iterations_with_nontrivial_line_search += result.Iterations > 0 ? 1 : 0;
                total_line_search_steps += result.Iterations;

                step_size       = result.FinalStep;
                step            = result.FunctionInfoAtMinimum.Point - candidate_point.Point;
                previous_point  = candidate_point;
                candidate_point = result.FunctionInfoAtMinimum;

                current_exit_condition = this.ExitCriteriaSatisfied(candidate_point, previous_point, lower_bound, upper_bound, iterations);
                if (current_exit_condition != ExitCondition.None)
                {
                    break;
                }
            }

            if (iterations == this.MaximumIterations && current_exit_condition == ExitCondition.None)
            {
                throw new MaximumIterationsException(String.Format("Maximum iterations ({0}) reached.", this.MaximumIterations));
            }

            return(new MinimizationWithLineSearchOutput(candidate_point, iterations, current_exit_condition, total_line_search_steps, iterations_with_nontrivial_line_search));
        }
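A hedged sketch of what a FindMaxStep helper for box constraints typically computes: the largest nonnegative step along the direction that keeps every coordinate inside [lower_bound, upper_bound]. Illustrative code under that assumption, not the library's implementation:

        using System;
        using MathNet.Numerics.LinearAlgebra;

        static double FindMaxStepSketch(Vector<double> point, Vector<double> direction, Vector<double> lowerBound, Vector<double> upperBound)
        {
            double maxStep = double.PositiveInfinity;
            for (int ii = 0; ii < point.Count; ++ii)
            {
                if (direction[ii] > 0.0)
                {
                    maxStep = Math.Min(maxStep, (upperBound[ii] - point[ii]) / direction[ii]);
                }
                else if (direction[ii] < 0.0)
                {
                    maxStep = Math.Min(maxStep, (lowerBound[ii] - point[ii]) / direction[ii]);
                }
            }
            return maxStep;
        }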
Example #13
 public MinimizationOutput(IEvaluation function_info, int iterations, ExitCondition reason_for_exit)
 {
     this.FunctionInfoAtMinimum = function_info;
     this.Iterations            = iterations;
     this.ReasonForExit         = reason_for_exit;
 }
Example #14
        /// <summary>
        /// Find the minimum of the objective function given lower and upper bounds
        /// </summary>
        /// <param name="objective">The objective function, must support a gradient</param>
        /// <param name="lowerBound">The lower bound</param>
        /// <param name="upperBound">The upper bound</param>
        /// <param name="initialGuess">The initial guess</param>
        /// <returns>The MinimizationResult which contains the minimum and the ExitCondition</returns>
        public MinimizationResult FindMinimum(IObjectiveFunction objective, Vector <double> lowerBound, Vector <double> upperBound, Vector <double> initialGuess)
        {
            _lowerBound = lowerBound;
            _upperBound = upperBound;
            if (!objective.IsGradientSupported)
            {
                throw new IncompatibleObjectiveException("Gradient not supported in objective function, but required for BFGS minimization.");
            }

            // Check that dimensions match
            if (lowerBound.Count != upperBound.Count || lowerBound.Count != initialGuess.Count)
            {
                throw new ArgumentException("Dimensions of bounds and/or initial guess do not match.");
            }

            // Check that initial guess is feasible
            for (int ii = 0; ii < initialGuess.Count; ++ii)
            {
                if (initialGuess[ii] < lowerBound[ii] || initialGuess[ii] > upperBound[ii])
                {
                    throw new ArgumentException("Initial guess is not in the feasible region");
                }
            }

            objective.EvaluateAt(initialGuess);
            ValidateGradientAndObjective(objective);

            // Check that we're not already done
            ExitCondition currentExitCondition = ExitCriteriaSatisfied(objective, null, 0);

            if (currentExitCondition != ExitCondition.None)
            {
                return(new MinimizationResult(objective, 0, currentExitCondition));
            }

            // Set up line search algorithm
            var lineSearcher = new StrongWolfeLineSearch(1e-4, 0.9, Math.Max(ParameterTolerance, 1e-5), maxIterations: 1000);

            // Declare state variables
            Vector <double> reducedSolution1, reducedGradient, reducedInitialPoint, reducedCauchyPoint, solution1;
            Matrix <double> reducedHessian;
            List <int>      reducedMap;

            // First step
            var pseudoHessian = CreateMatrix.DiagonalIdentity <double>(initialGuess.Count);

            // Determine active set
            var gradientProjectionResult = QuadraticGradientProjectionSearch.Search(objective.Point, objective.Gradient, pseudoHessian, lowerBound, upperBound);
            var cauchyPoint = gradientProjectionResult.CauchyPoint;
            var fixedCount  = gradientProjectionResult.FixedCount;
            var isFixed     = gradientProjectionResult.IsFixed;
            var freeCount   = lowerBound.Count - fixedCount;

            if (freeCount > 0)
            {
                reducedGradient     = new DenseVector(freeCount);
                reducedHessian      = new DenseMatrix(freeCount, freeCount);
                reducedMap          = new List <int>(freeCount);
                reducedInitialPoint = new DenseVector(freeCount);
                reducedCauchyPoint  = new DenseVector(freeCount);

                CreateReducedData(objective.Point, cauchyPoint, isFixed, lowerBound, upperBound, objective.Gradient, pseudoHessian, reducedInitialPoint, reducedCauchyPoint, reducedGradient, reducedHessian, reducedMap);

                // Determine search direction and maximum step size
                reducedSolution1 = reducedInitialPoint + reducedHessian.Cholesky().Solve(-reducedGradient);

                solution1 = ReducedToFull(reducedMap, reducedSolution1, cauchyPoint);
            }
            else
            {
                solution1 = cauchyPoint;
            }

            var directionFromCauchy    = solution1 - cauchyPoint;
            var maxStepFromCauchyPoint = FindMaxStep(cauchyPoint, directionFromCauchy, lowerBound, upperBound);

            var solution2 = cauchyPoint + Math.Min(maxStepFromCauchyPoint, 1.0) * directionFromCauchy;

            var lineSearchDirection = solution2 - objective.Point;
            var maxLineSearchStep   = FindMaxStep(objective.Point, lineSearchDirection, lowerBound, upperBound);
            var estStepSize         = -objective.Gradient * lineSearchDirection / (lineSearchDirection * pseudoHessian * lineSearchDirection);

            var startingStepSize = Math.Min(Math.Max(estStepSize, 1.0), maxLineSearchStep);

            // Line search
            LineSearchResult lineSearchResult;

            try
            {
                lineSearchResult = lineSearcher.FindConformingStep(objective, lineSearchDirection, startingStepSize, upperBound: maxLineSearchStep);
            }
            catch (Exception e)
            {
                throw new InnerOptimizationException("Line search failed.", e);
            }

            var previousPoint  = objective.Fork();
            var candidatePoint = lineSearchResult.FunctionInfoAtMinimum;
            var gradient       = candidatePoint.Gradient;
            var step           = candidatePoint.Point - initialGuess;

            // Subsequent steps
            int totalLineSearchSteps = lineSearchResult.Iterations;
            int iterationsWithNontrivialLineSearch = lineSearchResult.Iterations > 0 ? 1 : 0; // consistent with the accumulation inside DoBfgsUpdate

            int iterations = DoBfgsUpdate(ref currentExitCondition, lineSearcher, ref pseudoHessian, ref lineSearchDirection, ref previousPoint, ref lineSearchResult, ref candidatePoint, ref step, ref totalLineSearchSteps, ref iterationsWithNontrivialLineSearch);

            if (iterations == MaximumIterations && currentExitCondition == ExitCondition.None)
            {
                throw new MaximumIterationsException(string.Format("Maximum iterations ({0}) reached.", MaximumIterations));
            }

            return(new MinimizationWithLineSearchResult(candidatePoint, iterations, currentExitCondition, totalLineSearchSteps, iterationsWithNontrivialLineSearch));
        }
Example #15
 public MinimizationResult1D(IEvaluation1D functionInfo, int iterations, ExitCondition reasonForExit)
 {
     FunctionInfoAtMinimum = functionInfo;
     Iterations            = iterations;
     ReasonForExit         = reasonForExit;
 }
Example #16
 public LineSearchOutput(IEvaluation function_info, int iterations, double final_step, ExitCondition reason_for_exit)
     : base(function_info, iterations, reason_for_exit)
 {
     this.FinalStep = final_step;
 }
Example #17
 public MinimizationResult(IObjectiveFunction functionInfo, int iterations, ExitCondition reasonForExit)
 {
     FunctionInfoAtMinimum = functionInfo;
     Iterations            = iterations;
     ReasonForExit         = reasonForExit;
 }
Example #18
 public ExitStateCondition(ExitCondition test, State state)
 {
     ExitTest = test;
     ToState = state;
 }
Example #19
        /// <summary>
        /// Find the minimum of the objective function given lower and upper bounds
        /// </summary>
        /// <param name="objective">The objective function, must support a gradient</param>
        /// <param name="initialGuess">The initial guess</param>
        /// <returns>The MinimizationResult which contains the minimum and the ExitCondition</returns>
        public MinimizationResult FindMinimum(IObjectiveFunction objective, Vector <double> initialGuess)
        {
            if (!objective.IsGradientSupported)
            {
                throw new IncompatibleObjectiveException("Gradient not supported in objective function, but required for L-BFGS minimization.");
            }

            objective.EvaluateAt(initialGuess);
            ValidateGradientAndObjective(objective);

            // Check that we're not already done
            ExitCondition currentExitCondition = ExitCriteriaSatisfied(objective, null, 0);

            if (currentExitCondition != ExitCondition.None)
            {
                return(new MinimizationResult(objective, 0, currentExitCondition));
            }

            // Set up line search algorithm
            var lineSearcher = new WeakWolfeLineSearch(1e-4, 0.9, Math.Max(ParameterTolerance, 1e-10), 1000);

            // First step

            var lineSearchDirection = -objective.Gradient;
            var stepSize            = (100 * GradientTolerance) / (lineSearchDirection * lineSearchDirection);

            var previousPoint = objective;

            LineSearchResult lineSearchResult;

            try
            {
                lineSearchResult = lineSearcher.FindConformingStep(objective, lineSearchDirection, stepSize);
            }
            catch (OptimizationException e)
            {
                throw new InnerOptimizationException("Line search failed.", e);
            }
            catch (ArgumentException e)
            {
                throw new InnerOptimizationException("Line search failed.", e);
            }

            var candidate = lineSearchResult.FunctionInfoAtMinimum;

            ValidateGradientAndObjective(candidate);

            var gradient  = candidate.Gradient;
            var step      = candidate.Point - initialGuess;
            var yk        = candidate.Gradient - previousPoint.Gradient;
            var ykhistory = new List <Vector <double> >()
            {
                yk
            };
            var skhistory = new List <Vector <double> >()
            {
                step
            };
            var rhokhistory = new List <double>()
            {
                1.0 / yk.DotProduct(step)
            };

            // Subsequent steps
            int iterations           = 1;
            int totalLineSearchSteps = lineSearchResult.Iterations;
            int iterationsWithNontrivialLineSearch = lineSearchResult.Iterations > 0 ? 1 : 0; // consistent with the accumulation below

            previousPoint = candidate;
            while ((iterations++ < MaximumIterations) && (previousPoint.Gradient.Norm(2) >= GradientTolerance))
            {
                lineSearchDirection = -ApplyLbfgsUpdate(previousPoint, ykhistory, skhistory, rhokhistory);
                var directionalDerivative = previousPoint.Gradient.DotProduct(lineSearchDirection);
                if (directionalDerivative > 0)
                {
                    throw new InnerOptimizationException("Direction is not a descent direction.");
                }
                try
                {
                    lineSearchResult = lineSearcher.FindConformingStep(previousPoint, lineSearchDirection, 1.0);
                }
                catch (OptimizationException e)
                {
                    throw new InnerOptimizationException("Line search failed.", e);
                }
                catch (ArgumentException e)
                {
                    throw new InnerOptimizationException("Line search failed.", e);
                }
                iterationsWithNontrivialLineSearch += lineSearchResult.Iterations > 0 ? 1 : 0;
                totalLineSearchSteps += lineSearchResult.Iterations;

                candidate            = lineSearchResult.FunctionInfoAtMinimum;
                currentExitCondition = ExitCriteriaSatisfied(candidate, previousPoint, iterations);
                if (currentExitCondition != ExitCondition.None)
                {
                    break;
                }
                step = candidate.Point - previousPoint.Point;
                yk   = candidate.Gradient - previousPoint.Gradient;
                ykhistory.Add(yk);
                skhistory.Add(step);
                rhokhistory.Add(1.0 / yk.DotProduct(step));
                previousPoint = candidate;
                if (ykhistory.Count > Memory)
                {
                    ykhistory.RemoveAt(0);
                    skhistory.RemoveAt(0);
                    rhokhistory.RemoveAt(0);
                }
            }

            if ((iterations == MaximumIterations) && (currentExitCondition == ExitCondition.None))
            {
                throw new MaximumIterationsException(String.Format("Maximum iterations ({0}) reached.", MaximumIterations));
            }

            // Report the loop's exit condition when one was set; otherwise the while-condition
            // on the gradient norm ended the search, i.e. ExitCondition.AbsoluteGradient.
            return(new MinimizationWithLineSearchResult(candidate, iterations,
                                                        currentExitCondition != ExitCondition.None ? currentExitCondition : ExitCondition.AbsoluteGradient,
                                                        totalLineSearchSteps, iterationsWithNontrivialLineSearch));
        }
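A minimal sketch of the standard L-BFGS two-loop recursion (Nocedal & Wright, Algorithm 7.4) that an ApplyLbfgsUpdate implementation typically performs over the sk/yk/ρk histories maintained above. The name and signature are illustrative assumptions:

        using System.Collections.Generic;
        using MathNet.Numerics.LinearAlgebra;

        static Vector<double> TwoLoopRecursion(Vector<double> gradient, IList<Vector<double>> skHistory, IList<Vector<double>> ykHistory, IList<double> rhokHistory)
        {
            int m     = skHistory.Count; // assumes at least one (s, y) pair is stored
            var q     = gradient.Clone();
            var alpha = new double[m];

            for (int i = m - 1; i >= 0; --i)
            {
                alpha[i] = rhokHistory[i] * skHistory[i].DotProduct(q);
                q       -= alpha[i] * ykHistory[i];
            }

            // scale by γ = sᵀy / yᵀy as the initial inverse-Hessian approximation
            double gamma = skHistory[m - 1].DotProduct(ykHistory[m - 1]) / ykHistory[m - 1].DotProduct(ykHistory[m - 1]);
            var    r     = gamma * q;

            for (int i = 0; i < m; ++i)
            {
                double beta = rhokHistory[i] * ykHistory[i].DotProduct(r);
                r          += (alpha[i] - beta) * skHistory[i];
            }

            return r; // ≈ H⁻¹∇f; the caller negates this to obtain a descent direction
        }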
        /// <summary>
        /// Finds the minimum of the objective function with an initial perturbation
        /// </summary>
        /// <param name="objectiveFunction">The objective function, no gradient or hessian needed</param>
        /// <param name="initialGuess">The initial guess</param>
        /// <param name="initalPertubation">The initial perturbation</param>
        /// <returns>The minimum point</returns>
        public MinimizationResult FindMinimum(IObjectiveFunction objectiveFunction, Vector <double> initialGuess, Vector <double> initalPertubation)
        {
            // confirm that we are in a position to commence
            if (objectiveFunction == null)
            {
                throw new ArgumentNullException("objectiveFunction", "ObjectiveFunction must be set to a valid ObjectiveFunctionDelegate");
            }

            if (initialGuess == null)
            {
                throw new ArgumentNullException("initialGuess", "initialGuess must be initialized");
            }

            if (initalPertubation == null)
            {
                throw new ArgumentNullException("initalPertubation", "initalPertubation must be initialized, if unknown use overloaded version of FindMinimum()");
            }

            SimplexConstant[] simplexConstants = SimplexConstant.CreateSimplexConstantsFromVectors(initialGuess, initalPertubation);

            // create the initial simplex
            int numDimensions = simplexConstants.Length;
            int numVertices   = numDimensions + 1;

            Vector <double>[] vertices    = InitializeVertices(simplexConstants);
            double[]          errorValues = InitializeErrorValues(vertices, objectiveFunction);

            int           evaluationCount = 0;
            ExitCondition exitCondition   = ExitCondition.None;
            ErrorProfile  errorProfile;

            // iterate until we converge, or complete our permitted number of iterations
            while (true)
            {
                errorProfile = EvaluateSimplex(errorValues);

                // see if the range in point heights is small enough to exit
                if (HasConverged(ConvergenceTolerance, errorProfile, errorValues))
                {
                    exitCondition = ExitCondition.Converged;
                    break;
                }

                // attempt a reflection of the simplex
                double reflectionPointValue = TryToScaleSimplex(-1.0, ref errorProfile, vertices, errorValues, objectiveFunction);
                ++evaluationCount;
                if (reflectionPointValue <= errorValues[errorProfile.LowestIndex])
                {
                    // it's better than the best point, so attempt an expansion of the simplex
                    double expansionPointValue = TryToScaleSimplex(2.0, ref errorProfile, vertices, errorValues, objectiveFunction); // return value not needed here; the simplex arrays are updated by the call
                    ++evaluationCount;
                }
                else if (reflectionPointValue >= errorValues[errorProfile.NextHighestIndex])
                {
                    // it would be worse than the second best point, so attempt a contraction to look
                    // for an intermediate point
                    double currentWorst          = errorValues[errorProfile.HighestIndex];
                    double contractionPointValue = TryToScaleSimplex(0.5, ref errorProfile, vertices, errorValues, objectiveFunction);
                    ++evaluationCount;
                    if (contractionPointValue >= currentWorst)
                    {
                        // that would be even worse, so let's try to contract uniformly towards the low point;
                        // don't bother to update the error profile, we'll do it at the start of the
                        // next iteration
                        ShrinkSimplex(errorProfile, vertices, errorValues, objectiveFunction);
                        evaluationCount += numVertices; // that required one function evaluation for each vertex; keep track
                    }
                }
                // check to see if we have exceeded our allotted number of evaluations
                if (evaluationCount >= MaximumIterations)
                {
                    throw new MaximumIterationsException(String.Format("Maximum iterations ({0}) reached.", MaximumIterations));
                }
            }
            var regressionResult = new MinimizationResult(objectiveFunction, evaluationCount, exitCondition);

            return(regressionResult);
        }
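This FindMinimum overload matches the perturbation-based entry point of Math.NET Numerics' NelderMeadSimplex. Under that assumption, a minimal usage sketch might look as follows; the tolerance, iteration limit, and perturbation values are illustrative, not canonical:

// Usage sketch (assumes Math.NET Numerics' NelderMeadSimplex; values illustrative).
using System;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.Optimization;

// Rosenbrock function; the true minimum is f(1, 1) = 0.
var objective = ObjectiveFunction.Value(
    x => Math.Pow(1 - x[0], 2) + 100 * Math.Pow(x[1] - x[0] * x[0], 2));

var initialGuess = Vector<double>.Build.DenseOfArray(new[] { -1.2, 1.0 });
var perturbation = Vector<double>.Build.DenseOfArray(new[] { 0.1, 0.1 });

// Arguments: convergence tolerance, maximum iterations (the evaluation budget in the code above).
var solver = new NelderMeadSimplex(1e-8, 10000);
var result = solver.FindMinimum(objective, initialGuess, perturbation);

Console.WriteLine($"{result.ReasonForExit}: x = {result.MinimizingPoint}");

Note that the method above counts function evaluations against MaximumIterations and throws MaximumIterationsException rather than returning, so callers that prefer a soft failure should catch that exception.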
Example #21
0
 /// <summary>Creates a new <see cref="ClosedNewtonCotesFormula"/> object.
 /// </summary>
 /// <param name="rule">The Newton-Cotes rule, i.e. the degree of the Newton-Cotes method.</param>
 /// <param name="exitCondition">The exit condition.</param>
 /// <returns>A new <see cref="ClosedNewtonCotesFormula"/> object.</returns>
 public static ClosedNewtonCotesFormula Create(ClosedNewtonCotesFormula.Rule rule, ExitCondition exitCondition)
 {
     return(new ClosedNewtonCotesFormula(rule, exitCondition));
 }
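Being a one-line factory, Create is consumed directly. A hedged sketch follows, where rule and exitCondition stand for values obtained elsewhere; this excerpt does not show which Rule members or ExitCondition configurations the library defines:

// 'rule' and 'exitCondition' are assumed to be already in scope; their concrete
// values depend on the library and are not shown in this excerpt.
ClosedNewtonCotesFormula formula = ClosedNewtonCotesFormula.Create(rule, exitCondition);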
Example #22
0
        /// <summary>
        /// Find the minimum of the objective function given lower and upper bounds
        /// </summary>
        /// <param name="objective">The objective function, must support a gradient</param>
        /// <param name="initialGuess">The initial guess</param>
        /// <returns>The MinimizationResult which contains the minimum and the ExitCondition</returns>
        public MinimizationResult FindMinimum(IObjectiveFunction objective, Vector <double> initialGuess)
        {
            if (!objective.IsGradientSupported)
            {
                throw new IncompatibleObjectiveException("Gradient not supported in objective function, but required for BFGS minimization.");
            }

            objective.EvaluateAt(initialGuess);
            ValidateGradientAndObjective(objective);

            // Check that we're not already done
            ExitCondition currentExitCondition = ExitCriteriaSatisfied(objective, null, 0);

            if (currentExitCondition != ExitCondition.None)
            {
                return(new MinimizationResult(objective, 0, currentExitCondition));
            }

            // Set up line search algorithm
            var lineSearcher = new WeakWolfeLineSearch(1e-4, 0.9, Math.Max(ParameterTolerance, 1e-10), 1000);

            // First step
            var inversePseudoHessian = CreateMatrix.DenseIdentity <double>(initialGuess.Count);
            var lineSearchDirection  = -objective.Gradient;
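            // Heuristic first step length: inversely proportional to the squared gradient norm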
            var stepSize             = 100 * GradientTolerance / (lineSearchDirection * lineSearchDirection);

            var previousPoint = objective;

            LineSearchResult lineSearchResult;

            try
            {
                lineSearchResult = lineSearcher.FindConformingStep(objective, lineSearchDirection, stepSize);
            }
            catch (OptimizationException e)
            {
                throw new InnerOptimizationException("Line search failed.", e);
            }
            catch (ArgumentException e)
            {
                throw new InnerOptimizationException("Line search failed.", e);
            }

            var candidate = lineSearchResult.FunctionInfoAtMinimum;

            ValidateGradientAndObjective(candidate);

            var gradient = candidate.Gradient;
            var step     = candidate.Point - initialGuess;

            // Subsequent steps
            Matrix <double> I = CreateMatrix.DiagonalIdentity <double>(initialGuess.Count);
            int             iterations;
            int             totalLineSearchSteps = lineSearchResult.Iterations;
            int             iterationsWithNontrivialLineSearch = lineSearchResult.Iterations > 0 ? 1 : 0;

            iterations = DoBfgsUpdate(ref currentExitCondition, lineSearcher, ref inversePseudoHessian, ref lineSearchDirection, ref previousPoint, ref lineSearchResult, ref candidate, ref step, ref totalLineSearchSteps, ref iterationsWithNontrivialLineSearch);

            if (iterations == MaximumIterations && currentExitCondition == ExitCondition.None)
            {
                throw new MaximumIterationsException(String.Format("Maximum iterations ({0}) reached.", MaximumIterations));
            }

            return(new MinimizationWithLineSearchResult(candidate, iterations, currentExitCondition, totalLineSearchSteps, iterationsWithNontrivialLineSearch));
        }
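This method closely matches Math.NET Numerics' BfgsMinimizer.FindMinimum. Assuming that class, a usage sketch (tolerance values illustrative):

// Usage sketch (assumes Math.NET Numerics' BfgsMinimizer; tolerances illustrative).
using System;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.Optimization;

// f(x) = x0^2 + x1^2 with its analytic gradient (2*x0, 2*x1).
var objective = ObjectiveFunction.Gradient(
    x => x[0] * x[0] + x[1] * x[1],
    x => Vector<double>.Build.DenseOfArray(new[] { 2 * x[0], 2 * x[1] }));

// Arguments: gradientTolerance, parameterTolerance, functionProgressTolerance, maximumIterations.
var solver = new BfgsMinimizer(1e-8, 1e-8, 1e-8, 1000);
var result = solver.FindMinimum(objective, Vector<double>.Build.Dense(2, 1.0));

Console.WriteLine($"{result.ReasonForExit} after {result.Iterations} iterations");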
Example #23
0
        public MinimizationOutput FindMinimum(IObjectiveFunction objective, Vector <double> initial_guess)
        {
            if (!objective.GradientSupported)
            {
                throw new IncompatibleObjectiveException("Gradient not supported in objective function, but required for BFGS minimization.");
            }

            if (!(objective is ObjectiveChecker))
            {
                objective = new ObjectiveChecker(objective, this.ValidateObjective, this.ValidateGradient, null);
            }

            IEvaluation initial_eval = objective.Evaluate(initial_guess);

            // Check that we're not already done
            ExitCondition current_exit_condition = this.ExitCriteriaSatisfied(initial_eval, null);

            if (current_exit_condition != ExitCondition.None)
            {
                return(new MinimizationOutput(initial_eval, 0, current_exit_condition));
            }

            // Set up line search algorithm
            var line_searcher = new WeakWolfeLineSearch(1e-4, 0.9, Math.Max(this.ParameterTolerance, 1e-10), max_iterations: 1000);

            // Declare state variables
            IEvaluation     candidate_point, previous_point;
            double          step_size;
            Vector <double> gradient, step, search_direction;
            Matrix <double> inverse_pseudo_hessian;

            // First step
            inverse_pseudo_hessian = LinearAlgebra.Double.DiagonalMatrix.CreateIdentity(initial_guess.Count);
            search_direction       = -initial_eval.Gradient;
            step_size = 100 * this.GradientTolerance / (search_direction * search_direction);

            LineSearchOutput result;

            try
            {
                result = line_searcher.FindConformingStep(objective, initial_eval, search_direction, step_size);
            }
            catch (Exception e)
            {
                throw new InnerOptimizationException("Line search failed.", e);
            }

            previous_point  = initial_eval;
            candidate_point = result.FunctionInfoAtMinimum;
            gradient        = candidate_point.Gradient;
            step            = candidate_point.Point - initial_guess;
            step_size       = result.FinalStep;

            // Subsequent steps
            Matrix <double> I = LinearAlgebra.Double.DiagonalMatrix.CreateIdentity(initial_guess.Count);
            int             iterations;
            int             total_line_search_steps = result.Iterations;
            int             iterations_with_nontrivial_line_search = result.Iterations > 0 ? 1 : 0;
            int             steepest_descent_resets = 0;

            for (iterations = 1; iterations < this.MaximumIterations; ++iterations)
            {
                var y = candidate_point.Gradient - previous_point.Gradient;

                double sy = step * y;
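                // BFGS rank-two update of the inverse pseudo-Hessian (written out as a formula after this example)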
                inverse_pseudo_hessian = inverse_pseudo_hessian
                                         + ((sy + y * inverse_pseudo_hessian * y) / Math.Pow(sy, 2.0)) * step.OuterProduct(step)
                                         - ((inverse_pseudo_hessian * y.ToColumnMatrix()) * step.ToRowMatrix()
                                            + step.ToColumnMatrix() * (y.ToRowMatrix() * inverse_pseudo_hessian)) * (1.0 / sy);
                search_direction       = -inverse_pseudo_hessian * candidate_point.Gradient;

                if (search_direction * candidate_point.Gradient >= 0.0)
                {
                    search_direction         = -candidate_point.Gradient;
                    inverse_pseudo_hessian   = LinearAlgebra.Double.DiagonalMatrix.CreateIdentity(initial_guess.Count);
                    steepest_descent_resets += 1;
                }
                //else if (search_direction * candidate_point.Gradient >= -this.GradientTolerance*this.GradientTolerance)
                //{
                //    search_direction = -candidate_point.Gradient;
                //    inverse_pseudo_hessian = LinearAlgebra.Double.DiagonalMatrix.Identity(initial_guess.Count);
                //    steepest_descent_resets += 1;
                //}

                try
                {
                    result = line_searcher.FindConformingStep(objective, candidate_point, search_direction, 1.0);
                }
                catch (Exception e)
                {
                    throw new InnerOptimizationException("Line search failed.", e);
                }

                iterations_with_nontrivial_line_search += result.Iterations > 0 ? 1 : 0;
                total_line_search_steps += result.Iterations;

                step_size       = result.FinalStep;
                step            = result.FunctionInfoAtMinimum.Point - candidate_point.Point;
                previous_point  = candidate_point;
                candidate_point = result.FunctionInfoAtMinimum;

                current_exit_condition = this.ExitCriteriaSatisfied(candidate_point, previous_point);
                if (current_exit_condition != ExitCondition.None)
                {
                    break;
                }
            }

            if (iterations == this.MaximumIterations && current_exit_condition == ExitCondition.None)
            {
                throw new MaximumIterationsException(String.Format("Maximum iterations ({0}) reached.", this.MaximumIterations));
            }

            return(new MinimizationWithLineSearchOutput(candidate_point, iterations, current_exit_condition, total_line_search_steps, iterations_with_nontrivial_line_search));
        }
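For reference, the rank-two update of inverse_pseudo_hessian in the loop above is the standard BFGS update of the inverse Hessian approximation. Writing s for step, y for the gradient difference, and H_k for the current inverse pseudo-Hessian, the assignment computes

    H_{k+1} = H_k + \frac{s^\top y + y^\top H_k y}{(s^\top y)^2}\, s s^\top - \frac{H_k y\, s^\top + s\, y^\top H_k}{s^\top y}

and the guard that follows discards H_{k+1} in favor of the identity (a steepest-descent reset, counted in steepest_descent_resets) whenever the resulting direction d = -H_{k+1} g satisfies d^\top g \ge 0, i.e. fails to be a descent direction.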