        /// <summary>
        /// Finds the minimum of the objective function subject to lower and upper bounds
        /// </summary>
        /// <param name="objective">The objective function; it must support gradient evaluation</param>
        /// <param name="lowerBound">The lower bound</param>
        /// <param name="upperBound">The upper bound</param>
        /// <param name="initialGuess">The initial guess</param>
        /// <returns>The MinimizationResult, which contains the minimizing point and the ExitCondition</returns>
        public MinimizationResult FindMinimum(IObjectiveFunction objective, Vector<double> lowerBound, Vector<double> upperBound, Vector<double> initialGuess)
        {
            _lowerBound = lowerBound;
            _upperBound = upperBound;
            if (!objective.IsGradientSupported)
            {
                throw new IncompatibleObjectiveException("Gradient not supported in objective function, but required for BFGS minimization.");
            }

            // Check that dimensions match
            if ((lowerBound.Count != upperBound.Count) || (lowerBound.Count != initialGuess.Count))
            {
                throw new ArgumentException("Dimensions of bounds and/or initial guess do not match.");
            }

            // Check that initial guess is feasible
            for (int ii = 0; ii < initialGuess.Count; ++ii)
            {
                if ((initialGuess[ii] < lowerBound[ii]) || (initialGuess[ii] > upperBound[ii]))
                {
                    throw new ArgumentException("Initial guess is not in the feasible region");
                }
            }

            objective.EvaluateAt(initialGuess);
            ValidateGradientAndObjective(objective);

            // Check that we're not already done
            var currentExitCondition = ExitCriteriaSatisfied(objective, null, 0);

            if (currentExitCondition != ExitCondition.None)
            {
                return new MinimizationResult(objective, 0, currentExitCondition);
            }

            // Set up line search algorithm
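            // c1 = 1e-4 (sufficient decrease) and c2 = 0.9 (curvature) are the customary strong Wolfe constants for quasi-Newton methods;
            // the line search's own parameter tolerance is kept at or above 1e-5.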
            var lineSearcher = new StrongWolfeLineSearch(1e-4, 0.9, Math.Max(ParameterTolerance, 1e-5), maxIterations: 1000);

            // Declare state variables
            Vector<double>  reducedSolution1, reducedGradient, reducedInitialPoint, reducedCauchyPoint, solution1;
            Matrix<double>  reducedHessian;
            List<int>       reducedMap;

            // First step
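            // Start from the identity matrix as the initial Hessian approximation; curvature information is accumulated by the later BFGS updates.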
            var pseudoHessian = CreateMatrix.DiagonalIdentity<double>(initialGuess.Count);

            // Determine active set
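            // The projected-gradient search follows the steepest-descent path, bent at the bounds, to the generalized Cauchy point
            // and reports which variables end up fixed at a bound (the active set).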
            var gradientProjectionResult = QuadraticGradientProjectionSearch.Search(objective.Point, objective.Gradient, pseudoHessian, lowerBound, upperBound);
            var cauchyPoint = gradientProjectionResult.CauchyPoint;
            var fixedCount  = gradientProjectionResult.FixedCount;
            var isFixed     = gradientProjectionResult.IsFixed;
            var freeCount   = lowerBound.Count - fixedCount;

            if (freeCount > 0)
            {
                reducedGradient     = new DenseVector(freeCount);
                reducedHessian      = new DenseMatrix(freeCount, freeCount);
                reducedMap          = new List<int>(freeCount);
                reducedInitialPoint = new DenseVector(freeCount);
                reducedCauchyPoint  = new DenseVector(freeCount);

                CreateReducedData(objective.Point, cauchyPoint, isFixed, lowerBound, upperBound, objective.Gradient, pseudoHessian, reducedInitialPoint, reducedCauchyPoint, reducedGradient, reducedHessian, reducedMap);

                // Determine search direction and maximum step size
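                // Newton-like step on the free variables: solve reducedHessian * d = -reducedGradient via Cholesky,
                // then map the reduced solution back to the full space.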
                reducedSolution1 = reducedInitialPoint + reducedHessian.Cholesky().Solve(-reducedGradient);

                solution1 = ReducedToFull(reducedMap, reducedSolution1, cauchyPoint);
            }
            else
            {
                solution1 = cauchyPoint;
            }

            var directionFromCauchy    = solution1 - cauchyPoint;
            var maxStepFromCauchyPoint = FindMaxStep(cauchyPoint, directionFromCauchy, lowerBound, upperBound);

            var solution2 = cauchyPoint + (Math.Min(maxStepFromCauchyPoint, 1.0) * directionFromCauchy);

            var lineSearchDirection = solution2 - objective.Point;
            var maxLineSearchStep   = FindMaxStep(objective.Point, lineSearchDirection, lowerBound, upperBound);
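            // The estimate below is the exact minimizer of the local quadratic model along the search direction: alpha = -g·d / (d·H·d).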
            var estStepSize         = (-objective.Gradient * lineSearchDirection) / (lineSearchDirection * pseudoHessian * lineSearchDirection);

            var startingStepSize = Math.Min(Math.Max(estStepSize, 1.0), maxLineSearchStep);

            // Line search
            LineSearchResult lineSearchResult;

            try
            {
                lineSearchResult = lineSearcher.FindConformingStep(objective, lineSearchDirection, startingStepSize, upperBound: maxLineSearchStep);
            }
            catch (Exception e)
            {
                throw new InnerOptimizationException("Line search failed.", e);
            }

            var previousPoint  = objective.Fork();
            var candidatePoint = lineSearchResult.FunctionInfoAtMinimum;

            ValidateGradientAndObjective(candidatePoint);

            // Check that we're not done
            currentExitCondition = ExitCriteriaSatisfied(candidatePoint, previousPoint, 0);
            if (currentExitCondition != ExitCondition.None)
            {
                return new MinimizationResult(candidatePoint, 0, currentExitCondition);
            }

            var gradient = candidatePoint.Gradient;
            var step     = candidatePoint.Point - initialGuess;

            // Subsequent steps
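            // DoBfgsUpdate carries out the main iteration (BFGS update, active-set determination, subspace step, line search);
            // the second example below writes a comparable loop out inline.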
            int totalLineSearchSteps = lineSearchResult.Iterations;
            int iterationsWithNontrivialLineSearch = lineSearchResult.Iterations > 0 ? 1 : 0;

            int iterations = DoBfgsUpdate(ref currentExitCondition, lineSearcher, ref pseudoHessian, ref lineSearchDirection, ref previousPoint, ref lineSearchResult, ref candidatePoint, ref step, ref totalLineSearchSteps, ref iterationsWithNontrivialLineSearch);

            if ((iterations == MaximumIterations) && (currentExitCondition == ExitCondition.None))
            {
                throw new MaximumIterationsException(string.Format("Maximum iterations ({0}) reached.", MaximumIterations));
            }

            return new MinimizationWithLineSearchResult(candidatePoint, iterations, currentExitCondition, totalLineSearchSteps, iterationsWithNontrivialLineSearch);
        }
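
A minimal usage sketch for the method above. It assumes the enclosing class is MathNet.Numerics.Optimization's BfgsBMinimizer with a (gradientTolerance, parameterTolerance, functionProgressTolerance, maximumIterations) constructor, and that ObjectiveFunction.Gradient builds an IObjectiveFunction from value and gradient delegates; the quadratic objective, the bounds, and the tolerances are made up for illustration.

using System;
using MathNet.Numerics.LinearAlgebra.Double;
using MathNet.Numerics.Optimization;

class BfgsBUsageSketch
{
    static void Main()
    {
        // f(x, y) = (x - 3)^2 + (y + 2)^2, with the analytic gradient FindMinimum requires
        var objective = ObjectiveFunction.Gradient(
            p => Math.Pow(p[0] - 3.0, 2) + Math.Pow(p[1] + 2.0, 2),
            p => DenseVector.OfArray(new[] { 2.0 * (p[0] - 3.0), 2.0 * (p[1] + 2.0) }));

        var lowerBound   = DenseVector.OfArray(new[] { 0.0, 0.0 });
        var upperBound   = DenseVector.OfArray(new[] { 10.0, 10.0 });
        var initialGuess = DenseVector.OfArray(new[] { 5.0, 5.0 });

        // Illustrative tolerances and iteration cap
        var minimizer = new BfgsBMinimizer(1e-8, 1e-8, 1e-8, 1000);
        var result = minimizer.FindMinimum(objective, lowerBound, upperBound, initialGuess);

        // The unconstrained minimum (3, -2) lies outside the box, so the bound-constrained
        // minimizer is expected near (3, 0).
        Console.WriteLine(result.MinimizingPoint);
        Console.WriteLine(result.ReasonForExit);
    }
}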
Example #2
        public MinimizationOutput FindMinimum(IObjectiveFunction objective, Vector<double> lower_bound, Vector<double> upper_bound, Vector<double> initial_guess)
        {
            if (!objective.GradientSupported)
            {
                throw new IncompatibleObjectiveException("Gradient not supported in objective function, but required for BFGS minimization.");
            }

            if (!(objective is ObjectiveChecker))
            {
                objective = new ObjectiveChecker(objective, this.ValidateObjective, this.ValidateGradient, null);
            }

            // Check that dimensions match
            if (lower_bound.Count != upper_bound.Count || lower_bound.Count != initial_guess.Count)
            {
                throw new ArgumentException("Dimensions of bounds and/or initial guess do not match.");
            }

            // Check that initial guess is feasible
            for (int ii = 0; ii < initial_guess.Count; ++ii)
            {
                if (initial_guess[ii] < lower_bound[ii] || initial_guess[ii] > upper_bound[ii])
                {
                    throw new ArgumentException("Initial guess is not in the feasible region");
                }
            }

            IEvaluation initial_eval = objective.Evaluate(initial_guess);

            // Check that we're not already done
            ExitCondition current_exit_condition = this.ExitCriteriaSatisfied(initial_eval, null, lower_bound, upper_bound, 0);

            if (current_exit_condition != ExitCondition.None)
            {
                return new MinimizationOutput(initial_eval, 0, current_exit_condition);
            }

            // Set up line search algorithm
            var line_searcher = new StrongWolfeLineSearch(1e-4, 0.9, Math.Max(this.ParameterTolerance, 1e-5), max_iterations: 1000);

            // Declare state variables
            IEvaluation     candidate_point, previous_point;
            double          step_size;
            Vector<double>  gradient, step, line_search_direction, reduced_solution1, reduced_gradient, reduced_initial_point, reduced_cauchy_point, solution1;
            Matrix<double>  pseudo_hessian, reduced_hessian;
            List<int>       reduced_map;

            // First step
            pseudo_hessian = DiagonalMatrix.CreateIdentity(initial_guess.Count);

            // Determine active set
            var gradient_projection_result = QuadraticGradientProjectionSearch.search(initial_eval.Point, initial_eval.Gradient, pseudo_hessian, lower_bound, upper_bound);
            var cauchy_point = gradient_projection_result.Item1;
            var fixed_count  = gradient_projection_result.Item2;
            var is_fixed     = gradient_projection_result.Item3;
            var free_count   = lower_bound.Count - fixed_count;

            if (free_count > 0)
            {
                reduced_gradient      = new DenseVector(free_count);
                reduced_hessian       = new DenseMatrix(free_count, free_count);
                reduced_map           = new List<int>(free_count);
                reduced_initial_point = new DenseVector(free_count);
                reduced_cauchy_point  = new DenseVector(free_count);

                CreateReducedData(initial_eval.Point, cauchy_point, is_fixed, lower_bound, upper_bound, initial_eval.Gradient, pseudo_hessian, reduced_initial_point, reduced_cauchy_point, reduced_gradient, reduced_hessian, reduced_map);

                // Determine search direction and maximum step size
                reduced_solution1 = reduced_initial_point + reduced_hessian.Cholesky().Solve(-reduced_gradient);

                solution1 = reduced_to_full(reduced_map, reduced_solution1, cauchy_point);
            }
            else
            {
                solution1 = cauchy_point;
            }

            var direction_from_cauchy      = solution1 - cauchy_point;
            var max_step_from_cauchy_point = FindMaxStep(cauchy_point, direction_from_cauchy, lower_bound, upper_bound);

            var solution2 = cauchy_point + Math.Min(max_step_from_cauchy_point, 1.0) * direction_from_cauchy;

            line_search_direction = solution2 - initial_eval.Point;
            var max_line_search_step = FindMaxStep(initial_eval.Point, line_search_direction, lower_bound, upper_bound);
            var est_step_size        = -initial_eval.Gradient * line_search_direction / (line_search_direction * pseudo_hessian * line_search_direction);

            var starting_step_size = Math.Min(Math.Max(est_step_size, 1.0), max_line_search_step);

            // Line search
            LineSearchOutput result;

            try
            {
                result = line_searcher.FindConformingStep(objective, initial_eval, line_search_direction, starting_step_size, upper_bound: max_line_search_step);
            }
            catch (Exception e)
            {
                throw new InnerOptimizationException("Line search failed.", e);
            }

            previous_point  = initial_eval;
            candidate_point = result.FunctionInfoAtMinimum;
            gradient        = candidate_point.Gradient;
            step            = candidate_point.Point - initial_guess;
            step_size       = result.FinalStep;

            // Subsequent steps
            int iterations;
            int total_line_search_steps = result.Iterations;
            int iterations_with_nontrivial_line_search = result.Iterations > 0 ? 1 : 0;
            int steepest_descent_resets = 0;

            for (iterations = 1; iterations < this.MaximumIterations; ++iterations)
            {
                // Do BFGS update
                var y = candidate_point.Gradient - previous_point.Gradient;

                double sy = step * y;
                if (sy > 0.0) // only do update if it will create a positive definite matrix
                {
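                    // Direct (non-inverse) BFGS update of the Hessian approximation:
                    // H_{k+1} = H + y*yᵀ/(sᵀy) - (H*s)(H*s)ᵀ/(sᵀ*H*s)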
                    double sts = step * step;
                    //inverse_pseudo_hessian = inverse_pseudo_hessian + ((sy + y * inverse_pseudo_hessian * y) / Math.Pow(sy, 2.0)) * step.OuterProduct(step) - ((inverse_pseudo_hessian * y.ToColumnMatrix()) * step.ToRowMatrix() + step.ToColumnMatrix() * (y.ToRowMatrix() * inverse_pseudo_hessian)) * (1.0 / sy);
                    var Hs  = pseudo_hessian * step;
                    var sHs = step * pseudo_hessian * step;
                    pseudo_hessian = pseudo_hessian + y.OuterProduct(y) * (1.0 / sy) - Hs.OuterProduct(Hs) * (1.0 / sHs);
                }
                else
                {
                    steepest_descent_resets += 1;
                    //pseudo_hessian = LinearAlgebra.Double.DiagonalMatrix.Identity(initial_guess.Count);
                }

                // Determine active set
                gradient_projection_result = QuadraticGradientProjectionSearch.search(candidate_point.Point, candidate_point.Gradient, pseudo_hessian, lower_bound, upper_bound);
                cauchy_point = gradient_projection_result.Item1;
                fixed_count  = gradient_projection_result.Item2;
                is_fixed     = gradient_projection_result.Item3;
                free_count   = lower_bound.Count - fixed_count;

                if (free_count > 0)
                {
                    reduced_gradient      = new DenseVector(free_count);
                    reduced_hessian       = new DenseMatrix(free_count, free_count);
                    reduced_map           = new List<int>(free_count);
                    reduced_initial_point = new DenseVector(free_count);
                    reduced_cauchy_point  = new DenseVector(free_count);

                    CreateReducedData(candidate_point.Point, cauchy_point, is_fixed, lower_bound, upper_bound, candidate_point.Gradient, pseudo_hessian, reduced_initial_point, reduced_cauchy_point, reduced_gradient, reduced_hessian, reduced_map);

                    // Determine search direction and maximum step size
                    reduced_solution1 = reduced_initial_point + reduced_hessian.Cholesky().Solve(-reduced_gradient);

                    solution1 = reduced_to_full(reduced_map, reduced_solution1, cauchy_point);
                }
                else
                {
                    solution1 = cauchy_point;
                }

                direction_from_cauchy      = solution1 - cauchy_point;
                max_step_from_cauchy_point = FindMaxStep(cauchy_point, direction_from_cauchy, lower_bound, upper_bound);
                //var cauchy_eval = objective.Evaluate(cauchy_point);

                solution2 = cauchy_point + Math.Min(max_step_from_cauchy_point, 1.0) * direction_from_cauchy;

                line_search_direction = solution2 - candidate_point.Point;
                max_line_search_step  = FindMaxStep(candidate_point.Point, line_search_direction, lower_bound, upper_bound);

                //line_search_direction = solution1 - candidate_point.Point;
                //max_line_search_step = FindMaxStep(candidate_point.Point, line_search_direction, lower_bound, upper_bound);

                // If the bounds leave no room to move along the subspace direction, fall back to searching toward the Cauchy point instead.
                if (max_line_search_step == 0.0)
                {
                    line_search_direction = cauchy_point - candidate_point.Point;
                    max_line_search_step  = FindMaxStep(candidate_point.Point, line_search_direction, lower_bound, upper_bound);
                }

                est_step_size = -candidate_point.Gradient * line_search_direction / (line_search_direction * pseudo_hessian * line_search_direction);

                starting_step_size = Math.Min(Math.Max(est_step_size, 1.0), max_line_search_step);

                // Line search
                try
                {
                    result = line_searcher.FindConformingStep(objective, candidate_point, line_search_direction, starting_step_size, upper_bound: max_line_search_step);
                    //result = line_searcher.FindConformingStep(objective, cauchy_eval, direction_from_cauchy, Math.Min(1.0, max_step_from_cauchy_point), upper_bound: max_step_from_cauchy_point);
                }
                catch (Exception e)
                {
                    throw new InnerOptimizationException("Line search failed.", e);
                }

                iterations_with_nontrivial_line_search += result.Iterations > 0 ? 1 : 0;
                total_line_search_steps += result.Iterations;

                step_size       = result.FinalStep;
                step            = result.FunctionInfoAtMinimum.Point - candidate_point.Point;
                previous_point  = candidate_point;
                candidate_point = result.FunctionInfoAtMinimum;

                current_exit_condition = this.ExitCriteriaSatisfied(candidate_point, previous_point, lower_bound, upper_bound, iterations);
                if (current_exit_condition != ExitCondition.None)
                {
                    break;
                }
            }

            if (iterations == this.MaximumIterations && current_exit_condition == ExitCondition.None)
            {
                throw new MaximumIterationsException(String.Format("Maximum iterations ({0}) reached.", this.MaximumIterations));
            }

            return new MinimizationWithLineSearchOutput(candidate_point, iterations, current_exit_condition, total_line_search_steps, iterations_with_nontrivial_line_search);
        }
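
The loop above keeps the Hessian approximation itself (not its inverse) and applies the rank-two BFGS update only when the curvature condition sᵀy > 0 holds, which preserves positive definiteness. Below is a small self-contained sketch of just that update with made-up numbers, assuming MathNet.Numerics.LinearAlgebra for the vector and matrix types.

using System;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.LinearAlgebra.Double;

class BfgsUpdateSketch
{
    static void Main()
    {
        Matrix<double> H = DenseMatrix.CreateIdentity(2);               // current Hessian approximation
        Vector<double> s = DenseVector.OfArray(new[] { 0.50, -0.25 });  // step taken: x_{k+1} - x_k
        Vector<double> y = DenseVector.OfArray(new[] { 0.40, -0.10 });  // gradient change: g_{k+1} - g_k

        double sy = s * y;   // curvature along the step
        if (sy > 0.0)        // skip the update otherwise, exactly as in the loop above
        {
            var Hs  = H * s;
            var sHs = s * H * s;
            // H_{k+1} = H + y*yᵀ/(sᵀy) - (H*s)(H*s)ᵀ/(sᵀ*H*s)
            H = H + y.OuterProduct(y) * (1.0 / sy) - Hs.OuterProduct(Hs) * (1.0 / sHs);
        }

        Console.WriteLine(H);
    }
}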