gradientNormValue() public method

public double gradientNormValue()
Returns: double
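As the examples below show, gradientNormValue() reads back the value most recently cached on the Problem via setGradientNormValue(). Despite the name, the optimizers store the squared L2 norm of the gradient (a dot product), so they call Math.Sqrt(...) whenever the actual norm is needed. A minimal sketch of that bookkeeping, written in the same method-body style as the examples below and assuming a QLNet Problem instance named problem has already been constructed elsewhere:

            Vector x = problem.currentValue();
            Vector g = new Vector(x.Count);

            // evaluate the cost function and fill g with the gradient at x
            problem.setFunctionValue(problem.valueAndGradient(g, x));

            // the optimizers cache the *squared* gradient norm ...
            problem.setGradientNormValue(Vector.DotProduct(g, g));

            // ... so a square root recovers the actual gradient norm
            double gradientNorm = Math.Sqrt(problem.gradientNormValue());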
Example #1
        //! minimize the optimization problem P
        public override EndCriteria.Type minimize(Problem P, EndCriteria endCriteria)
        {
            EndCriteria.Type ecType = EndCriteria.Type.None;
            P.reset();
            Vector x_ = P.currentValue();
            int iterationNumber_ = 0;
            int stationaryStateIterationNumber_ = 0;
            lineSearch_.searchDirection = new Vector(x_.Count);
            bool end;

            // function and squared norm of gradient values;
            double normdiff;
            // classical initial value for line-search step
            double t = 1.0;
            // Set gold at the size of the optimization problem search direction
            Vector gold = new Vector(lineSearch_.searchDirection.Count);
            Vector gdiff = new Vector(lineSearch_.searchDirection.Count);

            P.setFunctionValue(P.valueAndGradient(gold, x_));
            lineSearch_.searchDirection = gold * -1.0;
            P.setGradientNormValue(Vector.DotProduct(gold, gold));
            normdiff = Math.Sqrt(P.gradientNormValue());

            do
            {
                // Linesearch
                t = lineSearch_.value(P, ref ecType, endCriteria, t);

                if (!(lineSearch_.succeed()))
                    throw new ApplicationException("line-search failed!");

                // End criteria
                // FIXME: it's never been used! ???
                // , normdiff
                end = endCriteria.value(iterationNumber_, ref stationaryStateIterationNumber_, true, P.functionValue(), Math.Sqrt(P.gradientNormValue()), lineSearch_.lastFunctionValue(), Math.Sqrt(lineSearch_.lastGradientNorm2()), ref ecType);

                // Updates
                // New point
                x_ = lineSearch_.lastX();
                // New function value
                P.setFunctionValue(lineSearch_.lastFunctionValue());
                // New gradient and search direction vectors
                gdiff = gold - lineSearch_.lastGradient();
                normdiff = Math.Sqrt(Vector.DotProduct(gdiff, gdiff));
                gold = lineSearch_.lastGradient();
                lineSearch_.searchDirection = gold * -1.0;
                // New gradient squared norm
                P.setGradientNormValue(lineSearch_.lastGradientNorm2());

                // Increase iteration number
                ++iterationNumber_;
            } while (end == false);

            P.setCurrentValue(x_);
            return ecType;

        }
Example #2
        //! Perform line search
        public override double value(Problem P, ref EndCriteria.Type ecType, EndCriteria endCriteria, double t_ini)
        {
            //OptimizationMethod& method = P.method();
            Constraint constraint = P.constraint();

            succeed_ = true;
            bool   maxIter = false;
            double qtold;
            double t          = t_ini;
            int    loopNumber = 0;

            double q0  = P.functionValue();
            double qp0 = P.gradientNormValue();

            qt_  = q0;
            qpt_ = (gradient_.Count == 0) ? qp0 : -Vector.DotProduct(gradient_, searchDirection_);

            // Initialize gradient
            gradient_ = new Vector(P.currentValue().Count);
            // Compute new point
            xtd_ = (Vector)P.currentValue().Clone();
            t    = update(ref xtd_, searchDirection_, t, constraint);
            // Compute function value at the new point
            qt_ = P.value(xtd_);

            // Enter the loop if the criterion is not satisfied
            if ((qt_ - q0) > -alpha_ * t * qpt_)
            {
                do
                {
                    loopNumber++;
                    // Decrease step
                    t *= beta_;
                    // Store old value of the function
                    qtold = qt_;
                    // New point value
                    xtd_ = P.currentValue();
                    t    = update(ref xtd_, searchDirection_, t, constraint);

                    // Compute function value at the new point
                    qt_ = P.value(xtd_);
                    P.gradient(gradient_, xtd_);
                    // and its squared norm
                    maxIter = endCriteria.checkMaxIterations(loopNumber, ref ecType);
                } while ((((qt_ - q0) > (-alpha_ * t * qpt_)) || ((qtold - q0) <= (-alpha_ * t * qpt_ / beta_))) && (!maxIter));
            }

            if (maxIter)
            {
                succeed_ = false;
            }

            // Compute new gradient
            P.gradient(gradient_, xtd_);
            // and its squared norm
            qpt_ = Vector.DotProduct(gradient_, gradient_);

            // Return new step value
            return(t);
        }
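In this line search, gradientNormValue() supplies qp0 only on the very first call, before any gradient has been cached; afterwards the directional derivative -g.d drives the Armijo sufficient-decrease test. For the steepest-descent direction d = -g the two coincide, since -g.d = |g|^2, which is exactly what gradientNormValue() holds. A standalone sketch of that acceptance test, with a hypothetical helper name not taken from QLNet (fNew = f(x + t*d), fOld = f(x), slope = g.d, which is negative along a descent direction):

        static bool ArmijoAccepts(double fNew, double fOld, double t, double slope, double alpha)
        {
            // accept the step when the achieved decrease is at least alpha * t * |slope|;
            // slope = g.d is negative for a descent direction, so the right-hand side is negative
            return fNew - fOld <= alpha * t * slope;
        }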
Example #3
        //! solve the optimization problem P
        public override EndCriteria.Type minimize(Problem P, EndCriteria endCriteria)
        {
            // Initializations
            double ftol = endCriteria.functionEpsilon();
            int maxStationaryStateIterations_ = endCriteria.maxStationaryStateIterations();
            EndCriteria.Type ecType = EndCriteria.Type.None; // reset end criteria
            P.reset(); // reset problem
            Vector x_ = P.currentValue(); // store the starting point
            int iterationNumber_ = 0; // stationaryStateIterationNumber_=0
            lineSearch_.searchDirection = new Vector(x_.Count); // dimension line search
            bool done = false;

            // function and squared norm of gradient values;
            double fnew;
            double fold;
            double gold2;
            double c;
            double fdiff;
            double normdiff;
            // classical initial value for line-search step
            double t = 1.0;
            // Set gradient g at the size of the optimization problem search direction
            int sz = lineSearch_.searchDirection.Count;
            Vector g = new Vector(sz);
            Vector d = new Vector(sz);
            Vector sddiff = new Vector(sz);
            // Initialize cost function, gradient g and search direction
            P.setFunctionValue(P.valueAndGradient(g, x_));
            P.setGradientNormValue(Vector.DotProduct(g, g));
            lineSearch_.searchDirection = g * -1.0;
            // Loop over iterations
            do
            {
                // Linesearch
                t = lineSearch_.value(P, ref ecType, endCriteria, t);
                // don't throw: it can fail just because maxIterations exceeded
                //QL_REQUIRE(lineSearch_->succeed(), "line-search failed!");
                if (lineSearch_.succeed())
                {
                    // Updates
                    d = lineSearch_.searchDirection;
                    // New point
                    x_ = lineSearch_.lastX();
                    // New function value
                    fold = P.functionValue();
                    P.setFunctionValue(lineSearch_.lastFunctionValue());
                    // New gradient and search direction vectors
                    g = lineSearch_.lastGradient();
                    // orthogonalization coef
                    gold2 = P.gradientNormValue();
                    P.setGradientNormValue(lineSearch_.lastGradientNorm2());
                    c = P.gradientNormValue() / gold2;
                    // conjugate gradient search direction
                    sddiff = ((g * -1.0) + c * d) - lineSearch_.searchDirection;
                    normdiff = Math.Sqrt(Vector.DotProduct(sddiff, sddiff));
                    lineSearch_.searchDirection = (g * -1.0) + c * d;
                    // Now compute accuracy and check end criteria
                    // Numerical Recipes exit strategy on fx (see NR in C++, p.423)
                    fnew = P.functionValue();
                    fdiff = 2.0 * Math.Abs(fnew - fold) / (Math.Abs(fnew) + Math.Abs(fold) + Double.Epsilon);
                    if (fdiff < ftol || endCriteria.checkMaxIterations(iterationNumber_, ref ecType))
                    {
                        endCriteria.checkStationaryFunctionValue(0.0, 0.0, ref maxStationaryStateIterations_, ref ecType);
                        endCriteria.checkMaxIterations(iterationNumber_, ref ecType);
                        return ecType;
                    }
                    //done = endCriteria(iterationNumber_,
                    //                   stationaryStateIterationNumber_,
                    //                   true,  //FIXME: it should be in the problem
                    //                   fold,
                    //                   std::sqrt(gold2),
                    //                   P.functionValue(),
                    //                   std::sqrt(P.gradientNormValue()),
                    //                   ecType);
                    P.setCurrentValue(x_); // update problem current value
                    ++iterationNumber_; // Increase iteration number
                }
                else
                {
                    done = true;
                }
            } while (!done);
            P.setCurrentValue(x_);
            return ecType;
        }
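The direction update in this example is the Fletcher-Reeves form of nonlinear conjugate gradient: the orthogonalization coefficient c is the ratio of squared gradient norms, c = |g_new|^2 / |g_old|^2, which is why this step works directly with the squared values from gradientNormValue() and never takes a square root. Condensed to just that step, reusing the names from the example above:

            // Fletcher-Reeves coefficient from the cached squared norms (no sqrt needed)
            gold2 = P.gradientNormValue();                      // |g_old|^2 from the previous iteration
            P.setGradientNormValue(lineSearch_.lastGradientNorm2()); // cache |g_new|^2 for the next one
            c = P.gradientNormValue() / gold2;                  // c = |g_new|^2 / |g_old|^2
            lineSearch_.searchDirection = (g * -1.0) + c * d;   // new conjugate search direction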
Example #4
        public override EndCriteria.Type minimize(Problem P, EndCriteria endCriteria)
        {
            // Initializations
            double ftol = endCriteria.functionEpsilon();
            int    maxStationaryStateIterations_ = endCriteria.maxStationaryStateIterations();

            EndCriteria.Type ecType = EndCriteria.Type.None; // reset end criteria
            P.reset();                                       // reset problem
            Vector x_ = P.currentValue();                    // store the starting point
            int    iterationNumber_ = 0;

            // dimension line search
            lineSearch_.searchDirection = new Vector(x_.size());
            bool done = false;

            // function and squared norm of gradient values
            double fnew, fold, gold2;
            double fdiff;
            // classical initial value for line-search step
            double t = 1.0;
            // Set gradient g at the size of the optimization problem
            // search direction
            int    sz = lineSearch_.searchDirection.size();
            Vector prevGradient = new Vector(sz), d = new Vector(sz), sddiff = new Vector(sz), direction = new Vector(sz);

            // Initialize cost function, gradient prevGradient and search direction
            P.setFunctionValue(P.valueAndGradient(prevGradient, x_));
            P.setGradientNormValue(Vector.DotProduct(prevGradient, prevGradient));
            lineSearch_.searchDirection = prevGradient * -1;

            bool first_time = true;

            // Loop over iterations
            do
            {
                // Linesearch
                if (!first_time)
                {
                    prevGradient = lineSearch_.lastGradient();
                }
                t = (lineSearch_.value(P, ref ecType, endCriteria, t));
                // don't throw: it can fail just because maxIterations exceeded
                if (lineSearch_.succeed())
                {
                    // Updates

                    // New point
                    x_ = lineSearch_.lastX();
                    // New function value
                    fold = P.functionValue();
                    P.setFunctionValue(lineSearch_.lastFunctionValue());
                    // New gradient and search direction vectors

                    // orthogonalization coef
                    gold2 = P.gradientNormValue();
                    P.setGradientNormValue(lineSearch_.lastGradientNorm2());

                    // conjugate gradient search direction
                    direction = getUpdatedDirection(P, gold2, prevGradient);

                    sddiff = direction - lineSearch_.searchDirection;
                    lineSearch_.searchDirection = direction;
                    // Now compute accuracy and check end criteria
                    // Numerical Recipes exit strategy on fx (see NR in C++, p.423)
                    fnew  = P.functionValue();
                    fdiff = 2.0 * Math.Abs(fnew - fold) /
                            (Math.Abs(fnew) + Math.Abs(fold) + Const.QL_EPSILON);
                    if (fdiff < ftol ||
                        endCriteria.checkMaxIterations(iterationNumber_, ref ecType))
                    {
                        endCriteria.checkStationaryFunctionValue(0.0, 0.0, ref maxStationaryStateIterations_, ref ecType);
                        endCriteria.checkMaxIterations(iterationNumber_, ref ecType);
                        return(ecType);
                    }
                    P.setCurrentValue(x_); // update problem current value
                    ++iterationNumber_;    // Increase iteration number
                    first_time = false;
                }
                else
                {
                    done = true;
                }
            } while (!done);
            P.setCurrentValue(x_);
            return(ecType);
        }