Example No. 1
        //! Perform line search
        public override double value(Problem P, ref EndCriteria.Type ecType, EndCriteria endCriteria, double t_ini)
        {
            Constraint constraint = P.constraint();

            succeed_ = true;
            bool   maxIter = false;
            double qtold;
            double t          = t_ini;
            int    loopNumber = 0;

            double q0  = P.functionValue();
            double qp0 = P.gradientNormValue();

            qt_  = q0;
            qpt_ = (gradient_.Count == 0) ? qp0 : -Vector.DotProduct(gradient_, searchDirection_);

            // Initialize gradient
            gradient_ = new Vector(P.currentValue().Count);
            // Compute new point
            xtd_ = P.currentValue().Clone();
            t    = update(ref xtd_, searchDirection_, t, constraint);
            // Compute function value at the new point
            qt_ = P.value(xtd_);

            // Enter the loop only if the Armijo sufficient-decrease criterion
            // qt_ - q0 <= -alpha_ * t * qpt_ is not yet satisfied
            if ((qt_ - q0) > -alpha_ * t * qpt_)
            {
                do
                {
                    loopNumber++;
                    // Decrease step
                    t *= beta_;
                    // Store old value of the function
                    qtold = qt_;
                    // New point value
                    xtd_ = P.currentValue();
                    t    = update(ref xtd_, searchDirection_, t, constraint);

                    // Compute function value at the new point
                    qt_ = P.value(xtd_);
                    P.gradient(ref gradient_, xtd_);
                    // Check whether the maximum number of iterations has been reached
                    maxIter = endCriteria.checkMaxIterations(loopNumber, ref ecType);
                } while ((((qt_ - q0) > (-alpha_ * t * qpt_)) ||
                          ((qtold - q0) <= (-alpha_ * t * qpt_ / beta_))) &&
                         !maxIter);
            }

            if (maxIter)
            {
                succeed_ = false;
            }

            // Compute new gradient
            P.gradient(ref gradient_, xtd_);
            // and its squared norm
            qpt_ = Vector.DotProduct(gradient_, gradient_);

            // Return new step value
            return t;
        }
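
The routine above is an Armijo backtracking line search: starting from t_ini, the step is repeatedly multiplied by beta_ until the sufficient-decrease condition qt_ - q0 <= -alpha_ * t * qpt_ holds, where qpt_ is minus the directional derivative along searchDirection_. Below is a minimal, self-contained sketch of the same backtracking idea in C#, with the Problem/Constraint machinery stripped out; the class, method names, and the alpha/beta/maxIter values are illustrative, not QLNet API.

using System;

class ArmijoBacktrackingSketch
{
    // Backtracking line search with the Armijo sufficient-decrease test.
    // f: objective, grad: its gradient, x: current point, d: descent direction.
    // alpha and beta play the same roles as alpha_ and beta_ in the routine above;
    // the numeric defaults are illustrative only.
    static double Backtrack(Func<double[], double> f,
                            Func<double[], double[]> grad,
                            double[] x, double[] d,
                            double t = 1.0, double alpha = 0.05, double beta = 0.65,
                            int maxIter = 100)
    {
        double f0 = f(x);
        double[] g = grad(x);
        double slope = 0.0;                          // directional derivative g . d (negative for a descent direction)
        for (int i = 0; i < x.Length; i++) slope += g[i] * d[i];

        for (int k = 0; k < maxIter; k++)
        {
            var xt = new double[x.Length];           // trial point x + t * d
            for (int i = 0; i < x.Length; i++) xt[i] = x[i] + t * d[i];

            // Armijo condition: f(x + t d) <= f(x) + alpha * t * (g . d)
            if (f(xt) <= f0 + alpha * t * slope)
                return t;

            t *= beta;                               // not enough decrease: shrink the step
        }
        return t;
    }

    static void Main()
    {
        // Minimize f(x, y) = x^2 + y^2 from (2, 2) along the steepest-descent direction.
        Func<double[], double> f = v => v[0] * v[0] + v[1] * v[1];
        Func<double[], double[]> grad = v => new[] { 2.0 * v[0], 2.0 * v[1] };

        double[] x = { 2.0, 2.0 };
        double[] d = { -4.0, -4.0 };                 // -grad f(x)
        Console.WriteLine("accepted step t = " + Backtrack(f, grad, x, d));
    }
}
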
Example No. 2
        //! Perform line search
        public override double value(Problem P,           // Optimization problem
                                     ref EndCriteria.Type ecType,
                                     EndCriteria endCriteria,
                                     double t_ini)    // initial value of line-search step
        {
            Constraint constraint = P.constraint();

            succeed_ = true;
            bool   maxIter    = false;
            double t          = t_ini;
            int    loopNumber = 0;

            double q0  = P.functionValue();
            double qp0 = P.gradientNormValue();

            double tl = 0.0;
            double tr = 0.0;

            qt_  = q0;
            qpt_ = (gradient_.empty()) ? qp0 : -Vector.DotProduct(gradient_, searchDirection_);

            // Initialize gradient
            gradient_ = new Vector(P.currentValue().size());
            // Compute new point
            xtd_ = P.currentValue();
            t    = update(ref xtd_, searchDirection_, t, constraint);
            // Compute function value at the new point
            qt_ = P.value(xtd_);

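            // Goldstein test: accept t only when the decrease lies between the two bounds,
            // i.e. -beta_ * t * qpt_ <= qt_ - q0 <= -alpha_ * t * qpt_; otherwise bracket and refine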
            while ((qt_ - q0) < -beta_ * t * qpt_ || (qt_ - q0) > -alpha_ * t * qpt_)
            {
                if ((qt_ - q0) > -alpha_ * t * qpt_)
                {
                    tr = t;
                }
                else
                {
                    tl = t;
                }
                ++loopNumber;

                // calculate the new step
                if (Utils.close_enough(tr, 0.0))
                {
                    t *= extrapolation_;
                }
                else
                {
                    t = (tl + tr) / 2.0;
                }

                // New point value
                xtd_ = P.currentValue();
                t    = update(ref xtd_, searchDirection_, t, constraint);

                // Compute function value at the new point
                qt_ = P.value(xtd_);
                P.gradient(gradient_, xtd_);
                // Check whether the maximum number of iterations has been reached
                maxIter = endCriteria.checkMaxIterations(loopNumber, ref ecType);

                if (maxIter)
                {
                    break;
                }
            }

            if (maxIter)
            {
                succeed_ = false;
            }

            // Compute new gradient
            P.gradient(gradient_, xtd_);
            // and its squared norm
            qpt_ = Vector.DotProduct(gradient_, gradient_);

            // Return new step value
            return t;
        }
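
Example No. 2 is a Goldstein-type line search: a step giving insufficient decrease becomes the right end tr of a bracket, a step whose decrease is steeper than the lower Goldstein bound becomes the left end tl, and the trial step is extrapolated while no right end exists and bisected afterwards. A minimal standalone sketch of that bracketing scheme follows; the class and method names and the alpha/beta/extrapolation values are illustrative, not QLNet's API.

using System;

class GoldsteinSketch
{
    // Goldstein-style line search along direction d from x.
    // Steps with too little decrease shrink the bracket from the right (tr),
    // steps with a decrease steeper than the lower bound move the left end (tl);
    // while no right end exists the step is extrapolated, otherwise bisected.
    // alpha, beta and extrapolation are illustrative constants, not QLNet defaults.
    static double Search(Func<double[], double> f,
                         Func<double[], double[]> grad,
                         double[] x, double[] d,
                         double t = 1.0,
                         double alpha = 0.05, double beta = 0.65,
                         double extrapolation = 1.5, int maxIter = 100)
    {
        double f0 = f(x);
        double[] g = grad(x);
        double slope = 0.0;                          // directional derivative g . d (negative for a descent direction)
        for (int i = 0; i < x.Length; i++) slope += g[i] * d[i];

        double tl = 0.0, tr = 0.0;
        for (int k = 0; k < maxIter; k++)
        {
            var xt = new double[x.Length];           // trial point x + t * d
            for (int i = 0; i < x.Length; i++) xt[i] = x[i] + t * d[i];
            double ft = f(xt);

            bool tooLong  = ft - f0 > alpha * t * slope;   // not enough decrease
            bool tooShort = ft - f0 < beta  * t * slope;   // decrease steeper than the lower Goldstein bound
            if (!tooLong && !tooShort)
                return t;                            // both Goldstein conditions satisfied

            if (tooLong) tr = t; else tl = t;
            t = (tr == 0.0) ? t * extrapolation      // no right bracket yet: extrapolate (QLNet tests this with Utils.close_enough)
                            : 0.5 * (tl + tr);       // otherwise bisect the bracket
        }
        return t;
    }

    static void Main()
    {
        // Minimize f(x, y) = x^2 + 4 y^2 from (1, 1) along the steepest-descent direction.
        Func<double[], double> f = v => v[0] * v[0] + 4.0 * v[1] * v[1];
        Func<double[], double[]> grad = v => new[] { 2.0 * v[0], 8.0 * v[1] };

        double[] x = { 1.0, 1.0 };
        double[] d = { -2.0, -8.0 };                 // -grad f(x)
        Console.WriteLine("accepted step t = " + Search(f, grad, x, d));
    }
}
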