Example No. 1
        private IVector<T> MinimizeInternal(IDifferentiableFunctional<T> objective, IParametricFunction<T> function, IVector<T> initialParameters, IVector<T> minimumParameters = default,
                                            IVector<T> maximumParameters = default)
        {
            var la = LinearAlgebra.Value;

            var xOld = initialParameters.Clone() as IVector<T>;
            var xNew = initialParameters.Clone() as IVector<T>;

            // Compute the gradient of the objective at the starting point.
            var bindF = function.Bind(xOld);
            var fiNew = objective.Gradient(bindF);
            var k     = 0;

            // normOld starts at a large sentinel so the stagnation test below
            // passes on the first iteration.
            var normNew = la.Sqrt(la.Dot(fiNew.ToArray(), fiNew.ToArray()));
            var normOld = la.Cast(1000);

            // Iterate while the gradient norm exceeds Eps and keeps decreasing
            // by more than Eps per iteration.
            while (k++ < MaxIteration && la.Compare(normNew, Eps) == 1 && la.Compare(la.Sub(normOld, normNew), Eps) == 1)
            {
                bindF   = function.Bind(xNew);
                fiNew   = objective.Gradient(bindF);
                normOld = normNew;
                normNew = la.Sqrt(la.Dot(fiNew.ToArray(), fiNew.ToArray()));

                // Golden-section line search along the anti-gradient, starting from
                // xNew, the point where fiNew was evaluated.
                var gamma = GoldenRatioMethod<T>.FindMin(objective, function, xNew, fiNew, Eps);

                xOld = xNew.Clone() as IVector<T>;
                xNew = xOld.Sub(fiNew.MultWithCloning(gamma));

                // Clamp the step to the box constraints, if any were supplied.
                this.ApplyMinimumAndMaximumValues(minimumParameters, maximumParameters, xNew, la);
            }

            return xNew;
        }
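
Example No. 1 is plain steepest descent: each iteration re-evaluates the gradient, picks a step length gamma by golden-section line search, and moves against the gradient, clamping the result to optional box constraints. The standalone sketch below shows the same technique on a fixed two-variable quadratic; it does not use the library's IVector<T>/IDifferentiableFunctional<T> interfaces, and the toy function, the search interval [0, 1], and the tolerances are illustrative assumptions.

using System;

static class SteepestDescentDemo
{
    // Golden-section search for the gamma in [a, b] minimizing phi(gamma).
    static double GoldenMin(Func<double, double> phi, double a, double b, double eps)
    {
        double r  = (Math.Sqrt(5.0) - 1.0) / 2.0;           // golden ratio, ~0.618
        double x1 = b - r * (b - a), x2 = a + r * (b - a);
        double f1 = phi(x1), f2 = phi(x2);
        while (b - a > eps)
        {
            if (f1 < f2) { b = x2; x2 = x1; f2 = f1; x1 = b - r * (b - a); f1 = phi(x1); }
            else         { a = x1; x1 = x2; f1 = f2; x2 = a + r * (b - a); f2 = phi(x2); }
        }
        return (a + b) / 2.0;
    }

    static void Main()
    {
        // Toy objective (an assumption for this demo): minimum at (3, -1).
        Func<double[], double>   f    = v => Math.Pow(v[0] - 3, 2) + 2 * Math.Pow(v[1] + 1, 2);
        Func<double[], double[]> grad = v => new[] { 2 * (v[0] - 3), 4 * (v[1] + 1) };

        var x = new double[] { 0.0, 0.0 };
        const double eps = 1e-8;

        for (int k = 0; k < 1000; k++)
        {
            var g = grad(x);
            if (Math.Sqrt(g[0] * g[0] + g[1] * g[1]) < eps) break;  // converged

            // Line search along the anti-gradient: minimize f(x - gamma * g).
            double gamma = GoldenMin(
                t => f(new[] { x[0] - t * g[0], x[1] - t * g[1] }), 0.0, 1.0, eps);

            x[0] -= gamma * g[0];
            x[1] -= gamma * g[1];
        }

        Console.WriteLine($"x = ({x[0]:F6}, {x[1]:F6})");   // expect ~ (3, -1)
    }
}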
Example No. 2
        private IVector<T> MinimizeInternal(IDifferentiableFunctional<T> objective, IParametricFunction<T> function, IVector<T> initialParameters, IVector<T> minimumParameters = default,
                                            IVector<T> maximumParameters = default)
        {
            var la = LinearAlgebra.Value;

            var xNew     = initialParameters.Clone() as IVector<T>;
            var bindF    = function.Bind(xNew);
            var curVal   = objective.Value(bindF);
            var prevVal  = curVal;
            var gradient = objective.Gradient(bindF);

            // p holds the current search direction with the sign of the gradient;
            // every step below moves along -p, so no explicit negation is needed.
            var p          = gradient.Clone() as IVector<T>;
            var gradSquare = la.Dot(p.ToArray(), p.ToArray());

            int numIter = 0;

            do
            {
                // Find the minimum of F(x - alpha * p) with a one-dimensional
                // optimization method (golden-section search).
                var alpha = GoldenRatioMethod<T>.FindMin(objective, function, xNew, p, Eps);

                xNew = xNew.Sub(p.MultWithCloning(alpha));

                this.ApplyMinimumAndMaximumValues(minimumParameters, maximumParameters, xNew, la);
                bindF             = function.Bind(xNew);
                var newGrad       = objective.Gradient(bindF);
                var newGradSquare = la.Dot(newGrad.ToArray(), newGrad.ToArray());

                // Polak-Ribiere coefficient beta = gNew . (gNew - gOld) / (gOld . gOld),
                // reset to zero every 5 * SpaceSize iterations to restart the method.
                var beta = numIter % (5 * SpaceSize) == 0
                    ? la.GetZeroValue()
                    : la.Div(la.Sub(newGradSquare, la.Dot(newGrad.ToArray(), gradient.ToArray())), gradSquare);

                // Conjugate direction update: p = newGrad + beta * p.
                p.Mult(beta).Add(newGrad);

                // Track the objective value between iterations (available for a
                // value-based stopping criterion).
                prevVal = curVal;
                curVal  = objective.Value(bindF);

                gradient   = newGrad;
                gradSquare = newGradSquare;
            } while (la.Compare(gradSquare, Eps) == 1 && MaxIteration > numIter++);

            return xNew;
        }
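
Example No. 2 is a nonlinear conjugate-gradient variant of the same minimizer: the first direction is the plain gradient, after which each new direction is the fresh gradient plus a Polak-Ribiere multiple of the previous direction, with a forced restart (beta = 0) every 5 * SpaceSize iterations. The standalone sketch below mirrors that scheme on the same toy quadratic as before; the exact line-search formula alpha = (p . g) / (p . H p) holds only for quadratics and stands in for the golden-section search used above.

using System;

static class ConjugateGradientDemo
{
    static double Dot(double[] a, double[] b)
    {
        double s = 0;
        for (int i = 0; i < a.Length; i++) s += a[i] * b[i];
        return s;
    }

    static void Main()
    {
        // Toy objective (an assumption for this demo): minimum at (3, -1),
        // Hessian H = diag(2, 4).
        Func<double[], double[]> grad = v => new[] { 2 * (v[0] - 3), 4 * (v[1] + 1) };

        var x = new double[] { 0.0, 0.0 };
        var g = grad(x);
        var p = (double[])g.Clone();            // steps go along -p, as in the snippet
        double gradSquare = Dot(g, g);
        const double eps = 1e-12;
        const int spaceSize = 2;

        for (int iter = 0; iter < 1000 && gradSquare > eps; iter++)
        {
            // Exact line search for a quadratic: alpha = (p . g) / (p . H p).
            double[] hp   = { 2 * p[0], 4 * p[1] };
            double  alpha = Dot(p, g) / Dot(p, hp);

            x[0] -= alpha * p[0];
            x[1] -= alpha * p[1];

            var newGrad = grad(x);
            double newGradSquare = Dot(newGrad, newGrad);

            // Polak-Ribiere coefficient with a periodic restart, mirroring the
            // numIter % (5 * SpaceSize) == 0 test in the snippet above.
            double beta = iter % (5 * spaceSize) == 0
                ? 0.0
                : (newGradSquare - Dot(newGrad, g)) / gradSquare;

            // Conjugate direction update: p = newGrad + beta * p.
            for (int i = 0; i < p.Length; i++) p[i] = newGrad[i] + beta * p[i];

            g          = newGrad;
            gradSquare = newGradSquare;
        }

        Console.WriteLine($"x = ({x[0]:F6}, {x[1]:F6})");   // expect ~ (3, -1)
    }
}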