        private IVector<T> MinimizeInternal(ILeastSquaresFunctional<T> objective, IParametricFunction<T> function, IVector<T> initialParameters, IVector<T> minimumParameters = default,
                                             IVector<T> maximumParameters = default)
        {
            var la = LinearAlgebra.Value;
            var x  = initialParameters.Clone() as IVector<T>;

            // Initial residual and its Euclidean norm.
            var bindF    = function.Bind(x);
            var iter     = 0;
            var residual = objective.Residual(bindF);
            var error    = la.Sqrt(la.Dot(residual.ToArray(), residual.ToArray()));
            var oldError = la.Cast(1000); // sentinel so the first convergence check passes

            // Iterate while the residual norm still decreases by more than Eps.
            while (iter++ < MaxIteration && la.Compare(la.Sub(oldError, error), Eps) == 1)
            {
                // Gauss-Newton direction: (J^T J)^-1 J^T r.
                var jacobi  = objective.Jacobian(bindF);
                var jacobiT = jacobi.Transpose();     // J^T
                var jTj     = jacobiT.Mult(jacobi);   // J^T J
                var jTj_1   = jTj.Inverse();          // (J^T J)^-1
                var jTj_1jT = jTj_1.Mult(jacobiT);    // (J^T J)^-1 J^T
                var temp    = jTj_1jT.Mult(residual);

                // One-dimensional search for the step length along the direction.
                var gamma = GoldenRatioMethod<T>.FindMin(objective, function, x, temp, Eps);

                x.Sub(temp.Mult(gamma)); // x <- x - gamma * temp (Sub and Mult mutate in place)
                this.ApplyMinimumAndMaximumValues(minimumParameters, maximumParameters, x, la);
                oldError = error;
                bindF    = function.Bind(x);
                residual = objective.Residual(bindF);
                error    = la.Sqrt(la.Dot(residual.ToArray(), residual.ToArray()));
            }

            return x;
        }
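
A minimal, self-contained sketch of the Gauss-Newton step performed above, written with plain double arrays instead of the IVector<T>/ILinearAlgebra abstractions: fit y = a*x + b by least squares, solving the normal equations (J^T J) d = J^T r for the step d. The model, the data, and the 2x2 Cramer solve are illustrative assumptions, not part of the original code.

using System;

static class GaussNewtonSketch
{
    static void Main()
    {
        double[] xs = { 0, 1, 2, 3 };
        double[] ys = { 1, 3, 5, 7 };   // exactly y = 2x + 1
        double a = 0, b = 0;            // initial parameters

        for (int iter = 0; iter < 10; iter++)
        {
            // Residuals r_i = a*x_i + b - y_i and Jacobian rows J_i = [x_i, 1].
            double jtj00 = 0, jtj01 = 0, jtj11 = 0, jtr0 = 0, jtr1 = 0;
            for (int i = 0; i < xs.Length; i++)
            {
                double r = a * xs[i] + b - ys[i];
                jtj00 += xs[i] * xs[i]; jtj01 += xs[i]; jtj11 += 1;
                jtr0  += xs[i] * r;     jtr1  += r;
            }

            // Solve (J^T J) d = J^T r by Cramer's rule (2x2 system).
            double det = jtj00 * jtj11 - jtj01 * jtj01;
            double d0  = ( jtj11 * jtr0 - jtj01 * jtr1) / det;
            double d1  = (-jtj01 * jtr0 + jtj00 * jtr1) / det;

            a -= d0;                    // x <- x - (J^T J)^-1 J^T r, with gamma = 1
            b -= d1;
            if (Math.Sqrt(d0 * d0 + d1 * d1) < 1e-12) break;
        }

        Console.WriteLine($"a = {a}, b = {b}");   // prints a = 2, b = 1
    }
}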
Example #2
        private IVector<T> MinimizeInternal(IDifferentiableFunctional<T> objective, IParametricFunction<T> function, IVector<T> initialParameters, IVector<T> minimumParameters = default,
                                             IVector<T> maximumParameters = default)
        {
            var la = LinearAlgebra.Value;

            var xOld = initialParameters.Clone() as IVector<T>;

            // Gradient at the starting point and its Euclidean norm.
            var bindF   = function.Bind(xOld);
            var fiNew   = objective.Gradient(bindF);
            var xNew    = initialParameters.Clone() as IVector<T>;
            var k       = 0;
            var normNew = la.Sqrt(la.Dot(fiNew.ToArray(), fiNew.ToArray()));
            var normOld = la.Cast(1000); // sentinel so the first convergence check passes

            // Iterate while the gradient norm exceeds Eps and still decreases by more than Eps.
            while (k++ < MaxIteration && la.Compare(normNew, Eps) == 1 && la.Compare(la.Sub(normOld, normNew), Eps) == 1)
            {
                bindF   = function.Bind(xNew);
                fiNew   = objective.Gradient(bindF);
                normOld = normNew;
                normNew = la.Sqrt(la.Dot(fiNew.ToArray(), fiNew.ToArray()));

                // Step length along the gradient from the current point.
                var gamma = GoldenRatioMethod<T>.FindMin(objective, function, xNew, fiNew, Eps);

                // Keep the previous point, then step: xNew <- xNew - gamma * fiNew.
                xOld = xNew;
                xNew = (xNew.Clone() as IVector<T>).Sub(fiNew.MultWithCloning(gamma));
                this.ApplyMinimumAndMaximumValues(minimumParameters, maximumParameters, xNew, la);
            }

            return xNew;
        }
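
All three overloads delegate the step length to GoldenRatioMethod<T>.FindMin, whose implementation is not shown in this listing. Below is a plain-double sketch of golden-section search, assuming FindMin minimizes phi(gamma) = F(x - gamma * direction) over a bracket; the bracket [0, 1], the tolerance, and the delegate-based signature are assumptions for illustration.

using System;

static class GoldenSectionSketch
{
    static double FindMin(Func<double, double> phi, double lo, double hi, double eps)
    {
        double invPhi = (Math.Sqrt(5) - 1) / 2;   // 1/phi, about 0.618
        double x1 = hi - invPhi * (hi - lo);
        double x2 = lo + invPhi * (hi - lo);
        double f1 = phi(x1), f2 = phi(x2);

        while (hi - lo > eps)
        {
            if (f1 < f2)
            {
                // Minimum lies in [lo, x2]; reuse x1 as the new right probe.
                hi = x2; x2 = x1; f2 = f1;
                x1 = hi - invPhi * (hi - lo); f1 = phi(x1);
            }
            else
            {
                // Minimum lies in [x1, hi]; reuse x2 as the new left probe.
                lo = x1; x1 = x2; f1 = f2;
                x2 = lo + invPhi * (hi - lo); f2 = phi(x2);
            }
        }
        return (lo + hi) / 2;
    }

    static void Main()
    {
        // Minimize phi(g) = (g - 0.3)^2 on [0, 1]; expect g close to 0.3.
        Console.WriteLine(FindMin(t => (t - 0.3) * (t - 0.3), 0, 1, 1e-8));
    }
}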
Example #3
        private IVector<T> MinimizeInternal(IDifferentiableFunctional<T> objective, IParametricFunction<T> function, IVector<T> initialParameters, IVector<T> minimumParameters = default,
                                             IVector<T> maximumParameters = default)
        {
            var la = LinearAlgebra.Value;

            var xNew     = initialParameters.Clone() as IVector<T>;
            var bindF    = function.Bind(xNew);
            var curVal   = objective.Value(bindF);
            var prevVal  = curVal;
            var gradient = objective.Gradient(bindF);
            var p        = gradient.Clone() as IVector<T>; // initial search direction; each update steps along -p

            var gradSquare = la.Dot(p.ToArray(), p.ToArray());

            int numIter = 0;

            do
            {
                T           alpha, beta, newGradSquare;
                IVector<T>  newGrad;

                // Find the minimum of F(x - alpha * p) with a one-dimensional search.
                alpha = GoldenRatioMethod<T>.FindMin(objective, function, xNew, p, Eps);

                xNew = xNew.Add(p.MultWithCloning(la.Mult(la.Cast(-1), alpha))); // xNew <- xNew - alpha * p

                this.ApplyMinimumAndMaximumValues(minimumParameters, maximumParameters, xNew, la);
                bindF         = function.Bind(xNew);
                newGrad       = objective.Gradient(bindF);
                newGradSquare = la.Dot(newGrad.ToArray(), newGrad.ToArray());

                // Polak-Ribiere coefficient, with a periodic restart to steepest descent.
                beta = numIter % (5 * SpaceSize) == 0
                    ? la.GetZeroValue()
                    : la.Div(la.Sub(newGradSquare, la.Dot(newGrad.ToArray(), gradient.ToArray())), gradSquare);

                // Direction update: p <- newGrad + beta * p (Mult and Add mutate in place).
                p.Mult(beta).Add(newGrad);

                prevVal = curVal;
                curVal  = objective.Value(bindF);

                gradient   = newGrad;
                gradSquare = newGradSquare;
            } while (la.Compare(gradSquare, Eps) == 1 && MaxIteration > numIter++);

            return xNew;
        }
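
For reference, a minimal, self-contained sketch of the same Polak-Ribiere scheme on plain double arrays, restarting to steepest descent when the coefficient goes negative. The quadratic test function and the crude backtracking line search are illustrative assumptions, not part of the original code.

using System;

static class ConjugateGradientSketch
{
    // Quadratic test function f(x) = (x0 - 1)^2 + 10 * (x1 + 2)^2, minimum at (1, -2).
    static double F(double[] x) => (x[0] - 1) * (x[0] - 1) + 10 * (x[1] + 2) * (x[1] + 2);

    static double[] Grad(double[] x) => new[] { 2 * (x[0] - 1), 20 * (x[1] + 2) };

    static void Main()
    {
        double[] x = { 0, 0 };
        double[] g = Grad(x);
        double[] p = { -g[0], -g[1] };  // first direction: steepest descent

        for (int k = 0; k < 100 && g[0] * g[0] + g[1] * g[1] > 1e-16; k++)
        {
            // Crude backtracking line search on f(x + alpha * p).
            double alpha = 1;
            while (alpha > 1e-12 && F(new[] { x[0] + alpha * p[0], x[1] + alpha * p[1] }) >= F(x))
                alpha /= 2;

            x[0] += alpha * p[0];
            x[1] += alpha * p[1];

            // Polak-Ribiere: beta = g1.(g1 - g0) / g0.g0; restart (beta = 0) if negative.
            double[] g1 = Grad(x);
            double beta = (g1[0] * (g1[0] - g[0]) + g1[1] * (g1[1] - g[1]))
                        / (g[0] * g[0] + g[1] * g[1]);
            if (beta < 0) beta = 0;

            p[0] = -g1[0] + beta * p[0];
            p[1] = -g1[1] + beta * p[1];
            g = g1;
        }

        Console.WriteLine($"x = ({x[0]}, {x[1]})");  // approximately (1, -2)
    }
}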