/// <summary>
/// Gauss–Newton minimization of a least-squares functional: iteratively solves
/// (JᵀJ)⁻¹ Jᵀ r for the step direction, line-searches the step length with the
/// golden-ratio method, and clamps the parameters to the optional box bounds.
/// </summary>
/// <param name="objective">Least-squares functional providing residual and Jacobian.</param>
/// <param name="function">Parametric model the parameters are bound to.</param>
/// <param name="initialParameters">Starting point; cloned, not mutated.</param>
/// <param name="minimumParameters">Optional per-component lower bounds.</param>
/// <param name="maximumParameters">Optional per-component upper bounds.</param>
/// <returns>The parameter vector after convergence or MaxIteration steps.</returns>
private IVector<T> MinimizeInternal(ILeastSquaresFunctional<T> objective, IParametricFunction<T> function, IVector<T> initialParameters, IVector<T> minimumParameters = default,
                                             IVector<T> maximumParameters = default)
        {
            var la = LinearAlgebra.Value;
            var x  = initialParameters.Clone() as IVector<T>;

            var bindF    = function.Bind(x);
            var iter     = 0;
            var residual = objective.Residual(bindF);
            var error    = la.Sqrt(la.Dot(residual.ToArray(), residual.ToArray()));
            // Sentinel previous error so the first convergence check passes.
            var oldError = la.Cast(1000);

            // BUG FIX: the original condition was
            //   Compare(error - oldError, Eps) == 1 || iter++ < MaxIteration
            // Since the residual norm decreases, (error - oldError) is negative after the
            // first step, so the '||' made the convergence test useless and the loop always
            // ran exactly MaxIteration iterations. Use the absolute change AND the cap.
            while (la.Compare(la.Abs(la.Sub(error, oldError)), Eps) == 1 && iter++ < MaxIteration)
            {
                var jacobi  = objective.Jacobian(bindF);
                var jacobiT = jacobi.Transpose();   // Jᵀ
                var jTj     = jacobiT.Mult(jacobi); // JᵀJ
                var jTj_1   = jTj.Inverse();        // (JᵀJ)⁻¹
                var jTj_1jT = jTj_1.Mult(jacobiT);  // (JᵀJ)⁻¹ Jᵀ
                var temp    = jTj_1jT.Mult(residual);

                // One-dimensional line search for the step length along 'temp'.
                var gamma   = GoldenRatioMethod<T>.FindMin(objective, function, x, temp, Eps);

                // In-place update: x -= gamma * temp ('temp' is not reused afterwards).
                x.Sub(temp.Mult(gamma));
                this.ApplyMinimumAndMaximumValues(minimumParameters, maximumParameters, x, la);

                oldError = error;
                bindF    = function.Bind(x);
                residual = objective.Residual(bindF);
                error    = la.Sqrt(la.Dot(residual.ToArray(), residual.ToArray()));
            }

            return x;
        }
// Example #2
        /// <summary>
        /// Public entry point: validates that the objective is differentiable
        /// (required by the gradient-based internal solver) and delegates to
        /// <c>MinimizeInternal</c>.
        /// </summary>
        /// <param name="objective">Must implement <c>IDifferentiableFunctional&lt;T&gt;</c>.</param>
        /// <param name="function">Parametric model to optimize.</param>
        /// <param name="initialParameters">Starting parameter vector.</param>
        /// <param name="minimumParameters">Optional per-component lower bounds.</param>
        /// <param name="maximumParameters">Optional per-component upper bounds.</param>
        /// <exception cref="ArgumentException">Thrown when the objective is not differentiable.</exception>
        public IVector<T> Minimize(IFunctional<T> objective, IParametricFunction<T> function, IVector<T> initialParameters, IVector<T> minimumParameters = default,
                                    IVector<T> maximumParameters = default)
        {
            if (!(objective is IDifferentiableFunctional<T> differentiable))
            {
                // Message grammar fixed ("accept" -> "accepts").
                throw new ArgumentException("This optimizer accepts only IDifferentiableFunctional", nameof(objective));
            }

            return MinimizeInternal(differentiable, initialParameters: initialParameters, function: function, minimumParameters: minimumParameters, maximumParameters: maximumParameters);
        }
// Example #3
        /// <summary>
        /// Golden-section line search: finds the step length t in [0, 100] that minimizes
        /// the objective along the descent direction, i.e. F(s - t * p).
        /// </summary>
        /// <param name="functional">Objective evaluated at each probe point.</param>
        /// <param name="function">Parametric model the probe parameters are bound to.</param>
        /// <param name="s">Current parameter vector (not mutated; cloning ops are used).</param>
        /// <param name="p">Search direction; the probe point is s - t * p.</param>
        /// <param name="eps">Interval-length tolerance for termination.</param>
        /// <returns>The midpoint of the final bracketing interval.</returns>
        public static T FindMin(IFunctional<T> functional, IParametricFunction<T> function, IVector<T> s, IVector<T> p, T eps)
        {
            var la = LinearAlgebra.Value;

            // Evaluate F(s - step * p) without mutating s or p.
            T Evaluate(T step) => functional.Value(function.Bind(s.AddWithCloning(p.MultWithCloning(la.Mult(step, la.Cast(-1))))));

            var a = la.Cast(0);
            var b = la.Cast(1e2);
            // Interior probes at the golden-ratio split: x = a + (3 - sqrt(5)) / 2 * (b - a), y = a + b - x.
            var x = la.Sum(a, la.Mult(la.Mult(la.Cast(0.5), la.Sub(la.Cast(3), la.Sqrt(la.Cast(5.0)))), (la.Sub(b, a))));
            var y = la.Sum(la.Sub(b, x), a);

            var valueX = Evaluate(x);
            var valueY = Evaluate(y);

            // BUG FIX: the original hard-coded la.Cast(1e-5) as the tolerance and
            // silently ignored the 'eps' parameter; use the caller-supplied tolerance.
            while (la.Compare(la.Abs(la.Sub(b, a)), eps) == 1)
            {
                if (la.Compare(valueX, valueY) == -1)
                {
                    // Minimum lies in [a, y]: shrink from the right, reflect the new probe.
                    b      = y;
                    y      = x;
                    valueY = valueX;
                    x      = la.Sub(la.Sum(b, a), y);
                    valueX = Evaluate(x);
                }
                else
                {
                    // Minimum lies in [x, b]: shrink from the left, reflect the new probe.
                    a      = x;
                    x      = y;
                    valueX = valueY;
                    y      = la.Sub(la.Sum(b, a), x);
                    valueY = Evaluate(y);
                }
            }

            return la.Div(la.Sum(a, b), la.Cast(2));
        }
// Example #4
        /// <summary>
        /// Simulated-annealing minimization: perturbs the current point with Gaussian
        /// noise scaled by a logarithmically cooling temperature and accepts only
        /// improving moves (the classic acceptance-probability term is left disabled,
        /// as in the original).
        /// </summary>
        /// <param name="objective">Functional to minimize.</param>
        /// <param name="function">Parametric model the candidate parameters are bound to.</param>
        /// <param name="initialParameters">Starting point; cloned, not mutated.</param>
        /// <param name="minimumParameters">Optional per-component lower bounds.</param>
        /// <param name="maximumParameters">Optional per-component upper bounds.</param>
        /// <returns>The best parameter vector found.</returns>
        public IVector<T> Minimize(IFunctional<T> objective, IParametricFunction<T> function, IVector<T> initialParameters, IVector<T> minimumParameters = default,
                                    IVector<T> maximumParameters = default)
        {
            var         k     = 0;
            var         la    = LinearAlgebra.Value;
            IVector<T> xPrev = initialParameters.Clone() as IVector<T>;
            IVector<T> xNew  = initialParameters.Clone() as IVector<T>;

            var normalDist = new Normal(Mean, StdDev);
            T   prevValue  = objective.Value(function.Bind(xPrev));

            do
            {
                // BUG FIX: the original used Math.Log(k, Math.E). At k = 0 that is -infinity
                // (t = -0, a wasted first iteration) and at k = 1 it is 0, so t = 20/0 = +infinity
                // and the perturbation blows up. Shifting the argument by 2 keeps the
                // temperature finite and positive from the first iteration on.
                var t = 20d / Math.Log(k + 2, Math.E);

                // Gaussian perturbation of every component, scaled by the temperature.
                for (int i = 0; i < xPrev.Count; i++)
                {
                    var nR = normalDist.Sample() * t;
                    xNew[i] = la.Sum(xPrev[i], la.Cast(nR));
                }

                this.ApplyMinimumAndMaximumValues(minimumParameters, maximumParameters, xNew, la);

                var newValue = objective.Value(function.Bind(xNew));

                var sub = la.Sub(newValue, prevValue);

                // Accept only strict improvements; the stochastic acceptance term was
                // already disabled in the original:
                // || la.Exp(la.Mult(la.Cast(-1/t), sub)) >= rand.NextDouble()
                if (la.Compare(sub, la.GetZeroValue()) == -1)
                {
                    prevValue = newValue;
                    xPrev     = xNew.Clone() as IVector<T>;
                }

                // BUG FIX: the original only incremented k inside the 'MaxIter.HasValue'
                // branch of the loop condition, so with MaxIter == null the counter (and
                // the cooling schedule) never advanced. Increment unconditionally;
                // 'MaxIter >= k' below is equivalent to the original post-increment test.
                k++;
            } while ((!MaxIter.HasValue || MaxIter >= k) && la.Compare(prevValue, Eps) == 1);

            return xPrev;
        }
// Example #5
        /// <summary>
        /// Gradient-descent minimization: steps against the gradient with a step length
        /// chosen by the golden-ratio line search, clamping to optional box bounds.
        /// </summary>
        /// <param name="objective">Differentiable functional providing the gradient.</param>
        /// <param name="function">Parametric model the parameters are bound to.</param>
        /// <param name="initialParameters">Starting point; cloned, not mutated.</param>
        /// <param name="minimumParameters">Optional per-component lower bounds.</param>
        /// <param name="maximumParameters">Optional per-component upper bounds.</param>
        /// <returns>The parameter vector after the loop terminates.</returns>
        private IVector <T> MinimizeInternal(IDifferentiableFunctional <T> objective, IParametricFunction <T> function, IVector <T> initialParameters, IVector <T> minimumParameters = default,
                                             IVector <T> maximumParameters = default)
        {
            var la = LinearAlgebra.Value;

            var xOld = initialParameters.Clone() as IVector <T>;

            //Calculate initial gradient
            var bindF   = function.Bind(xOld);
            var fiNew   = objective.Gradient(bindF);
            var xNew    = initialParameters.Clone() as IVector <T>;
            var k       = 0;
            var normOld = la.Sqrt(la.Dot(fiNew.ToArray(), fiNew.ToArray()));
            // Sentinel "previous" norm so the first iteration's change test passes.
            var normNew = la.Cast(1000);

            // NOTE(review): the third clause requires (normNew - normOld) > Eps, i.e. the
            // gradient norm to be INCREASING by more than Eps between iterations — for a
            // converging descent this becomes false quickly and may stop the loop early.
            // Presumably |normNew - normOld| > Eps was intended — confirm against callers.
            while (k++ < MaxIteration && (la.Compare(la.Sqrt(la.Dot(fiNew.ToArray(), fiNew.ToArray())), Eps) == 1) && (la.Compare(la.Sub(normNew, normOld), Eps) == 1))
            {
                bindF   = function.Bind(xNew);
                fiNew   = objective.Gradient(bindF);
                normOld = normNew;
                normNew = la.Sqrt(la.Dot(fiNew.ToArray(), fiNew.ToArray()));

                // Line search for the step length along the (positive) gradient;
                // NOTE(review): the search is anchored at xOld while the step below is
                // applied to a clone of xNew — confirm this is intentional.
                var gamma = GoldenRatioMethod <T> .FindMin(objective, function, xOld, fiNew, Eps);

                xOld = xNew.Clone() as IVector <T>;
                // NOTE(review): if Sub mutates its receiver and returns 'this' (as the
                // in-place use of Sub elsewhere in this codebase suggests), xNew and xOld
                // alias the same object after this line — verify IVector<T>.Sub semantics.
                xNew = xOld.Sub(fiNew.MultWithCloning(gamma));
                this.ApplyMinimumAndMaximumValues(minimumParameters, maximumParameters, xNew, la);
            }

            return(xNew);
        }
// Example #6
        /// <summary>
        /// Conjugate-gradient minimization (Polak–Ribière-style beta with a periodic
        /// restart every 5 * SpaceSize iterations): line-searches along the conjugate
        /// direction p, clamping the parameters to optional box bounds.
        /// </summary>
        /// <param name="objective">Differentiable functional providing value and gradient.</param>
        /// <param name="function">Parametric model the parameters are bound to.</param>
        /// <param name="initialParameters">Starting point; cloned, not mutated.</param>
        /// <param name="minimumParameters">Optional per-component lower bounds.</param>
        /// <param name="maximumParameters">Optional per-component upper bounds.</param>
        /// <returns>The parameter vector after the loop terminates.</returns>
        private IVector <T> MinimizeInternal(IDifferentiableFunctional <T> objective, IParametricFunction <T> function, IVector <T> initialParameters, IVector <T> minimumParameters = default,
                                             IVector <T> maximumParameters = default)
        {
            var la = LinearAlgebra.Value;


            var xNew     = initialParameters.Clone() as IVector <T>;
            var bindF    = function.Bind(xNew);
            var curVal   = objective.Value(bindF);
            var prevVal  = curVal;
            // 'gradient' is negated in place below; 'p' keeps a separate (positive) copy
            // of the initial gradient as the first search direction.
            var gradient = objective.Gradient(bindF);
            var p        = objective.Gradient(bindF).Clone() as IVector <T>;

            // In-place negation: gradient becomes -grad F.
            gradient.Mult(la.Cast(-1));
            var gradSquare = la.Dot(p.ToArray(), p.ToArray());

            int numIter = 0;

            do
            {
                T           alpha, beta, newGradSquare;
                IVector <T> newGrad;

                // Find the minimum of F(x + alpha * p) with a one-dimensional
                // optimization method (golden-ratio line search).
                // NOTE(review): the update below actually applies x - alpha * p (the step
                // is multiplied by -1), matching FindMin's probe convention.
                alpha = GoldenRatioMethod <T> .FindMin(objective, function, xNew, p, Eps);

                xNew = xNew.Add(p.MultWithCloning(la.Mult(la.Cast(-1), alpha)));

                this.ApplyMinimumAndMaximumValues(minimumParameters, maximumParameters, xNew, la);
                bindF         = function.Bind(xNew);
                // New negated gradient at the updated point (Mult mutates and returns it).
                newGrad       = objective.Gradient(bindF).Mult(la.Cast(-1));
                newGradSquare = la.Dot(newGrad.ToArray(), newGrad.ToArray());

                // Periodic restart: every 5 * SpaceSize iterations fall back to steepest
                // descent (beta = 0); otherwise a Polak–Ribière-style coefficient.
                beta = numIter % (5 * SpaceSize) == 0
                    ? la.GetZeroValue()
                    : la.Div(la.Mult(la.Cast(-1), la.Sub(newGradSquare, la.Dot(newGrad.ToArray(), gradient.ToArray()))), gradSquare);

                // In-place direction update: p = beta * p + newGrad.
                p.Mult(beta).Add(newGrad);

                // NOTE(review): prevVal is written but never read — dead bookkeeping,
                // possibly left over from a removed value-based stopping test.
                prevVal = curVal;
                curVal  = objective.Value(bindF);

                gradient   = newGrad;
                gradSquare = newGradSquare;
            } while (la.Compare(gradSquare, Eps) == 1 && MaxIteration > numIter++);

            return(xNew);
        }