Beispiel #1
0
        _fit(
            FuncDelegate objective,
            GradientDelegate gradient,
            double[] start_params = null,
            object fargs          = null,
            Dictionary <string, object> kwargs = null,
            HessianDelegate hessian            = null,
            string method    = "cg",
            int maxiter      = 100,
            bool full_output = true,
            bool disp        = true,
            alglib.ndimensional_rep callback = null,
            bool retall = true)
        {
            // Dispatch entry point: validates the method name, runs the chosen
            // fitter, and returns (xopt, solver report, settings snapshot).

            // Default starting point is a vector of zeros.
            if (start_params == null)
            {
                var n = 10;
                start_params = new double[n];
            }

            // Map of method name -> concrete fitter implementation.
            Dictionary <string, FitDelegate>
            fit_funcs = new Dictionary <string, FitDelegate>
            {
                { "bc", _fit_bc },
                { "bleic", _fit_bleic },
                { "cg", _fit_cg },
                { "comp", _fit_comp },
                { "lbfgs", _fit_lbfgs },
                { "lm", _fit_lm },
                { "nlc", _fit_nlc },
                { "ns", _fit_ns },
                { "qp", _fit_qp }
            };

            string[]      _methods = { "bc", "bleic", "cg", "comp", "lbfgs", "lm", "nlc", "ns", "qp" };
            List <string> methods  = new List <string>(_methods);

            _check_method(method, methods);

            var func    = fit_funcs[method];
            var _output = func(objective, gradient, start_params, fargs, kwargs, disp, maxiter, callback, retall, full_output, hessian);
            var xopt    = _output.Item1;
            var retvals = _output.Item2;

            // Record the settings that produced this fit.
            Dictionary <string, object>
            optim_settings = new Dictionary <string, object>
            {
                { "optimizer", method },
                { "start_params", start_params },
                { "maxiter", maxiter },
                { "full_output", full_output },
                { "disp", disp },
                { "fargs", fargs },
                { "callback", callback },
                { "retall", retall }
            };

            // BUG FIX: the original cast of Enumerable.Concat(...) back to
            // Dictionary<string, object> always throws InvalidCastException
            // (Concat yields an IEnumerable of pairs, not a dictionary), and
            // it also dereferenced kwargs without a null check. Merge the
            // caller-supplied kwargs explicitly instead; on key collision the
            // caller's value wins.
            if (kwargs != null)
            {
                foreach (var kv in kwargs)
                {
                    optim_settings[kv.Key] = kv.Value;
                }
            }
            return(Tuple.Create(xopt, retvals, optim_settings));
        }
Beispiel #2
0
 /// <summary>
 /// Wraps separate function and gradient delegates into the combined
 /// callback signature that alglib optimizers expect.
 /// </summary>
 /// <param name="objective">Delegate evaluating the objective function at a point.</param>
 /// <param name="gradient">Delegate evaluating the gradient vector at a point.</param>
 /// <returns>An <c>alglib.ndimensional_grad</c> callback suitable for the alglib optimizers.</returns>
 public static alglib.ndimensional_grad _get_alglib_grad(
     FuncDelegate objective,
     GradientDelegate gradient)
 {
     return((double[] arg, ref double func, double[] grad, object obj) =>
     {
         func = objective(arg);
         // BUG FIX: the original wrote `grad = gradient(arg)`, which only
         // rebinds the local parameter — alglib still reads the (unchanged)
         // buffer it passed in, so the optimizer saw garbage gradients.
         // Copy the computed gradient into alglib's buffer instead.
         var g = gradient(arg);
         Array.Copy(g, grad, g.Length);
     });
 }
Beispiel #3
0
 // Placeholder for the quadratic-programming ("qp") fitter. It is wired
 // into the method dispatch table but has no implementation yet, so any
 // caller selecting method == "qp" gets NotImplementedException.
 // The parameter list mirrors the shared FitDelegate signature used by
 // the other _fit_* implementations; all arguments are currently ignored.
 _fit_qp(
     FuncDelegate objective,
     GradientDelegate gradient,
     double[] start_params,
     object fargs,
     Dictionary <string, object> kwargs,
     bool disp,
     int maxiter,
     alglib.ndimensional_rep callback,
     bool retall,
     bool full_output,
     HessianDelegate hessian)
 {
     throw new NotImplementedException();
 }
Beispiel #4
0
 /// <summary>
 /// Steepest-descent minimisation of <paramref name="F"/> starting from
 /// <paramref name="StartVector"/>. At every step a one-dimensional line
 /// search along the negative gradient picks the step length; iteration
 /// stops when the squared gradient norm drops below epsilon or the
 /// iteration budget is exhausted.
 /// </summary>
 public MethodResult Execute(FunctionDelegate F, GradientDelegate Gradient, OptimizationMethod OptimizationMethod, Vector StartVector, int MaxIterations)
 {
     stopwatch.Start();
     currentVector = StartVector;
     int iteration = 0;
     while (iteration < MaxIterations)
     {
         oldVector = currentVector;
         deltaGradient = Gradient(currentVector);
         // Line search for the step length along the descent direction.
         var stepLength = OptimizationMethod((t) => { return F(currentVector - t * deltaGradient); }, -10, 10, 1E-9, MaxIterations);
         currentVector = currentVector - stepLength * deltaGradient;
         // Converged: gradient is (numerically) zero at the previous point.
         if (deltaGradient.LengthSquared < epsilon)
         {
             stopwatch.Stop();
             return new MethodResult() { Result = new Vector[] { currentVector }, IterationEnd = false, Iterations = iteration, MethodName = "Метод градиентного спуска", stopwatch = stopwatch, StartPoint = new Vector[] { StartVector }, FunctionValue = F(currentVector) };
         }
         iteration++;
     }
     // Budget exhausted without meeting the gradient tolerance.
     stopwatch.Stop();
     return new MethodResult() { Result = new Vector[] { currentVector }, IterationEnd = true, MethodName = "Метод градиентного спуска", Iterations = MaxIterations, stopwatch = stopwatch, StartPoint = new Vector[] { StartVector }, FunctionValue = F(currentVector) };
 }
Beispiel #5
0
        // Conjugate-gradient fitter backed by alglib's mincg solver.
        // Wraps the objective/gradient delegates into alglib's callback
        // shape, runs the optimizer from start_params, and returns the
        // solution vector together with the solver report flattened into
        // a string-keyed dictionary.
        _fit_cg(
            FuncDelegate objective,
            GradientDelegate gradient,
            double[] start_params,
            object fargs,
            Dictionary <string, object> kwargs,
            bool disp,
            int maxiter,
            alglib.ndimensional_rep callback,
            bool retall,
            bool full_output,
            HessianDelegate hessian)
        {
            // Stopping tolerances: only the gradient-norm tolerance (epsg)
            // is non-zero; epsf/epsx are disabled. maxiter caps iterations.
            const double epsg = 0.0000000001;
            const double epsf = 0;
            const double epsx = 0;

            alglib.mincgstate  state;
            alglib.mincgreport rep;
            alglib.mincgcreate(start_params, out state);
            alglib.mincgsetcond(state, epsg, epsf, epsx, maxiter);

            var wrappedGrad = _get_alglib_grad(objective, gradient);
            alglib.mincgoptimize(state, wrappedGrad, callback, null);

            double[] solution = new double[start_params.Length];
            alglib.mincgresults(state, out solution, out rep);

            // Flatten the alglib cg report into key/value pairs.
            var retvals = new Dictionary <string, object>
            {
                { "iterationscount", rep.iterationscount },
                { "nfev", rep.nfev },
                { "varidx", rep.varidx },
                { "terminationtype", rep.terminationtype }
            };

            return(Tuple.Create(solution, retvals));
        }
 /// <summary>
 /// Conjugate-gradient minimisation of <paramref name="F"/> (the starting
 /// point lives in the field <c>x</c>). The search direction is restarted
 /// to steepest descent every 500 iterations; convergence is declared when
 /// the squared gradient norm falls below epsilon.
 /// </summary>
 public MethodResult Execute(FunctionDelegate F, GradientDelegate Gradient, OptimizationMethod OptimizationMethod, int MaxIterations)
 {
     stopwatch.Start();
     var direction = -Gradient(x);
     var gradNormSq = direction.LengthSquared;
     for (int iter = 0; iter < MaxIterations; iter++)
     {
         // Line search along the current conjugate direction.
         var stepLength = OptimizationMethod((alpha) => { return F(x + alpha * direction); }, -10, 10, 1E-9, MaxIterations);
         x = x + stepLength * direction;

         var nextDirection = -Gradient(x);
         var nextNormSq = nextDirection.LengthSquared;
         var beta = nextNormSq / gradNormSq;
         // Periodic restart: drop conjugacy and fall back to pure descent.
         if (iter % 500 == 0 && iter != 0)
         {
             beta = 0;
         }
         direction = nextDirection + beta * direction;
         gradNormSq = nextNormSq;

         if (gradNormSq < epsilon)
         {
             stopwatch.Stop();
             return new MethodResult() { Result = new Vector[] { x }, IterationEnd = false, Iterations = iter, MethodName = "Метод сопряженных градиентов", stopwatch = stopwatch , StartPoint = new Vector[] { StartVector}, FunctionValue = F(x)};
         }
     }
     // Iteration budget exhausted before reaching the gradient tolerance.
     stopwatch.Stop();
     return new MethodResult() { Result = new Vector[] { x }, IterationEnd = true, MethodName = "Метод сопряженных градиентов", Iterations = MaxIterations, stopwatch = stopwatch, StartPoint = new Vector[] { StartVector }, FunctionValue = F(x) };
 }
 /// <summary>
 /// Quasi-Newton minimisation of <paramref name="F"/> using a rank-two
 /// update of the inverse-Hessian approximation (fields <c>currentValue</c>
 /// and <c>hessianMatrix</c> hold the state). Stops when either the squared
 /// gradient norm or the squared step length falls below epsilon.
 /// </summary>
 public MethodResult Execute(FunctionDelegate F, GradientDelegate Gradient, OptimizationMethod OptimizationMethod, int MaxIterations)
 {
     stopwatch.Start();
     for (int i = 0; i < MaxIterations; i++)
     {
         var oldGradientValue = Gradient(currentValue);
         // Quasi-Newton direction: -(H * grad).
         Vector newDirection = -(hessianMatrix * oldGradientValue);
         var lambda = OptimizationMethod((alpha) => { return F(currentValue + alpha * newDirection); }, -10, 10, 1E-9, MaxIterations);
         var vector = lambda * newDirection;
         currentValue = vector + currentValue;
         var gradientValue = Gradient(currentValue);
         if (gradientValue.LengthSquared < epsilon || vector.LengthSquared < epsilon)
         {
             stopwatch.Stop();
             return new MethodResult() { Result = new Vector[] { currentValue }, IterationEnd = false, MethodName = "Квазиньютоновский метод", Iterations = i, stopwatch = stopwatch, StartPoint = new Vector[] { StartVector }, FunctionValue = F(currentValue) };
         }
         // Rank-two correction of the inverse-Hessian approximation from the
         // step (vector) and the gradient change (matrixU).
         var matrixU = gradientValue - oldGradientValue;
         var matrixA = (vector * vector.GetTranspose()) / (vector.ToMatrix().GetTranspose() * matrixU)[0, 0];
         var matrixB = -(hessianMatrix * matrixU * matrixU.GetTranspose() * hessianMatrix) / (matrixU.GetTranspose() * hessianMatrix * matrixU)[0, 0];
         hessianMatrix = hessianMatrix + matrixA + matrixB;
     }
     // BUG FIX: the original left the stopwatch running on this exit path
     // (the sibling gradient-descent and conjugate-gradient Execute methods
     // both stop it before returning), so elapsed time kept accumulating.
     stopwatch.Stop();
     return new MethodResult() { Result = new Vector[] { currentValue }, IterationEnd = true, MethodName = "Квазиньютоновский метод", Iterations = MaxIterations, stopwatch = stopwatch, StartPoint = new Vector[] { StartVector }, FunctionValue = F(currentValue) };
 }