Code example #1
        Tuple <string, object> _unused_placeholder; // (see signature below)
        Tuple <double[], Dictionary <string, object>, Dictionary <string, object>>
        _fit(
            FuncDelegate objective,
            GradientDelegate gradient,
            double[] start_params = null,
            object fargs          = null,
            Dictionary <string, object> kwargs = null,
            HessianDelegate hessian            = null,
            string method    = "cg",
            int maxiter      = 100,
            bool full_output = true,
            bool disp        = true,
            alglib.ndimensional_rep callback = null,
            bool retall = true)
        {
            // Default: start from an array of zeros. Note that the dimension
            // is hard-coded to 10; pass start_params explicitly for other sizes.
            if (start_params == null)
            {
                var n = 10;
                start_params = new double[n];
            }
            Dictionary <string, FitDelegate>
            fit_funcs = new Dictionary <string, FitDelegate>
            {
                { "bc", _fit_bc },
                { "bleic", _fit_bleic },
                { "cg", _fit_cg },
                { "comp", _fit_comp },
                { "lbfgs", _fit_lbfgs },
                { "lm", _fit_lm },
                { "nlc", _fit_nlc },
                { "ns", _fit_ns },
                { "qp", _fit_qp }
            };

            string[]      _methods = { "bc", "bleic", "cg", "comp", "lbfgs", "lm", "nlc", "ns", "qp" };
            List <string> methods  = new List <string>(_methods);

            _check_method(method, methods);

            var func    = fit_funcs[method];
            var _output = func(objective, gradient, start_params, fargs, kwargs, disp, maxiter, callback, retall, full_output, hessian);
            var xopt    = _output.Item1;
            var retvals = _output.Item2;
            Dictionary <string, object>
            optim_settings = new Dictionary <string, object>
            {
                { "optimizer", method },
                { "start_params", start_params },
                { "maxiter", maxiter },
                { "full_output", full_output },
                { "disp", disp },
                { "fargs", fargs },
                { "callback", callback },
                { "retall", retall }
            };

            // Merge the user-supplied kwargs into the settings; kwargs entries
            // overwrite the defaults, and a null kwargs (the default) is skipped.
            if (kwargs != null)
            {
                foreach (var kv in kwargs)
                {
                    optim_settings[kv.Key] = kv.Value;
                }
            }
            return(Tuple.Create(xopt, retvals, optim_settings));
        }
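
For orientation, here is a minimal usage sketch of _fit. The FuncDelegate and GradientDelegate shapes (double[] -> double and double[] -> double[]) are assumptions; they are not shown in this excerpt.

        // Hedged usage sketch: the delegate shapes below are assumed, not
        // taken from the original source.
        FuncDelegate     objective = x => x[0] * x[0] + x[1] * x[1];
        GradientDelegate gradient  = x => new[] { 2 * x[0], 2 * x[1] };

        var result       = _fit(objective, gradient, start_params: new[] { 1.0, 1.0 }, method: "cg");
        double[] xopt    = result.Item1;   // optimized parameters
        var solverReport = result.Item2;   // iterationscount, nfev, terminationtype, ...
        var usedSettings = result.Item3;   // echo of the optimizer settings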
Code example #2
 Tuple <double[], Dictionary <string, object>>
 _fit_qp(
     FuncDelegate objective,
     GradientDelegate gradient,
     double[] start_params,
     object fargs,
     Dictionary <string, object> kwargs,
     bool disp,
     int maxiter,
     alglib.ndimensional_rep callback,
     bool retall,
     bool full_output,
     HessianDelegate hessian)
 {
     throw new NotImplementedException();
 }
Code example #3
        Tuple <double[], Dictionary <string, object>>
        _fit_cg(
            FuncDelegate objective,
            GradientDelegate gradient,
            double[] start_params,
            object fargs,
            Dictionary <string, object> kwargs,
            bool disp,
            int maxiter,
            alglib.ndimensional_rep callback,
            bool retall,
            bool full_output,
            HessianDelegate hessian)
        {
            var n = start_params.Length;

            double[] x    = new double[n];  // receives the optimized parameters
            double   epsg = 0.0000000001;   // stop once the gradient norm drops below epsg
            double   epsf = 0;              // 0 disables the function-change stopping test
            double   epsx = 0;              // 0 disables the step-size stopping test

            alglib.mincgstate  state;
            alglib.mincgreport rep;
            alglib.mincgcreate(start_params, out state);
            alglib.mincgsetcond(state, epsg, epsf, epsx, maxiter);
            // Bridge the objective/gradient pair to ALGLIB's gradient callback
            var _grad = _get_alglib_grad(objective, gradient);

            alglib.mincgoptimize(state, _grad, callback, null);
            alglib.mincgresults(state, out x, out rep);
            // parse cg report into key-value pairs
            Dictionary <string, object> retvals = new Dictionary <string, object>();

            retvals.Add("iterationscount", rep.iterationscount);
            retvals.Add("nfev", rep.nfev);
            retvals.Add("varidx", rep.varidx);
            retvals.Add("terminationtype", rep.terminationtype);
            return(Tuple.Create(x, retvals));
        }
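
_get_alglib_grad is referenced above but not shown in this excerpt. Under the same assumed delegate shapes as in the sketch after code example #1, a plausible adapter would pack the pair into ALGLIB's alglib.ndimensional_grad callback:

        // Hypothetical adapter (the real _get_alglib_grad is not shown here):
        // wraps the objective/gradient pair into ALGLIB's
        // void (double[] x, ref double func, double[] grad, object obj) shape.
        static alglib.ndimensional_grad _get_alglib_grad(FuncDelegate objective, GradientDelegate gradient)
        {
            return (double[] x, ref double func, double[] grad, object obj) =>
            {
                func = objective(x);
                var g = gradient(x);
                Array.Copy(g, grad, g.Length);  // fill ALGLIB's preallocated buffer in place
            };
        }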
        public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
                                               ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable <int> rows, bool applyLinearScaling,
                                               int maxIterations, bool updateVariableWeights = true,
                                               double lowerEstimationLimit = double.MinValue, double upperEstimationLimit              = double.MaxValue,
                                               bool updateConstantsInTree  = true, Action <double[], double, object> iterationCallback = null, EvaluationsCounter counter = null)
        {
            // numeric constants in the tree become variables for constant opt
            // variables in the tree become parameters (fixed values) for constant opt
            // for each parameter (variable in the original tree) we store the
            // variable name, variable value (for factor vars) and lag as a DataForVariable object.
            // A dictionary is used to find parameters
            double[] initialConstants;
            var      parameters = new List <TreeToAutoDiffTermConverter.DataForVariable>();

            TreeToAutoDiffTermConverter.ParametricFunction         func;
            TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;
            if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialConstants, out func, out func_grad))
            {
                throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
            }
            if (parameters.Count == 0)
            {
                return(0.0);                             // gkronber: constant expressions always have an R² of 0.0
            }
            var parameterEntries = parameters.ToArray(); // order of entries must be the same for x

            // extract initial constants
            double[] c;
            if (applyLinearScaling)
            {
                c    = new double[initialConstants.Length + 2];
                c[0] = 0.0; // additive offset of the linear scaling
                c[1] = 1.0; // multiplicative factor of the linear scaling
                Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
            }
            else
            {
                c = (double[])initialConstants.Clone();
            }

            double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

            if (counter == null)
            {
                counter = new EvaluationsCounter();
            }
            var rowEvaluationsCounter = new EvaluationsCounter();

            alglib.lsfitstate  state;
            alglib.lsfitreport rep;
            int retVal;

            IDataset ds = problemData.Dataset;

            double[,] x = new double[rows.Count(), parameters.Count];
            int row = 0;

            foreach (var r in rows)
            {
                int col = 0;
                foreach (var info in parameterEntries)
                {
                    if (ds.VariableHasType <double>(info.variableName))
                    {
                        x[row, col] = ds.GetDoubleValue(info.variableName, r + info.lag);
                    }
                    else if (ds.VariableHasType <string>(info.variableName))
                    {
                        x[row, col] = ds.GetStringValue(info.variableName, r) == info.variableValue ? 1 : 0;
                    }
                    else
                    {
                        throw new InvalidProgramException("found a variable of unknown type");
                    }
                    col++;
                }
                row++;
            }
            double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();
            int      n = x.GetLength(0);
            int      m = x.GetLength(1);
            int      k = c.Length;

            alglib.ndimensional_pfunc function_cx_1_func = CreatePFunc(func);
            alglib.ndimensional_pgrad function_cx_1_grad = CreatePGrad(func_grad);
            // forward ALGLIB's progress reports to the user callback; the wrapper
            // is only registered below when iterationCallback is non-null
            alglib.ndimensional_rep xrep = (p, f, obj) => iterationCallback(p, f, obj);

            try {
                alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state);
                alglib.lsfitsetcond(state, 0.0, 0.0, maxIterations);
                alglib.lsfitsetxrep(state, iterationCallback != null);
                //alglib.lsfitsetgradientcheck(state, 0.001);
                alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, rowEvaluationsCounter);
                alglib.lsfitresults(state, out retVal, out c, out rep);
            } catch (ArithmeticException) {
                return(originalQuality);
            } catch (alglib.alglibexception) {
                return(originalQuality);
            }

            // lsfit counts evaluations per row; divide by n for whole-dataset counts
            counter.FunctionEvaluations += rowEvaluationsCounter.FunctionEvaluations / n;
            counter.GradientEvaluations += rowEvaluationsCounter.GradientEvaluations / n;

            // retVal == -7 => constant optimization failed due to a wrong gradient
            if (retVal != -7)
            {
                if (applyLinearScaling)
                {
                    var tmp = new double[c.Length - 2];
                    Array.Copy(c, 2, tmp, 0, tmp.Length);
                    UpdateConstants(tree, tmp, updateVariableWeights);
                }
                else
                {
                    UpdateConstants(tree, c, updateVariableWeights);
                }
            }
            var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

            if (!updateConstantsInTree)
            {
                UpdateConstants(tree, initialConstants, updateVariableWeights);
            }

            if (originalQuality - quality > 0.001 || double.IsNaN(quality))
            {
                UpdateConstants(tree, initialConstants, updateVariableWeights);
                return(originalQuality);
            }
            return(quality);
        }
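
CreatePFunc and CreatePGrad are likewise outside this excerpt. A plausible sketch of these adapters follows; the (c, x) argument order of ParametricFunction and the Tuple layout of ParametricFunctionGradient's result are assumptions, while the EvaluationsCounter fields are taken from the code above (the obj slot of lsfitfit carries rowEvaluationsCounter):

        // Hypothetical adapters bridging the AutoDiff delegates to ALGLIB's
        // parametric callbacks; argument order and Tuple layout are assumed.
        static alglib.ndimensional_pfunc CreatePFunc(TreeToAutoDiffTermConverter.ParametricFunction func)
        {
            return (double[] c, double[] x, ref double fx, object o) =>
            {
                fx = func(c, x);                          // assumed (coefficients, inputs) order
                ((EvaluationsCounter)o).FunctionEvaluations++;
            };
        }

        static alglib.ndimensional_pgrad CreatePGrad(TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad)
        {
            return (double[] c, double[] x, ref double fx, double[] grad, object o) =>
            {
                var r = func_grad(c, x);                  // assumed to yield (gradient, value)
                fx = r.Item2;
                Array.Copy(r.Item1, grad, grad.Length);
                ((EvaluationsCounter)o).GradientEvaluations++;
            };
        }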