private static alglib.ndimensional_pgrad CreatePGrad(AutoDiff.IParametricCompiledTerm compiledFunc)
{
    // Adapts the compiled AutoDiff term to ALGLIB's gradient callback signature:
    // fills in the function value and copies the gradient w.r.t. the parameters c.
    return (double[] c, double[] x, ref double func, double[] grad, object o) =>
    {
        var evaluation = compiledFunc.Differentiate(c, x);
        double[] gradient = evaluation.Item1;
        func = evaluation.Item2;
        Array.Copy(gradient, grad, grad.Length);
    };
}
 private static alglib.ndimensional_pfunc CreatePFunc(AutoDiff.IParametricCompiledTerm compiledFunc)
 {
     return((double[] c, double[] x, ref double func, object o) => {
         func = compiledFunc.Evaluate(c, x);
     });
 }
        /// <summary>
        /// Optimizes the numeric constants (and optionally variable weights) of a symbolic
        /// expression tree by Levenberg-Marquardt least-squares fitting (ALGLIB lsfit) using
        /// AutoDiff gradients. Returns the Pearson R² quality of the tree after optimization,
        /// or the original quality if optimization failed or made the tree worse.
        /// </summary>
        /// <param name="interpreter">Interpreter used to evaluate the tree for quality calculation.</param>
        /// <param name="tree">The tree whose constants are optimized (may be mutated in place).</param>
        /// <param name="problemData">Provides the dataset and target variable.</param>
        /// <param name="rows">Row indices of the dataset used for fitting and evaluation.</param>
        /// <param name="applyLinearScaling">Passed through to the quality evaluator.</param>
        /// <param name="maxIterations">Iteration limit for the ALGLIB fitting run.</param>
        /// <param name="updateVariableWeights">If true, variable weights are optimized in addition to constants.</param>
        /// <param name="lowerEstimationLimit">Lower clamp for estimated values during quality calculation.</param>
        /// <param name="upperEstimationLimit">Upper clamp for estimated values during quality calculation.</param>
        /// <param name="updateConstantsInTree">If false, the original constants are restored after quality measurement.</param>
        /// <exception cref="NotSupportedException">Thrown when the tree contains symbols that cannot be transformed to AutoDiff.</exception>
        public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable <int> rows, bool applyLinearScaling, int maxIterations, bool updateVariableWeights = true, double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue, bool updateConstantsInTree = true)
        {
            // "variables" are the AutoDiff unknowns being fitted (the constants c);
            // "parameters" are the data inputs (dataset columns listed in variableNames).
            List <AutoDiff.Variable> variables     = new List <AutoDiff.Variable>();
            List <AutoDiff.Variable> parameters    = new List <AutoDiff.Variable>();
            List <string>            variableNames = new List <string>();

            AutoDiff.Term func;
            if (!TryTransformToAutoDiff(tree.Root.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out func))
            {
                throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
            }
            // No data variables referenced => nothing to fit against; report neutral quality.
            if (variableNames.Count == 0)
            {
                return(0.0);
            }

            AutoDiff.IParametricCompiledTerm compiledFunc = func.Compile(variables.ToArray(), parameters.ToArray());

            List <SymbolicExpressionTreeTerminalNode> terminalNodes = null;

            // Collect the nodes whose values map onto the fitted vector c, in the same
            // prefix order that TryTransformToAutoDiff used when creating "variables".
            if (updateVariableWeights)
            {
                terminalNodes = tree.Root.IterateNodesPrefix().OfType <SymbolicExpressionTreeTerminalNode>().ToList();
            }
            else
            {
                terminalNodes = new List <SymbolicExpressionTreeTerminalNode>(tree.Root.IterateNodesPrefix().OfType <ConstantTreeNode>());
            }

            //extract initial constants
            double[] c = new double[variables.Count];
            {
                // c[0] and c[1] are reserved slots set to 0 and 1 — apparently the additive
                // offset and multiplicative factor added by TryTransformToAutoDiff for
                // linear scaling (they are skipped again below via Skip(2)); confirm there.
                c[0] = 0.0;
                c[1] = 1.0;
                int i = 2;
                foreach (var node in terminalNodes)
                {
                    ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
                    VariableTreeNode variableTreeNode = node as VariableTreeNode;
                    if (constantTreeNode != null)
                    {
                        c[i++] = constantTreeNode.Value;
                    }
                    else if (updateVariableWeights && variableTreeNode != null)
                    {
                        c[i++] = variableTreeNode.Weight;
                    }
                }
            }
            // Snapshot for rollback if the fit fails or degrades quality.
            double[] originalConstants = (double[])c.Clone();
            double   originalQuality   = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

            alglib.lsfitstate  state;
            alglib.lsfitreport rep;
            int info;

            IDataset ds = problemData.Dataset;

            // Build the design matrix: one row per selected dataset row, one column per
            // referenced variable (column order matches variableNames / parameters).
            double[,] x = new double[rows.Count(), variableNames.Count];
            int row = 0;

            foreach (var r in rows)
            {
                for (int col = 0; col < variableNames.Count; col++)
                {
                    x[row, col] = ds.GetDoubleValue(variableNames[col], r);
                }
                row++;
            }
            double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();
            int      n = x.GetLength(0);  // number of fitting points
            int      m = x.GetLength(1);  // number of input variables
            int      k = c.Length;        // number of fitted parameters

            alglib.ndimensional_pfunc function_cx_1_func = CreatePFunc(compiledFunc);
            alglib.ndimensional_pgrad function_cx_1_grad = CreatePGrad(compiledFunc);

            try {
                // Levenberg-Marquardt fit with analytic gradients; stopping is governed
                // solely by the iteration limit (eps parameters set to 0).
                alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state);
                alglib.lsfitsetcond(state, 0.0, 0.0, maxIterations);
                //alglib.lsfitsetgradientcheck(state, 0.001);
                alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, null, null);
                alglib.lsfitresults(state, out info, out c, out rep);
            }
            catch (ArithmeticException) {
                // Numerical failure during fitting — keep the tree unchanged.
                return(originalQuality);
            }
            catch (alglib.alglibexception) {
                return(originalQuality);
            }

            //info == -7  => constant optimization failed due to wrong gradient
            if (info != -7)
            {
                // Skip(2) drops the reserved scaling slots c[0]/c[1] (see above).
                UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
            }
            var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

            // Caller asked only for the achievable quality, not for a modified tree:
            // restore the original constants after measuring.
            if (!updateConstantsInTree)
            {
                UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
            }
            // Roll back if the fit degraded quality beyond tolerance or produced NaN.
            if (originalQuality - quality > 0.001 || double.IsNaN(quality))
            {
                UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
                return(originalQuality);
            }
            return(quality);
        }