/// <summary>
/// Minimizes <c>function</c> with the (damped) Newton method starting from <paramref name="x0"/>.
/// Symbolic gradient and Hessian expressions are built once up front; each iteration then
/// evaluates them at the current point and updates x ← x − a · grad · H⁻¹
/// until ‖grad‖ &lt; <paramref name="eps"/>.
/// </summary>
/// <param name="x0">Starting point. Not modified — the method iterates on a clone.</param>
/// <param name="eps">Convergence threshold on the gradient norm.</param>
/// <param name="a">Step damping factor; the default 1 gives the classical Newton step.</param>
/// <param name="steps">Iteration budget before the method is declared divergent.</param>
/// <returns>A tuple of the found point and the function value at that point.</returns>
/// <exception cref="ArgumentException">Thrown when the Hessian cannot be inverted at an iterate.</exception>
/// <exception cref="MethodDivergencyException">
/// Thrown when the gradient norm has not dropped below <paramref name="eps"/> within <paramref name="steps"/> iterations.
/// </exception>
public Tuple <Matrix, double> NewtonsMethod(Matrix x0, double eps, double a = 1, int steps = 100)
{
    int n = function.Variables.Count;

    // Symbolic first derivatives: gradFunctional[i] = ∂f/∂x_i.
    FunctionParser[] gradFunctional = new FunctionParser[n];
    for (int i = 0; i < n; ++i)
    {
        gradFunctional[i] = function.DifferentiateBy(function.Variables[i]);
    }

    // Symbolic second derivatives. The first derivative with respect to x_i is taken from
    // gradFunctional[i] (the very same un-optimized expression the original recomputed for
    // every j, costing O(n^2) differentiations instead of O(n)). Symmetry of the Hessian
    // fills both triangles from a single computation.
    FunctionParser[,] hessianFunctional = new FunctionParser[n, n];
    for (int i = 0; i < n; ++i)
    {
        FunctionParser dByXi = gradFunctional[i]; // loop-invariant over j — hoisted
        for (int j = i; j < n; ++j)
        {
            hessianFunctional[i, j] = hessianFunctional[j, i] =
                dByXi.DifferentiateBy(function.Variables[j]).Optimize();
        }
    }

    Matrix x = (Matrix)x0.Clone();
    Matrix grad = CountGrad(x, gradFunctional);

    // Explicit iteration budget. Equivalent to the original `step-- > 0` short-circuit
    // trick, but the "out of budget" path is now impossible to misread: we throw exactly
    // when the gradient has not converged and no iterations remain.
    int remaining = steps;
    while (Norm(grad) >= eps)
    {
        if (remaining-- <= 0)
        {
            throw new MethodDivergencyException($"Методу не удалось найти решение за {steps} шагов.");
        }

        Matrix hessianMatrix = CountHessian(x, hessianFunctional);
        Matrix invertedH;
        if (!hessianMatrix.TryInvert(out invertedH))
        {
            throw new ArgumentException("Ошибка в поиске обратной матрицы!");
        }

        // Damped Newton step. Multiplication order grad · H⁻¹ is kept from the original —
        // presumably the gradient is a row vector here; verify against CountGrad.
        x = x - (a * grad) * invertedH;
        grad = CountGrad(x, gradFunctional);
    }

    return new Tuple <Matrix, double>(x, f(x.ToVector()));
}