/// <summary>Finds a root of f(x)=0 by Broyden's quasi-Newton method (Real precision overload).</summary>
/// <param name="f">The vector function whose root is sought.</param>
/// <param name="initialGuess">Initial guess of the root.</param>
/// <param name="accuracy">Residual norm below which the iterate is accepted as a root.</param>
/// <param name="maxIterations">Maximum number of iterations.</param>
/// <param name="jacobianStepSize">Relative step size for the initial finite-difference Jacobian.</param>
/// <param name="root">The root that was found, if any. Undefined if the method returns false.</param>
/// <returns>True if a root with the specified accuracy was found, else false.</returns>
public static bool TryFindRootWithJacobianStep(Func<Real[], Real[]> f, Real[] initialGuess, Real accuracy, int maxIterations, Real jacobianStepSize, out Real[] root)
{
    var current = new DenseVector(initialGuess);
    Real[] f0 = f(initialGuess);
    var residual = new DenseVector(f0);
    Real residualNorm = ToReal(residual.L2Norm());

    // Initial Jacobian approximation via finite differences; refined by rank-one updates below.
    Matrix<Real> jacobian = Broyden.CalculateApproximateJacobian(f, initialGuess, f0, jacobianStepSize);

    for (int iteration = 0; iteration <= maxIterations; ++iteration)
    {
        // Quasi-Newton step: solve jacobian * step = -residual.
        var step = (DenseVector)(-jacobian.LU().Solve(residual));
        var candidate = current + step;
        var candidateResidual = new DenseVector(f(candidate.Values));
        Real candidateNorm = ToReal(candidateResidual.L2Norm());

        if (candidateNorm > residualNorm)
        {
            // The full step increased the residual — damp it and retry once.
            Real squaredNorm = residualNorm * residualNorm;
            Real damping = squaredNorm / (squaredNorm + candidateNorm * candidateNorm);
            if (damping == 0.0)
            {
                damping = ToReal(0.0001);
            }

            step = damping * step;
            candidate = current + step;
            candidateResidual = new DenseVector(f(candidate.Values));
            candidateNorm = ToReal(candidateResidual.L2Norm());
        }

        if (candidateNorm < accuracy)
        {
            root = candidate.Values;
            return true;
        }

        // Broyden rank-one update: J += ((dF - J*dx) * dx^T) / ||dx||^2.
        Matrix<Real> correction =
            (candidateResidual - residual - jacobian.Multiply(step)).ToColumnMatrix()
            * step.Multiply(ToReal(1.0) / Math.Pow(ToReal(step.L2Norm()), ToReal(2.0))).ToRowMatrix();
        jacobian += correction;

        current = candidate;
        residual = candidateResidual;
        residualNorm = candidateNorm;
    }

    root = null;
    return false;
}
/// <summary>
/// Verifies that VectorMath.SetLength rescales a vector so that its Euclidean (L2) norm
/// equals the requested length.
/// </summary>
public void SetLengthVectorTest()
{
    // Arrange: a unit-length vector along the second axis.
    Vector<float> vec = new DenseVector(new[] { 0f, 1f });
    float desiredLength = 1000;

    // Act
    VectorMath.SetLength(vec, desiredLength);

    // Assert: compare with a tolerance rather than exact equality — the norm is
    // computed in single-precision floating point, so an exact == comparison is fragile.
    Assert.AreEqual(desiredLength, vec.L2Norm(), 1e-3);
}
/// <summary>
/// Gradient descent with a fixed step length: repeatedly steps against the normalized
/// numeric gradient until the iterate displacement, the change in F, and the gradient
/// norm all fall below <paramref name="eps"/>.
/// </summary>
/// <param name="x0">Starting point (2-dimensional).</param>
/// <param name="eps">Convergence tolerance; eps/10 is also used as the finite-difference step.</param>
/// <returns>The last computed iterate.</returns>
public static Vector<double> GradientDescentSplitStep(Vector<double> x0, double eps)
{
    Vector<Double> xCur = x0;
    Vector<double> grad = new DenseVector(2)
    {
        // BUG FIX: the x-derivative was previously evaluated at (x0[1], x0[1]);
        // the first argument must be the x-coordinate x0[0], exactly as in the
        // per-iteration update inside the loop below.
        [0] = Df_dx(x0[0], x0[1], eps / 10), // using vector x0, fill the gradient coordinate-wise
        [1] = Df_dy(x0[0], x0[1], eps / 10)
    };
    double alpha = 0.8 * eps; // fixed step length (of order eps)

    // Normalize the step direction so each step has length alpha.
    var xNext = xCur - grad / grad.L2Norm() * alpha;
    int iteration = 1;
    do
    {
        #region Output
        Console.WriteLine($"Iter {iteration++}: ({xCur[0]}, {xCur[1]}) " +
                          $"Grad: ({grad[0]}, {grad[1]}) " +
                          $"||Grad||:{grad.L2Norm()} " +
                          $"||xCur-xNext||:{(xCur - xNext).L2Norm()} " +
                          $"|F(xCur) - F(xNext)|: {Math.Abs(F(xCur) - F(xNext))}");
        #endregion

        xCur = xNext;
        grad[0] = Df_dx(xCur[0], xCur[1], eps / 10);
        grad[1] = Df_dy(xCur[0], xCur[1], eps / 10);

        // Step against the normalized gradient; the raw (unnormalized) gradient
        // norm is what the exit condition below tests.
        xNext = xCur - grad / grad.L2Norm() * alpha;
    } while (!((xCur - xNext).L2Norm() < eps &&
               Math.Abs(F(xCur) - F(xNext)) < eps &&
               grad.L2Norm() < eps));

    Console.WriteLine($"Iter {iteration}: ({xCur[0]}, {xCur[1]}) " +
                      $"Grad: ({grad[0]}, {grad[1]}) " +
                      $"||Grad||:{grad.L2Norm()} " +
                      $"||xCur-xNext||:{(xCur - xNext).L2Norm()} " +
                      $"|F(xCur) - F(xNext)|: {Math.Abs(F(xCur) - F(xNext))}");
    return xNext;
}
/// <summary>Find a solution of the equation f(x)=0.</summary>
/// <param name="f">The function to find roots from.</param>
/// <param name="initialGuess">Initial guess of the root.</param>
/// <param name="accuracy">Desired accuracy. The root will be refined until the accuracy or the maximum number of iterations is reached.</param>
/// <param name="maxIterations">Maximum number of iterations. Usually 100.</param>
/// <param name="jacobianStepSize">Relative step size for calculating the Jacobian matrix at first step.</param>
/// <param name="root">The root that was found, if any. Undefined if the function returns false.</param>
/// <returns>True if a root with the specified accuracy was found, else false.</returns>
public static bool TryFindRootWithJacobianStep(Func<double[], double[]> f, double[] initialGuess, double accuracy, int maxIterations, double jacobianStepSize, out double[] root)
{
    var current = new DenseVector(initialGuess);
    double[] f0 = f(initialGuess);
    var residual = new DenseVector(f0);
    double residualNorm = residual.L2Norm();

    // Initial Jacobian approximation via finite differences.
    Matrix<double> jacobian = CalculateApproximateJacobian(f, initialGuess, f0, jacobianStepSize);

    for (int iteration = 0; iteration <= maxIterations; iteration++)
    {
        // Quasi-Newton step: solve jacobian * step = -residual.
        var step = (DenseVector)(-jacobian.LU().Solve(residual));
        var trial = current + step;
        var trialResidual = new DenseVector(f(trial.Values));
        double trialNorm = trialResidual.L2Norm();

        if (trialNorm > residualNorm)
        {
            // The full step made the residual worse — shrink it and retry once.
            double squaredNorm = residualNorm * residualNorm;
            double damping = squaredNorm / (squaredNorm + trialNorm * trialNorm);
            if (damping == 0.0)
            {
                damping = 1.0e-4;
            }

            step = damping * step;
            trial = current + step;
            trialResidual = new DenseVector(f(trial.Values));
            trialNorm = trialResidual.L2Norm();
        }

        if (trialNorm < accuracy)
        {
            root = trial.Values;
            return true;
        }

        // Broyden rank-one update of the Jacobian approximation:
        // J += ((dF - J*dx) * dx^T) / ||dx||^2.
        DenseVector residualDelta = trialResidual - residual;
        Matrix<double> update =
            (residualDelta - jacobian.Multiply(step)).ToColumnMatrix()
            * step.Multiply(1.0 / Math.Pow(step.L2Norm(), 2)).ToRowMatrix();
        jacobian += update;

        current = trial;
        residual = trialResidual;
        residualNorm = trialNorm;
    }

    root = null;
    return false;
}
/// <summary>
/// Projected gradient descent on the hyperplane (HyperPlane, x) = 1: repeatedly steps
/// against the numeric gradient (step length chosen by GoldenRatioAlpha) and projects
/// the result back onto the plane, until the iterate stops moving by less than eps.
/// </summary>
/// <param name="x0">Starting point; it is first projected onto the hyperplane.</param>
/// <param name="eps">Convergence tolerance; eps/1000 is used as the finite-difference step inside the loop.</param>
/// <returns>The last computed iterate.</returns>
public static Vector<double> GradientProjection(Vector<double> x0, double eps)
{
    // Start from the projection of x0 onto the constraint plane.
    var xCur = x0.GetProjectionHyperplane(HyperPlane, 1);

    // Numeric gradient of F at the current point.
    Vector<double> grad = new DenseVector(3)
    {
        [0] = Df_dx(xCur[0], xCur[1], xCur[2], eps),
        [1] = Df_dy(xCur[0], xCur[1], xCur[2], eps),
        [2] = Df_dz(xCur[0], xCur[1], xCur[2], eps)
    };

    // Choose the step length along the projected direction.
    // NOTE(review): this first step uses the NORMALIZED gradient projected onto the
    // plane, while inside the loop the raw (unnormalized, unprojected) gradient is
    // passed to GoldenRatioAlpha and used for the step — confirm this asymmetry
    // between the first step and subsequent iterations is intentional.
    var alpha = GoldenRatioAlpha(xCur, eps, grad.Normalize(2).GetProjectionHyperplane(HyperPlane, 1)); // project??? (HyperPlane,a)=1

    // Normalize the step direction, take the step, then project back onto the plane.
    var xNext = (xCur - alpha * grad.Normalize(2)).GetProjectionHyperplane(HyperPlane, 1);
    var iteration = 1;
    do
    {
        #region Output
        if (iteration < 100000)
        {
            Console.WriteLine($"Iter {iteration++}: xCur:{(xCur).ToStR()}" +
                              $" F: {F(xCur)}" +
                              $"||Grad||:{grad.L2Norm()} " +
                              $"||xCur-xNext||:{(xCur - xNext).L2Norm()}");
        }
        else
        {
            // Once the iteration counter runs away, only the step length is traced.
            Console.WriteLine(alpha);
        }
        #endregion

        // Sanity check: the current iterate must still satisfy the plane constraint.
        if (!IsOnPlane(HyperPlane, xCur, eps))
        {
            throw new Exception();
        }

        xCur = xNext;
        grad[0] = Df_dx(xCur[0], xCur[1], xCur[2], eps / 1000);
        grad[1] = Df_dy(xCur[0], xCur[1], xCur[2], eps / 1000);
        grad[2] = Df_dz(xCur[0], xCur[1], xCur[2], eps / 1000);

        // Choose the step length along the projected direction (raw gradient here — see NOTE above).
        alpha = GoldenRatioAlpha(xCur, eps, grad); // project???
        // Step against the gradient and project the result back onto the plane.
        xNext = (xCur - alpha * grad)
            .GetProjectionHyperplane(HyperPlane, 1);
    } while (!((xCur - xNext).L2Norm() < eps));

    Console.WriteLine($"Iter {iteration}: xCur:{xNext.ToStR()} " +
                      $"||xCur-xNext||:{(xCur - xNext).L2Norm()} " +
                      $"F = : {F(xNext)}");
    return (xNext);
}