private IterativeStatistics SolveInternal(int maxIterations)
{
    // δnew = δ0 = r * r
    resDotRes = residual.DotProduct(residual);

    // The convergence criterion must be initialized immediately after the first r and r * r are computed.
    residualConvergence.Initialize(this);

    // This is also used as output of the method.
    double residualNormRatio = double.NaN;

    // d = r
    direction = residual.Copy();

    // Allocate memory for other vectors, which will be reused during each iteration.
    matrixTimesDirection = Rhs.CreateZeroVectorWithSameFormat();

    for (Iteration = 0; Iteration < maxIterations; ++Iteration)
    {
        // q = A * d
        Matrix.Multiply(direction, matrixTimesDirection);

        // α = δnew / (d * q)
        StepSize = ResDotRes / direction.DotProduct(matrixTimesDirection);

        // x = x + α * d
        solution.AxpyIntoThis(direction, StepSize);

        // δold = δnew
        double resDotResOld = ResDotRes;

        // Normally the residual vector is updated as: r = r - α * q and δnew = r * r.
        // However, corrections might need to be applied.
        residualUpdater.UpdateResidual(this, residual, out resDotRes);

        // At this point we can check if CG has converged and exit, thus avoiding the unnecessary operations that follow.
        residualNormRatio = residualConvergence.EstimateResidualNormRatio(this);
        if (residualNormRatio <= residualTolerance)
        {
            return new IterativeStatistics
            {
                AlgorithmName = name,
                HasConverged = true,
                NumIterationsRequired = Iteration + 1,
                ResidualNormRatioEstimation = residualNormRatio
            };
        }

        // β = δnew / δold
        ParamBeta = ResDotRes / resDotResOld;

        // d = r + β * d
        //TODO: benchmark the two options to find out which is faster.
        //direction = residual.Axpy(direction, ParamBeta); // This allocates a new vector d, copies r and GCs the existing d.
        direction.LinearCombinationIntoThis(ParamBeta, residual, 1.0); // This performs additions instead of copying and needless multiplications.
    }

    // We reached the max iterations before CG converged.
    return new IterativeStatistics
    {
        AlgorithmName = name,
        HasConverged = false,
        NumIterationsRequired = maxIterations,
        ResidualNormRatioEstimation = residualNormRatio
    };
}
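// The method above depends on library types that are not shown here (IterativeStatistics, the
// pluggable residual updater and convergence strategy, and the vector/matrix abstractions).
// For reference, the following is a minimal, self-contained sketch of the same conjugate
// gradient recurrences on raw double[] arrays, using the plain updates r = r - α * q and a
// simple relative residual norm check in place of the pluggable strategies. Every name in it
// (CgSketch, Solve, Dot, Multiply) is illustrative only and is NOT part of this library's API;
// it assumes `using System;` for Math.Sqrt.
internal static class CgSketch
{
    // Solves A * x = b for a symmetric positive definite A, starting from x = 0.
    // Returns the number of iterations performed.
    public static int Solve(double[,] A, double[] b, double[] x, int maxIterations, double tolerance)
    {
        int n = b.Length;
        var r = (double[])b.Clone();        // r = b - A * 0 = b
        var d = (double[])r.Clone();        // d = r
        var q = new double[n];
        double resDotRes = Dot(r, r);       // δnew = δ0 = r * r
        double res0 = Math.Sqrt(resDotRes); // reference norm for the relative convergence check

        for (int iter = 0; iter < maxIterations; ++iter)
        {
            Multiply(A, d, q);                                          // q = A * d
            double alpha = resDotRes / Dot(d, q);                       // α = δnew / (d * q)
            for (int i = 0; i < n; ++i) x[i] += alpha * d[i];           // x = x + α * d
            for (int i = 0; i < n; ++i) r[i] -= alpha * q[i];           // r = r - α * q
            double resDotResOld = resDotRes;                            // δold = δnew
            resDotRes = Dot(r, r);                                      // δnew = r * r
            if (Math.Sqrt(resDotRes) / res0 <= tolerance) return iter + 1;
            double beta = resDotRes / resDotResOld;                     // β = δnew / δold
            for (int i = 0; i < n; ++i) d[i] = r[i] + beta * d[i];      // d = r + β * d
        }
        return maxIterations; // max iterations reached before convergence
    }

    private static double Dot(double[] u, double[] v)
    {
        double sum = 0.0;
        for (int i = 0; i < u.Length; ++i) sum += u[i] * v[i];
        return sum;
    }

    private static void Multiply(double[,] A, double[] v, double[] result)
    {
        int n = v.Length;
        for (int i = 0; i < n; ++i)
        {
            double sum = 0.0;
            for (int j = 0; j < n; ++j) sum += A[i, j] * v[j];
            result[i] = sum;
        }
    }
}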