/// <summary>
/// Solve x for Ax = b, where A is an invertible matrix (more generally, an m x n matrix with linearly independent columns).
///
/// The method works as follows:
/// 1. QR factorization: A = Q * R
/// 2. Let c = Q.transpose * b; then R * x = c, where R is an upper triangular matrix if A is n x n
/// 3. Solve x using backward substitution
/// </summary>
/// <param name="A">An m x n matrix with linearly independent columns, where m >= n</param>
/// <param name="b">An m x 1 column vector</param>
/// <returns>An n x 1 column vector, x, such that Ax = b when m = n and Ax ~ b when m > n</returns>
public static IVector Solve(IMatrix A, IVector b)
{
    // Q is an m x m matrix, R is an m x n matrix
    // Q = [Q1 Q2], where Q1 is an m x n matrix and Q2 is an m x (m-n) matrix
    // R = [R1; 0], where R1 is an n x n upper triangular matrix
    // A = Q * R = Q1 * R1
    IMatrix Q, R;
    QR.Factorize(A, out Q, out R);

    IVector c = Q.Transpose().Multiply(b);
    IVector x = BackwardSubstitution.Solve(R, c);
    return x;
}
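// Illustrative sketch (not part of the library API): backward substitution on a plain
// upper triangular system, the final step of Solve above. It uses raw double arrays
// instead of IMatrix/IVector; the method name is hypothetical.
//
// Worked 2 x 2 example:
//   R = [2 1]   c = [5]   =>  x1 = 1 / 1 = 1,   x0 = (5 - 1 * 1) / 2 = 2
//       [0 1]       [1]
public static double[] BackwardSubstitutionSketch(double[,] R, double[] c)
{
    int n = c.Length;
    double[] x = new double[n];
    // Solve from the last row upward: x[i] = (c[i] - sum_{j > i} R[i,j] * x[j]) / R[i,i]
    for (int i = n - 1; i >= 0; --i)
    {
        double s = c[i];
        for (int j = i + 1; j < n; ++j)
        {
            s -= R[i, j] * x[j];
        }
        x[i] = s / R[i, i];
    }
    return x;
}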
/// <summary>
/// This is used for data fitting / regression.
/// A is an m x n matrix, where m >= n
/// b is an m x 1 column vector
/// The method solves for x, an n x 1 column vector such that A * x is closest to b.
///
/// The method works as follows:
/// 1. Let C = A.transpose * A; then A.transpose * A * x = C * x = A.transpose * b
/// 2. Decompose C: C = Q * R, so Q * R * x = A.transpose * b
/// 3. Multiply both sides by Q.transpose = Q.inverse: Q.transpose * Q * R * x = Q.transpose * A.transpose * b
/// 4. Since Q.transpose * Q = I, we have R * x = Q.transpose * A.transpose * b
/// 5. Solve x from R * x = Q.transpose * A.transpose * b using backward substitution
/// </summary>
/// <param name="A">An m x n matrix with linearly independent columns, where m >= n</param>
/// <param name="b">An m x 1 column vector</param>
/// <returns>An n x 1 column vector, x, that minimizes the residual between A * x and b</returns>
public static IVector SolveLeastSquare(IMatrix A, IVector b)
{
    IMatrix At = A.Transpose();
    IMatrix C = At.Multiply(A); // C is an n x n matrix

    IMatrix Q, R;
    QR.Factorize(C, out Q, out R);

    IMatrix Qt = Q.Transpose();
    IVector d = Qt.Multiply(At).Multiply(b);
    IVector x = BackwardSubstitution.Solve(R, d);
    return x;
}
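// Illustrative sketch (not part of the library API): forming the normal equations
// C = A.transpose * A and d = A.transpose * b with raw double arrays, which is the
// work SolveLeastSquare does before its QR step. The method name is hypothetical.
public static void NormalEquationsSketch(double[,] A, double[] b, out double[,] C, out double[] d)
{
    int m = A.GetLength(0); // number of rows (data points)
    int n = A.GetLength(1); // number of columns (unknowns)
    C = new double[n, n];
    d = new double[n];
    for (int i = 0; i < n; ++i)
    {
        for (int j = 0; j < n; ++j)
        {
            double s = 0;
            for (int k = 0; k < m; ++k)
            {
                s += A[k, i] * A[k, j]; // (A.transpose * A)[i, j]
            }
            C[i, j] = s;
        }
        double t = 0;
        for (int k = 0; k < m; ++k)
        {
            t += A[k, i] * b[k]; // (A.transpose * b)[i]
        }
        d[i] = t;
    }
    // The remaining work in SolveLeastSquare is: factorize C = Q * R,
    // then solve R * x = Q.transpose * d by backward substitution.
}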
/// <summary>
/// Given A, an n x n symmetric matrix, we want to find U and T such that:
/// 1. A = U * T * U.transpose
/// 2. U is an n x n matrix whose columns are eigenvectors of A (A * x = lambda * x, where lambda is the eigenvalue and x is the eigenvector)
/// 3. T is a diagonal matrix whose diagonal entries are the eigenvalues
///
/// The method works in the following manner:
/// 1. Initialize A_0 = A, U_0 = I
/// 2. Iterate for k = 1, ..., K, where K is the maximum number of iterations
/// 3. In each iteration k:
///    3.1 QR factorization to find Q_k and R_k such that A_{k-1} = Q_k * R_k
///    3.2 Let A_k = R_k * Q_k
///    3.3 Let U_k = U_{k-1} * Q_k
/// 4. Set T = A_K, U = U_K
///
/// Note that U is an orthogonal matrix if A is a symmetric matrix; in other words, if A.transpose = A, then U.inverse = U.transpose.
/// </summary>
/// <param name="A">The matrix to be factorized</param>
/// <param name="T">T is a diagonal matrix whose diagonal entries are the eigenvalues</param>
/// <param name="U">U is an n x n matrix whose columns are eigenvectors of A</param>
/// <param name="K">Maximum number of iterations</param>
/// <param name="epsilon">Convergence threshold on the sum of absolute off-diagonal entries of A_k</param>
public static void Factorize(IMatrix A, out IMatrix T, out IMatrix U, int K = 100, double epsilon = 1e-10)
{
    Debug.Assert(A.RowCount == A.ColCount);

    int n = A.RowCount;

    IMatrix A_k = A.Clone();
    IMatrix U_k = A.Identity(n);

    IMatrix Q_k, R_k;
    for (int k = 1; k <= K; ++k)
    {
        QR.Factorize(A_k, out Q_k, out R_k);
        A_k = R_k.Multiply(Q_k);
        U_k = U_k.Multiply(Q_k);

        // Sum the absolute values of the off-diagonal entries of A_k;
        // the iteration has converged when this sum is negligible.
        double sum = 0;
        foreach (IVector rowVec in A_k.NonEmptyRows)
        {
            int rowId = rowVec.ID;
            foreach (int key in rowVec.NonEmptyKeys)
            {
                if (key == rowId)
                {
                    continue;
                }
                sum += System.Math.Abs(rowVec[key]);
            }
        }

        if (sum <= epsilon)
        {
            break;
        }
    }

    T = new SparseMatrix(n, n, A.DefaultValue);
    for (int i = 0; i < n; ++i)
    {
        T[i, i] = A_k[i, i];
    }
    U = U_k;
}
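// Illustrative sketch (not part of the library API): one way to carry out the QR
// iteration documented above on a plain symmetric double[,] matrix, using classical
// Gram-Schmidt for the QR step. All method names here are hypothetical, and the
// library's QR.Factorize may use a different factorization algorithm internally.
public static void QrIterationSketch(double[,] A, out double[] eigenValues, out double[,] U,
    int maxIterations = 100, double epsilon = 1e-10)
{
    int n = A.GetLength(0);
    double[,] A_k = (double[,])A.Clone();
    U = IdentitySketch(n);

    for (int k = 1; k <= maxIterations; ++k)
    {
        double[,] Q, R;
        GramSchmidtQrSketch(A_k, out Q, out R);
        A_k = MultiplySketch(R, Q);   // A_k = R_k * Q_k
        U = MultiplySketch(U, Q);     // U_k = U_{k-1} * Q_k

        // Terminate once the off-diagonal mass of A_k is negligible
        double sum = 0;
        for (int i = 0; i < n; ++i)
            for (int j = 0; j < n; ++j)
                if (i != j) sum += System.Math.Abs(A_k[i, j]);
        if (sum <= epsilon) break;
    }

    // The diagonal of the (approximately triangular) A_k holds the eigenvalue estimates
    eigenValues = new double[n];
    for (int i = 0; i < n; ++i)
    {
        eigenValues[i] = A_k[i, i];
    }
}

// Classical Gram-Schmidt QR factorization: A = Q * R, Q orthogonal, R upper triangular
private static void GramSchmidtQrSketch(double[,] A, out double[,] Q, out double[,] R)
{
    int n = A.GetLength(0);
    Q = new double[n, n];
    R = new double[n, n];
    for (int j = 0; j < n; ++j)
    {
        // Start from column j of A and subtract its projections onto earlier Q columns
        double[] v = new double[n];
        for (int i = 0; i < n; ++i) v[i] = A[i, j];
        for (int p = 0; p < j; ++p)
        {
            double dot = 0;
            for (int i = 0; i < n; ++i) dot += Q[i, p] * A[i, j];
            R[p, j] = dot;
            for (int i = 0; i < n; ++i) v[i] -= dot * Q[i, p];
        }
        double norm = 0;
        for (int i = 0; i < n; ++i) norm += v[i] * v[i];
        R[j, j] = System.Math.Sqrt(norm);
        for (int i = 0; i < n; ++i) Q[i, j] = v[i] / R[j, j];
    }
}

private static double[,] IdentitySketch(int n)
{
    double[,] I = new double[n, n];
    for (int i = 0; i < n; ++i) I[i, i] = 1.0;
    return I;
}

private static double[,] MultiplySketch(double[,] X, double[,] Y)
{
    int n = X.GetLength(0);
    double[,] Z = new double[n, n];
    for (int i = 0; i < n; ++i)
        for (int j = 0; j < n; ++j)
            for (int k = 0; k < n; ++k)
                Z[i, j] += X[i, k] * Y[k, j];
    return Z;
}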