/// <summary>
/// Verifies that sparse matrix addition commutes and that in-place Add works
/// for sparse and dense result targets, including the aliased-operand case.
/// </summary>
public void CanAddSparseMatricesBothWays()
{
    var zero = new SparseMatrix(1, 3);
    var nonZero = new SparseMatrix(new double[,] { { 0, 1, 1 } });

    // Operator addition must commute.
    var leftSum = zero + nonZero;
    var rightSum = nonZero + zero;
    Assert.IsTrue(leftSum.Equals(nonZero));
    Assert.IsTrue(leftSum.Equals(rightSum));

    // In-place Add into a sparse result, result on either side.
    var sparseResult = new SparseMatrix(1, 3);
    sparseResult.Add(nonZero, sparseResult);
    Assert.IsTrue(sparseResult.Equals(leftSum));

    sparseResult = new SparseMatrix(new double[,] { { 0, 1, 1 } });
    sparseResult.Add(zero, sparseResult);
    Assert.IsTrue(sparseResult.Equals(leftSum));

    sparseResult = new SparseMatrix(new double[,] { { 0, 1, 1 } });
    zero.Add(sparseResult, sparseResult);
    Assert.IsTrue(sparseResult.Equals(leftSum));

    // Result aliased with both operands: adding a matrix to itself doubles it.
    sparseResult = new SparseMatrix(new double[,] { { 0, 1, 1 } });
    sparseResult.Add(sparseResult, sparseResult);
    Assert.IsTrue(sparseResult.Equals(2 * leftSum));

    // In-place Add into a dense result.
    var denseResult = new DenseMatrix(1, 3);
    denseResult.Add(nonZero, denseResult);
    Assert.IsTrue(denseResult.Equals(leftSum));

    denseResult = new DenseMatrix(new double[,] { { 0, 1, 1 } });
    denseResult.Add(zero, denseResult);
    Assert.IsTrue(denseResult.Equals(leftSum));

    // Mixed sparse + dense operator addition must also commute.
    var dense = new DenseMatrix(new double[,] { { 0, 1, 1 } });
    var mixedLeft = zero + dense;
    var mixedRight = dense + zero;
    Assert.IsTrue(mixedLeft.Equals(dense));
    Assert.IsTrue(mixedLeft.Equals(mixedRight));
}
/// <summary>
/// Transforms a list of ratings into a sparse user-item matrix.
/// </summary>
/// <param name="ratings">Ratings to convert. When the same (user, item) pair
/// occurs more than once, only the first score encountered is kept.</param>
/// <returns>A sparse matrix indexed by user id (row) and item id (column),
/// holding the rating scores.</returns>
public static SparseMatrix<double> ToSparseMatrix(this List<Rating> ratings)
{
    var matrix = new SparseMatrix<double>();
    foreach (var rating in ratings)
    {
        // First score wins: skip pairs that already have an entry.
        if (matrix.HasEntry(rating.UserId, rating.ItemId))
        {
            continue;
        }

        matrix.Add(rating.UserId, rating.ItemId, rating.Score);
    }

    return matrix;
}
/// <summary>
/// Verifies that complex sparse matrix addition commutes and that in-place Add
/// works for sparse and dense result targets, including the aliased-operand case.
/// </summary>
public void CanAddSparseMatricesBothWays()
{
    var zero = new SparseMatrix(1, 3);
    var nonZero = new SparseMatrix(new Complex32[,] { { 0, 1, 1 } });

    // Operator addition must commute.
    var leftSum = zero + nonZero;
    var rightSum = nonZero + zero;
    Assert.IsTrue(leftSum.Equals(nonZero));
    Assert.IsTrue(leftSum.Equals(rightSum));

    // In-place Add into a sparse result, result on either side.
    var sparseResult = new SparseMatrix(1, 3);
    sparseResult.Add(nonZero, sparseResult);
    Assert.IsTrue(sparseResult.Equals(leftSum));

    sparseResult = new SparseMatrix(new Complex32[,] { { 0, 1, 1 } });
    sparseResult.Add(zero, sparseResult);
    Assert.IsTrue(sparseResult.Equals(leftSum));

    sparseResult = new SparseMatrix(new Complex32[,] { { 0, 1, 1 } });
    zero.Add(sparseResult, sparseResult);
    Assert.IsTrue(sparseResult.Equals(leftSum));

    // Result aliased with both operands: adding a matrix to itself doubles it.
    sparseResult = new SparseMatrix(new Complex32[,] { { 0, 1, 1 } });
    sparseResult.Add(sparseResult, sparseResult);
    Assert.IsTrue(sparseResult.Equals(2 * leftSum));

    // In-place Add into a dense result.
    var denseResult = new DenseMatrix(1, 3);
    denseResult.Add(nonZero, denseResult);
    Assert.IsTrue(denseResult.Equals(leftSum));

    denseResult = new DenseMatrix(new Complex32[,] { { 0, 1, 1 } });
    denseResult.Add(zero, denseResult);
    Assert.IsTrue(denseResult.Equals(leftSum));

    // Mixed sparse + dense operator addition must also commute.
    var dense = new DenseMatrix(new Complex32[,] { { 0, 1, 1 } });
    var mixedLeft = zero + dense;
    var mixedRight = dense + zero;
    Assert.IsTrue(mixedLeft.Equals(dense));
    Assert.IsTrue(mixedLeft.Equals(mixedRight));
}
/// <summary>
/// Gaussian process regression with a squared-exponential covariance, following
/// the standard Cholesky formulation (Rasmussen &amp; Williams, Algorithm 2.1).
/// Produces the posterior mean and variance at the n training indices plus
/// <paramref name="future_point_count"/> extrapolated indices.
/// </summary>
/// <param name="targets">The actual y values of the data points. The array is
/// left unmodified; mean-centering is performed on an internal copy.</param>
/// <param name="future_point_count">The number of future points to predict.</param>
/// <param name="sigma_0">Amplitude of the SE covariance.</param>
/// <param name="lambda">Length scale of the SE covariance.</param>
/// <param name="sigma_n">Standard deviation of the output noise.</param>
/// <param name="fstar">Posterior mean of the GP at each index.</param>
/// <param name="Vfstar">Predictive variance of the GP at each index (includes
/// the sigma_n^2 noise term).</param>
/// <param name="logpyX">Log marginal likelihood, log p(y|X).</param>
public static void Predict(double[] targets, int future_point_count, double sigma_0, double lambda, double sigma_n, out double[] fstar, out double[] Vfstar, out double logpyX)
{
    int n = targets.Length;
    double[,] cov = GetDataCovar(n, sigma_0, lambda, sigma_n);

    // Center the targets on a copy so the caller's array is not mutated
    // (BUGFIX: the original subtracted the mean from the input in place).
    double mean = targets.Sum() / n;
    double[] centered = new double[n];
    for (int i = 0; i < n; i++)
    {
        centered[i] = targets[i] - mean;
    }

    // Column vector y and training covariance matrix K.
    SparseMatrix y = new SparseMatrix(n, 1, centered);
    SparseMatrix K = new SparseMatrix(cov);

    // Identity matrix for the noise term.
    SparseMatrix I = SparseMatrix.Identity(n);

    // Cholesky factor L of (K + sigma_n^2 * I).
    var temp_matrix = K.Add(I.Multiply(System.Math.Pow(sigma_n, 2)));
    var cholesky = temp_matrix.Cholesky();
    var L = cholesky.Factor;

    // alpha = L'\(L\y) = (K + sigma_n^2 I)^-1 y.
    // NOTE(review): explicit inverses are O(n^3) and less stable than
    // triangular solves; kept to preserve the original numerical path.
    var L_transpose_inverse = L.Transpose().Inverse();
    var L_inverse = L.Inverse();
    var alpha = L_transpose_inverse.Multiply(L_inverse).Multiply(y);

    // 0.5 * log|K + sigma_n^2 I| = sum of logs of the Cholesky diagonal.
    double L_diag = 0.0;
    for (int i = 0; i < L.ColumnCount; i++)
    {
        L_diag += System.Math.Log(L[i, i]);
    }

    // Log marginal likelihood: -0.5*y'alpha - sum(log L_ii) - (n/2)*log(2*pi).
    // BUGFIX: the normalization term must use the training count n,
    // not future_point_count.
    logpyX = -y.Transpose().Multiply(alpha).Multiply(0.5)[0, 0] - L_diag - n * System.Math.Log(2 * System.Math.PI) * 0.5;

    fstar = new double[n + future_point_count];
    Vfstar = new double[n + future_point_count];
    for (int i = 0; i < n + future_point_count; i++)
    {
        // k_*: covariance between test index i and every training index.
        double[] kstar = new double[n];
        for (int j = 0; j < n; j++)
        {
            kstar[j] = GetCov_SE(j, i, sigma_0, lambda, sigma_n);
        }

        var column_vector_kstar = new SparseMatrix(n, 1, kstar);

        // f* = k_*^T * alpha, shifted back by the mean removed above.
        fstar[i] = column_vector_kstar.Transpose().Multiply(alpha)[0, 0];
        fstar[i] += mean;

        // v = L \ k_*; predictive variance V[f*] = k(x_*,x_*) - v^T v + sigma_n^2.
        var v = L_inverse.Multiply(column_vector_kstar);
        Vfstar[i] = GetCov_SE(i, i, sigma_0, lambda, sigma_n) - v.Transpose().Multiply(v)[0, 0] + System.Math.Pow(sigma_n, 2);
    }
}