/// <summary>
///   Runs the learning algorithm.
/// </summary>
/// 
/// <param name="computeError">True to compute error after the training
///   process completes, false otherwise.</param>
/// 
public double Run(bool computeError)
{
    int n = supportVectors.Length;

    // Build the Gram matrix of kernel evaluations between
    // every pair of support vectors.
    double[,] gram = new double[n, n];
    for (int i = 0; i < n; i++)
        for (int j = 0; j < n; j++)
            gram[i, j] = machine.Kernel.Function(supportVectors[i], supportVectors[j]);

    // Bring the matrix to reduced row echelon form so that
    // linearly dependent support vectors can be detected.
    var echelon = new ReducedRowEchelonForm(gram);
    var rref = echelon.Result;
    var pivot = echelon.Pivot;

    // Inspect each support vector
    for (int i = 0; i < n; i++)
    {
        // Locate the row corresponding to this vector
        int row = pivot[i];

        // Rows beyond the last pivot row belong to vectors that can be
        // expressed as a linear combination of the other vectors.
        if (row > n - echelon.FreeVariables - 1)
        {
            // Fold this vector's multiplier into the vectors that
            // span it, then remove it by zeroing its multiplier.
            double weight = alpha[row];
            for (int j = 0; j < n; j++)
                alpha[j] += weight * rref[j, row];
            alpha[row] = 0;
        }
    }

    // Keep only the support vectors whose multipliers remained non-zero
    int[] nonZero = alpha.Find(a => a != 0);
    machine.Weights = alpha.Submatrix(nonZero);
    machine.SupportVectors = supportVectors.Submatrix(nonZero);

    return computeError ? ComputeError(supportVectors, outputs) : 0;
}
public void ReducedRowEchelonFormConstructorTest()
{
    // Arrange: a 3x3 matrix containing a linearly dependent row
    // (its reduced form below ends in an all-zero row).
    double[,] input =
    {
        { 1, 2, -3 },
        { 3, 5,  9 },
        { 5, 9,  3 },
    };

    // The known reduced row echelon form of the matrix above
    double[,] expected =
    {
        { 1, 0,  33 },
        { 0, 1, -18 },
        { 0, 0,   0 },
    };

    // Act: compute the reduction
    var reduction = new ReducedRowEchelonForm(input);
    var actual = reduction.Result;

    // Assert: the result matches the expected reduced form
    Assert.IsTrue(expected.IsEqual(actual));
}
public void ReducedRowEchelonFormConstructorTest2()
{
    // Arrange: a 3x5 matrix whose second row is an exact multiple
    // of the first (it reduces to an all-zero row below).
    double[,] input =
    {
        { 3, 2, 2, 3, 1 },
        { 6, 4, 4, 6, 2 },
        { 9, 6, 6, 9, 1 },
    };

    // The known reduced row echelon form of the matrix above
    double[,] expected =
    {
        { 1, 2 / 3.0, 2 / 3.0, 1, 0 },
        { 0, 0,       0,       0, 1 },
        { 0, 0,       0,       0, 0 },
    };

    // Act: compute the reduction
    var reduction = new ReducedRowEchelonForm(input);
    var actual = reduction.Result;

    // Assert: the result matches the expected reduced form
    Assert.IsTrue(expected.IsEqual(actual));
}