/// <summary>
///   Initializes a new instance of the <see cref="BaseSupportVectorRegression"/> class.
/// </summary>
///
/// <param name="machine">The machine to be learned.</param>
/// <param name="inputs">The input data.</param>
/// <param name="outputs">The corresponding output data.</param>
///
protected BaseSupportVectorRegression(SupportVectorMachine machine, double[][] inputs, double[] outputs)
{
    // Initial argument checking
    SupportVectorLearningHelper.CheckArgs(machine, inputs, outputs);

    // Machine
    this.machine = machine;

    // Kernel (if applicable)
    KernelSupportVectorMachine ksvm = machine as KernelSupportVectorMachine;

    if (ksvm == null)
    {
        isLinear = true;
        Linear linear = new Linear(0);
        kernel = linear;
    }
    else
    {
        Linear linear = ksvm.Kernel as Linear;
        isLinear = linear != null && linear.Constant == 0;
        kernel = ksvm.Kernel;
    }

    // Learning data
    this.inputs = inputs;
    this.outputs = outputs;
}
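// The check above treats the machine as linear only when its kernel is Linear
// with Constant == 0, because only then does k(x, y) = <x, y> + c reduce to the
// plain dot product, allowing the support-vector expansion to be folded into a
// single weight vector. A quick numeric sketch of that distinction
// (illustrative only, not part of the original class):
double[] x = { 1.0, 2.0 }, y = { 3.0, 4.0 };
double homogeneous = new Linear(0).Function(x, y); // 1*3 + 2*4 = 11: a pure dot product
double affine = new Linear(5).Function(x, y);      // 11 + 5 = 16: not a pure dot product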
public void FunctionTest()
{
    Linear dense = new Linear(1);
    SparseLinear target = new SparseLinear(1);

    double[] sx = { 1, -0.555556, 2, +0.250000, 3, -0.864407, 4, -0.916667 };
    double[] sy = { 1, -0.666667, 2, -0.166667, 3, -0.864407, 4, -0.916667 };
    double[] sz = { 1, -0.944444, 3, -0.898305, 4, -0.916667 };

    double[] dx = { -0.555556, +0.250000, -0.864407, -0.916667 };
    double[] dy = { -0.666667, -0.166667, -0.864407, -0.916667 };
    double[] dz = { -0.944444, +0.000000, -0.898305, -0.916667 };

    double expected, actual;

    expected = dense.Function(dx, dy);
    actual = target.Function(sx, sy);
    Assert.AreEqual(expected, actual, 1e-10);

    expected = dense.Function(dx, dz);
    actual = target.Function(sx, sz);
    Assert.AreEqual(expected, actual, 1e-10);

    expected = dense.Function(dy, dz);
    actual = target.Function(sy, sz);
    Assert.AreEqual(expected, actual, 1e-10);
}
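// The sparse vectors above follow the LibSVM convention: a flat array of
// (1-based index, value) pairs with zero entries omitted — note how sz skips
// index 2, where its dense counterpart dz holds +0.000000. A hypothetical
// helper expanding such a vector back to dense form (sketch only):
static double[] SparseToDense(double[] sparse, int length)
{
    double[] dense = new double[length];
    for (int i = 0; i < sparse.Length; i += 2)
        dense[(int)sparse[i] - 1] = sparse[i + 1]; // indices are 1-based
    return dense;
}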
public void RunTest()
{
    Accord.Math.Tools.SetupGenerator(0);

    // Sample data
    //
    // The outputs below encode four boolean functions (AND, OR, NAND
    // and a XOR variant) over two binary inputs. All of them except
    // the XOR column are linearly separable, so a Linear kernel
    // should solve them easily.

    // Sample input data
    double[][] inputs =
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 },
    };

    // Outputs for each of the inputs
    int[][] outputs =
    {
        //      and  or  nand xor
        new[] { -1, -1, +1, +1 },
        new[] { -1, +1, +1, -1 },
        new[] { -1, +1, +1, -1 },
        new[] { +1, +1, -1, +1 },
    };

    // Create a new Linear kernel
    IKernel linear = new Linear();

    // Create a new Multi-label Support Vector Machine for two inputs,
    // using the linear kernel and four disjoint classes.
    var machine = new MultilabelSupportVectorMachine(inputs: 2, kernel: linear, classes: 4);

    // Create the Multi-label learning algorithm for the machine
    var teacher = new MultilabelSupportVectorLearning(machine, inputs, outputs);

    // Configure the learning algorithm to use SMO to train the
    // underlying SVMs in each of the binary class subproblems.
    teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs)
        {
            // Create a hard SVM
            Complexity = 10000.0
        };

    // Run the learning algorithm
    double error = teacher.Run();

    // Only the xor column is not learnable by
    // a hard-margin linear machine.
    Assert.AreEqual(2 / 16.0, error);
}
public void RunTest()
{
    Accord.Math.Tools.SetupGenerator(0);

    // Sample data
    //
    // The following is a simple auto-association function in which
    // each input corresponds to its own class. This problem should
    // be easily solved using a Linear kernel.

    // Sample input data
    double[][] inputs =
    {
        new double[] { 0 },
        new double[] { 3 },
        new double[] { 1 },
        new double[] { 2 },
    };

    // Output for each of the inputs
    int[] outputs = { 0, 3, 1, 2 };

    // Create a new Linear kernel
    IKernel kernel = new Linear();

    // Create a new Multi-class Support Vector Machine for one input,
    // using the linear kernel and four disjoint classes.
    var machine = new MulticlassSupportVectorMachine(1, kernel, 4);

    // Create the Multi-class learning algorithm for the machine
    var teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs);

    // Configure the learning algorithm to use SMO to train the
    // underlying SVMs in each of the binary class subproblems.
    teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    // Run the learning algorithm
    double error = teacher.Run();

    Assert.AreEqual(0, error);
    Assert.AreEqual(0, machine.Compute(inputs[0]));
    Assert.AreEqual(3, machine.Compute(inputs[1]));
    Assert.AreEqual(1, machine.Compute(inputs[2]));
    Assert.AreEqual(2, machine.Compute(inputs[3]));
}
public void Treinar(DadosTreinamento dadosTreinamento)
{
    // Train a one-vs-one multi-class SVM with a linear kernel,
    // sizing the machine from the training data itself.
    var kernel = new Linear(1);
    var quantidadeCaracteristicas = dadosTreinamento.Entradas[0].Length; // number of features
    var quantidadeClasses = dadosTreinamento.Saidas.Distinct().Length;   // number of distinct classes

    svm = new MulticlassSupportVectorMachine(quantidadeCaracteristicas, kernel, quantidadeClasses);

    var learning = new MulticlassSupportVectorLearning(svm, dadosTreinamento.Entradas, dadosTreinamento.Saidas)
    {
        Algorithm = (machine, inputs, outputs, a, b) =>
            new SequentialMinimalOptimization(machine, inputs, outputs)
            {
                Complexity = 1.0
            }
    };

    learning.Run();
}
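// A hypothetical companion method showing how the trained svm field would be
// used for prediction ("Classificar" is not part of the original code; this is
// a sketch following the Compute pattern used by the tests in this file):
public int Classificar(double[] entrada)
{
    // Compute returns the index of the winning class among the pairwise machines.
    return svm.Compute(entrada);
}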
public void RunTest()
{
    Accord.Math.Tools.SetupGenerator(0);

    var dist = NormalDistribution.Standard;

    double[] x =
    {
        +1.0312479734420776, +0.99444115161895752, +0.21835240721702576,
        +0.47197291254997253, +0.68701112270355225, -0.58556461334228516,
        -0.64154046773910522, -0.66485315561294556, +0.37940266728401184,
        -0.61046308279037476
    };

    double[][] inputs = Jagged.ColumnVector(x);

    IKernel kernel = new Linear();

    var machine = new KernelSupportVectorMachine(kernel, inputs: 1);

    var teacher = new OneclassSupportVectorLearning(machine, inputs)
    {
        Nu = 0.1
    };

    // Run the learning algorithm
    double error = teacher.Run();

    Assert.AreEqual(2, machine.Weights.Length);
    Assert.AreEqual(0.39198910030993617, machine.Weights[0]);
    Assert.AreEqual(0.60801089969006383, machine.Weights[1]);
    Assert.AreEqual(inputs[0][0], machine.SupportVectors[0][0]);
    Assert.AreEqual(inputs[7][0], machine.SupportVectors[1][0]);
}
public void MulticlassSupportVectorMachineConstructorTest()
{
    int inputs = 1;
    IKernel kernel = new Linear();
    int classes = 4;

    var target = new MulticlassSupportVectorMachine(inputs, kernel, classes);

    // The machines are stored in a lower-triangular jagged array:
    // one binary machine for each unordered pair of classes.
    Assert.AreEqual(3, target.Machines.Length);
    Assert.AreEqual(classes * (classes - 1) / 2,
        target.Machines[0].Length + target.Machines[1].Length + target.Machines[2].Length);

    for (int i = 0; i < classes; i++)
    {
        for (int j = 0; j < classes; j++)
        {
            if (i == j)
                continue;

            var machine = target[i, j];
            Assert.IsNotNull(machine);
        }
    }
}
public void TransformTest()
{
    // Using a linear kernel should be equivalent to standard PCA
    IKernel kernel = new Linear();

    // Create analysis
    var target = new KernelPrincipalComponentAnalysis(data, kernel, AnalysisMethod.Center);

    // Compute
    target.Compute();

    double[,] actual = target.Transform(data, 2);

    // Note: the first component comes out with inverted sign relative to
    // standard PCA, which is acceptable since components are defined up to sign.
    double[,] expected = new double[,]
    {
        { -0.827970186,  0.175115307 },
        {  1.77758033,  -0.142857227 },
        { -0.992197494, -0.384374989 },
        { -0.274210416, -0.130417207 },
        { -1.67580142,   0.209498461 },
        { -0.912949103, -0.175282444 },
        {  0.099109437,  0.349824698 },
        {  1.14457216,  -0.046417258 },
        {  0.438046137, -0.017764629 },
        {  1.22382056,   0.162675287 },
    };

    // Verify both are equal with 0.0001 tolerance value
    Assert.IsTrue(Matrix.IsEqual(actual, expected, 0.0001));

    // Assert the result equals the transformation of the input
    double[,] result = target.Result;
    double[,] projection = target.Transform(data);
    Assert.IsTrue(Matrix.IsEqual(result, projection, 0.000001));

    Assert.AreEqual(2, target.Eigenvalues.Length);
    Assert.AreEqual(10, target.ComponentMatrix.GetLength(0));
    Assert.AreEqual(2, target.ComponentMatrix.GetLength(1));
}
public void DistanceTest()
{
    var linear = new Linear(1);

    double[] x = { 1, 1 };
    double[] y = { 1, 1 };

    double actual = linear.Distance(x, y);
    double expected = 0;

    Assert.AreEqual(expected, actual);

    linear = new Linear(11.5);

    x = new double[] { 0.2, 0.5 };
    y = new double[] { 0.3, -0.7 };

    actual = linear.Distance(x, y);
    expected = Accord.Statistics.Tools.Distance(linear, x, y);

    Assert.AreEqual(expected, actual, 1e-10);
}
public void MulticlassSupportVectorMachineConstructorTest2()
{
    int inputs = 1;
    int classes = 3;
    IKernel kernel = new Linear();

    var target = new MulticlassSupportVectorMachine(inputs, kernel, classes);

    target[0, 1].Kernel = new Gaussian(0.1);
    target[0, 2].Kernel = new Linear();
    target[1, 2].Kernel = new Polynomial(2);

    // The indexer is symmetric: target[i, j] and target[j, i]
    // refer to the same underlying binary machine.
    Assert.AreEqual(target[0, 0], target[0, 0]);
    Assert.AreEqual(target[1, 1], target[1, 1]);
    Assert.AreEqual(target[2, 2], target[2, 2]);

    Assert.AreEqual(target[0, 1], target[1, 0]);
    Assert.AreEqual(target[0, 2], target[0, 2]);
    Assert.AreEqual(target[1, 2], target[1, 2]);

    Assert.AreNotEqual(target[0, 1], target[0, 2]);
    Assert.AreNotEqual(target[1, 2], target[0, 2]);
    Assert.AreNotEqual(target[1, 2], target[0, 1]);
}
public void KernelFunctionCacheConstructorTest()
{
    IKernel kernel = new Linear(1);

    int cacheSize = 0;

    KernelFunctionCache target = new KernelFunctionCache(kernel, inputs, cacheSize);

    Assert.AreEqual(0, target.Size);
    Assert.AreEqual(0, target.Hits);
    Assert.AreEqual(0, target.Misses);

    // With a cache size of zero, every value is recomputed on demand
    // and nothing is ever stored, so Hits remains zero throughout.
    for (int i = 0; i < inputs.Length; i++)
    {
        double expected = i * i + 1;
        double actual = target.GetOrCompute(i);

        Assert.AreEqual(expected, actual);
    }

    Assert.AreEqual(0, target.Hits);

    for (int i = 0; i < inputs.Length; i++)
    {
        for (int j = 0; j < inputs.Length; j++)
        {
            double expected = i * j + 1;
            double actual = target.GetOrCompute(i, j);

            Assert.AreEqual(expected, actual);
        }
    }

    Assert.AreEqual(0, target.Hits);
    Assert.AreEqual(0, target.Usage);
}
public void FunctionTest()
{
    IKernel linear = new Linear(1);

    double[] x = { 1, 1 };
    double[] y = { 1, 1 };

    double actual = linear.Function(x, y);
    double expected = 3;

    Assert.AreEqual(expected, actual);

    linear = new Linear(11.5);

    x = new double[] { 0.2, 5 };
    y = new double[] { 3, 0.7 };

    actual = linear.Function(x, y);
    expected = 15.6;

    Assert.AreEqual(expected, actual);
}
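// Worked out by hand, both expected values follow k(x, y) = <x, y> + c:
//   first:  (1*1 + 1*1) + 1        = 2.0 + 1.0  = 3
//   second: (0.2*3 + 5*0.7) + 11.5 = 4.1 + 11.5 = 15.6
// The same arithmetic, restated directly (sketch only):
double k1 = (1.0 * 1 + 1.0 * 1) + 1.0;  // 3
double k2 = (0.2 * 3 + 5 * 0.7) + 11.5; // 15.6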
public void TransformTest2()
{
    // Using a linear kernel should be equivalent to standard PCA
    IKernel kernel = new Linear();

    // Create analysis
    KernelPrincipalComponentAnalysis target =
        new KernelPrincipalComponentAnalysis(data, kernel, AnalysisMethod.Center);

    // Set the minimum variance threshold to 0.001
    target.Threshold = 0.001;

    // Compute
    target.Compute();

    var r = target.Result;

    double[,] actual = target.Transform(data, 1);

    // Note: the component comes out with inverted sign relative to standard
    // PCA, which is acceptable since components are defined up to sign.
    double[,] expected = new double[,]
    {
        { -0.827970186 },
        {  1.77758033  },
        { -0.992197494 },
        { -0.274210416 },
        { -1.67580142  },
        { -0.912949103 },
        {  0.099109437 },
        {  1.14457216  },
        {  0.438046137 },
        {  1.22382056  },
    };

    // Verify both are equal with 0.0001 tolerance value
    Assert.IsTrue(Matrix.IsEqual(actual, expected, 0.0001));

    // Assert the result equals the transformation of the input
    double[,] result = target.Result;
    double[,] projection = target.Transform(data);
    Assert.IsTrue(Matrix.IsEqual(result, projection, 0.000001));
}
public void weight_test_homogeneous_linear_kernel()
{
    var dataset = yinyang;

    double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
    int[] labels = dataset.GetColumn(2).ToInt32();

    Accord.Math.Tools.SetupGenerator(0);

    var kernel = new Linear();
    Assert.AreEqual(kernel.Constant, 0);

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1.0;
        smo.PositiveWeight = 1;
        smo.NegativeWeight = 1;
        smo.Tolerance = 0.001;

        double error = smo.Run();

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = machine.Decide(inputs[i]) ? 1 : 0;

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

        Assert.AreEqual(43, matrix.TruePositives); // both classes are
        Assert.AreEqual(43, matrix.TrueNegatives); // well equilibrated
        Assert.AreEqual(7, matrix.FalseNegatives);
        Assert.AreEqual(7, matrix.FalsePositives);

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(1.0, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.14, error);
        Assert.AreEqual(0.001, smo.Tolerance);
        Assert.AreEqual(31, machine.SupportVectors.Length);

        machine.Compress();

        Assert.AreEqual(1, machine.Weights[0]);
        Assert.AreEqual(1, machine.SupportVectors.Length);
        Assert.AreEqual(-1.3107402300323954, machine.SupportVectors[0][0]);
        Assert.AreEqual(-0.5779471529948812, machine.SupportVectors[0][1]);
        Assert.AreEqual(-0.53366022455811646, machine.Threshold);

        for (int i = 0; i < actual.Length; i++)
        {
            int expected = actual[i];
            int y = machine.Decide(inputs[i]) ? 1 : 0;
            Assert.AreEqual(expected, y);
        }
    }

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1;
        smo.PositiveWeight = 100;
        smo.NegativeWeight = 1;
        smo.Tolerance = 0.001;

        double error = smo.Run();

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = machine.Decide(inputs[i]) ? 1 : 0;

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

        Assert.AreEqual(50, matrix.TruePositives); // has more importance
        Assert.AreEqual(23, matrix.TrueNegatives);
        Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
        Assert.AreEqual(27, matrix.FalsePositives);

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(100, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(100, smo.PositiveWeight);
        Assert.AreEqual(0.001, smo.Tolerance);
        Assert.AreEqual(0.27, error);
        Assert.AreEqual(42, machine.SupportVectors.Length);
    }

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1;
        smo.PositiveWeight = 1;
        smo.NegativeWeight = 100;
        smo.Tolerance = 0.001;

        double error = smo.Run();

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = machine.Decide(inputs[i]) ? 1 : 0;

        var matrix = new ConfusionMatrix(actual, labels);

        Assert.AreEqual(25, matrix.TruePositives);
        Assert.AreEqual(50, matrix.TrueNegatives); // has more importance
        Assert.AreEqual(25, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives); // has more importance

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(0.01, smo.WeightRatio);
        Assert.AreEqual(100, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.25, error);
        Assert.AreEqual(0.001, smo.Tolerance);
        Assert.AreEqual(40, machine.SupportVectors.Length);
    }
}
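// Compress() is only possible because the kernel is homogeneous linear: the
// decision function f(x) = sum_i w_i <x_i, x> + b collapses to <w, x> + b with
// w = sum_i w_i x_i, so a single pseudo support vector w with weight 1
// reproduces every decision — hence the assertions above. A sketch of that
// folding (illustrative only, not the library implementation):
static double[] FoldSupportVectors(double[][] supportVectors, double[] weights)
{
    double[] w = new double[supportVectors[0].Length];
    for (int i = 0; i < supportVectors.Length; i++)
        for (int j = 0; j < w.Length; j++)
            w[j] += weights[i] * supportVectors[i][j];
    return w;
}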
public void RunTest3()
{
    double[][] inputs =
    {
        // Tickets with the following structure should be assigned to location 0
        new double[] { 1, 4, 2, 0, 1 },
        new double[] { 1, 3, 2, 0, 1 },

        // Tickets with the following structure should be assigned to location 1
        new double[] { 3, 0, 1, 1, 1 },
        new double[] { 3, 0, 1, 0, 1 },

        // Tickets with the following structure should be assigned to location 2
        new double[] { 0, 5, 5, 5, 5 },
        new double[] { 1, 5, 5, 5, 5 },

        // Tickets with the following structure should be assigned to location 3
        new double[] { 1, 0, 0, 0, 0 },
        new double[] { 1, 0, 0, 0, 0 },
    };

    int[] outputs =
    {
        0, 0, // locations for the first two vectors above
        1, 1, // locations for the next two vectors above
        2, 2, // locations for the next two vectors above
        3, 3, // locations for the last two vectors above
    };

    // Since this is a simplification, a linear machine will suffice:
    IKernel kernel = new Linear();

    // Create the machine for feature vectors of length 5, for 4 possible locations
    MulticlassSupportVectorMachine machine = new MulticlassSupportVectorMachine(5, kernel, 4);

    // Create a new learning algorithm to train the machine
    MulticlassSupportVectorLearning target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

    // Use the standard SMO algorithm
    target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    // Train the machines
    double actual = target.Run();

    // Compute the answer for all training samples
    for (int i = 0; i < inputs.Length; i++)
    {
        double[] answersWeights;

        double answer = machine.Compute(inputs[i], MulticlassComputeMethod.Voting, out answersWeights);

        // Assert it has been classified correctly
        Assert.AreEqual(outputs[i], answer);

        // Assert the most probable answer is indeed the correct one
        int imax;
        Matrix.Max(answersWeights, out imax);
        Assert.AreEqual(answer, imax);
    }
}
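// Under MulticlassComputeMethod.Voting, each of the classes*(classes-1)/2
// pairwise machines casts one vote, and answersWeights collects the per-class
// tallies, so the arg-max checked above is the majority winner. A sketch of
// the counting rule under that assumption (not the library implementation;
// prefersJ stands in for a hypothetical pairwise decision):
static int VoteWinner(Func<int, int, bool> prefersJ, int classes)
{
    int[] votes = new int[classes];
    for (int i = 0; i < classes; i++)
        for (int j = i + 1; j < classes; j++)
            votes[prefersJ(i, j) ? j : i]++; // one vote per pairwise machine

    int best = 0;
    for (int k = 1; k < classes; k++)
        if (votes[k] > votes[best])
            best = k;
    return best;
}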
public void RunTest2()
{
    double[][] inputs =
    {
        new double[] { 0, 1, 1, 0 }, // 0
        new double[] { 0, 1, 0, 0 }, // 0
        new double[] { 0, 0, 1, 0 }, // 0
        new double[] { 0, 1, 1, 0 }, // 0
        new double[] { 0, 1, 0, 0 }, // 0
        new double[] { 1, 0, 0, 0 }, // 1
        new double[] { 1, 0, 0, 0 }, // 1
        new double[] { 1, 0, 0, 1 }, // 1
        new double[] { 0, 0, 0, 1 }, // 1
        new double[] { 0, 0, 0, 1 }, // 1
        new double[] { 1, 1, 1, 1 }, // 2
        new double[] { 1, 0, 1, 1 }, // 2
        new double[] { 1, 1, 0, 1 }, // 2
        new double[] { 0, 1, 1, 1 }, // 2
        new double[] { 1, 1, 1, 1 }, // 2
    };

    int[] outputs =
    {
        0, 0, 0, 0, 0,
        1, 1, 1, 1, 1,
        2, 2, 2, 2, 2,
    };

    IKernel kernel = new Linear();
    MulticlassSupportVectorMachine machine = new MulticlassSupportVectorMachine(4, kernel, 3);
    MulticlassSupportVectorLearning target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

    target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    double actual = target.Run();
    double expected = 0;

    Assert.AreEqual(expected, actual);

    for (int i = 0; i < inputs.Length; i++)
    {
        actual = machine.Compute(inputs[i]);
        expected = outputs[i];
        Assert.AreEqual(expected, actual);
    }
}
public void ComputeTest5()
{
    var dataset = yinyang;

    double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
    int[] labels = dataset.GetColumn(2).ToInt32();

    {
        Linear kernel = new Linear();
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1.0;
        double error = smo.Run();

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(1.0, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.14, error);
        Assert.AreEqual(30, machine.SupportVectors.Length);

        double[] actualWeights = machine.Weights;
        double[] expectedWeights =
        {
            -1, -1, 1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 0.337065120144639, -1,
            1, -0.337065120144639, -1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1
        };
        Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(7, matrix.FalseNegatives);
        Assert.AreEqual(7, matrix.FalsePositives);
        Assert.AreEqual(43, matrix.TruePositives);
        Assert.AreEqual(43, matrix.TrueNegatives);
    }

    {
        Linear kernel = new Linear();
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1.0;
        smo.PositiveWeight = 0.3;
        smo.NegativeWeight = 1.0;
        double error = smo.Run();

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(0.3 / 1.0, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(0.3, smo.PositiveWeight);
        Assert.AreEqual(0.21, error);
        Assert.AreEqual(24, machine.SupportVectors.Length);

        double[] actualWeights = machine.Weights;
        //string str = actualWeights.ToString(Accord.Math.Formats.CSharpArrayFormatProvider.InvariantCulture);
        double[] expectedWeights =
        {
            -0.771026323762095, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -0.928973676237905, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
        };
        Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = (int)machine.Compute(inputs[i]);

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(50, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives);
        Assert.AreEqual(0, matrix.TruePositives);
        Assert.AreEqual(50, matrix.TrueNegatives);
    }

    {
        Linear kernel = new Linear();
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1.0;
        smo.PositiveWeight = 1.0;
        smo.NegativeWeight = 0.3;
        double error = smo.Run();

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(1.0 / 0.3, smo.WeightRatio);
        Assert.AreEqual(0.3, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.15, error);
        Assert.AreEqual(19, machine.SupportVectors.Length);

        double[] actualWeights = machine.Weights;
        double[] expectedWeights = new double[]
        {
            1, 1, -0.3, 1, -0.3, 1, 1, -0.3, 1, 1, 1, 1, 1, 1, 1, 1,
            0.129080057278249, 1, 0.737797469918795
        };
        Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(0, matrix.FalseNegatives);
        Assert.AreEqual(50, matrix.FalsePositives);
        Assert.AreEqual(50, matrix.TruePositives);
        Assert.AreEqual(0, matrix.TrueNegatives);
    }
}
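// The pattern across the three runs: WeightRatio = PositiveWeight / NegativeWeight,
// and (assuming the per-class costs scale as C+ = Complexity * PositiveWeight and
// C- = Complexity * NegativeWeight) the down-weighted class is the one whose
// errors become cheap — with PositiveWeight = 0.3 every positive is sacrificed
// (50 false negatives), and with NegativeWeight = 0.3 the reverse happens.
// A worked restatement of the asserted ratios (sketch only):
double complexity = 1.0;
double ratio1 = 0.3 / 1.0;      // second run: WeightRatio = 0.3
double ratio2 = 1.0 / 0.3;      // third run:  WeightRatio = 3.33...
double cPos = complexity * 0.3; // assumed cost of violating a positive example
double cNeg = complexity * 1.0; // assumed cost of violating a negative example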
private static void multilabelsvm()
{
    // Sample data
    //
    // The following is a simple auto-association function in which
    // each input corresponds to its own class. This problem should
    // be easily solved by a Linear kernel.

    // Sample input data
    double[][] inputs =
    {
        new double[] { 0 },
        new double[] { 3 },
        new double[] { 1 },
        new double[] { 2 },
    };

    // Outputs for each of the inputs
    int[][] outputs =
    {
        new[] { -1,  1, -1 },
        new[] { -1, -1,  1 },
        new[] {  1,  1, -1 },
        new[] { -1, -1, -1 },
    };

    // Create a new Linear kernel
    IKernel kernel = new Linear();

    // Create a new Multi-label Support Vector Machine with one input,
    // using the linear kernel and three class labels.
    var machine = new MultilabelSupportVectorMachine(1, kernel, 3);

    // Create the Multi-label learning algorithm for the machine
    var teacher = new MultilabelSupportVectorLearning(machine, inputs, outputs);

    // Configure the learning algorithm to use SMO to train the
    // underlying SVMs in each of the binary class subproblems.
    teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs)
        {
            // Create a hard SVM
            Complexity = 10000.0
        };

    // Run the learning algorithm
    double error = teacher.Run();

    int[][] answers = inputs.Apply(machine.Compute);
}
public void RevertTest()
{
    // Using a linear kernel should be equivalent to standard PCA
    IKernel kernel = new Linear();

    // Create analysis
    KernelPrincipalComponentAnalysis target =
        new KernelPrincipalComponentAnalysis(data, kernel, AnalysisMethod.Center);

    // Compute
    target.Compute();

    // Compute image
    double[,] image = target.Transform(data, 2);

    // Compute pre-image
    double[,] preimage = target.Revert(image);

    // Check if pre-image equals the original data
    Assert.IsTrue(Matrix.IsEqual(data, preimage, 0.0001));
}
public void WeightsTest1()
{
    var dataset = yinyang;

    double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
    int[] labels = dataset.GetColumn(2).ToInt32();

    Accord.Math.Tools.SetupGenerator(0);

    var kernel = new Linear(1);

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1.0;
        smo.PositiveWeight = 1;
        smo.NegativeWeight = 1;
        smo.Tolerance = 0.001;

        double error = smo.Run();

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

        Assert.AreEqual(43, matrix.TruePositives); // both classes are
        Assert.AreEqual(43, matrix.TrueNegatives); // well equilibrated
        Assert.AreEqual(7, matrix.FalseNegatives);
        Assert.AreEqual(7, matrix.FalsePositives);

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(1.0, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.14, error);
        Assert.AreEqual(0.001, smo.Tolerance);
        Assert.AreEqual(31, machine.SupportVectors.Length);
    }

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1;
        smo.PositiveWeight = 100;
        smo.NegativeWeight = 1;
        smo.Tolerance = 0.001;

        double error = smo.Run();

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

        Assert.AreEqual(50, matrix.TruePositives); // has more importance
        Assert.AreEqual(23, matrix.TrueNegatives);
        Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
        Assert.AreEqual(27, matrix.FalsePositives);

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(100, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(100, smo.PositiveWeight);
        Assert.AreEqual(0.001, smo.Tolerance);
        Assert.AreEqual(0.27, error);
        Assert.AreEqual(41, machine.SupportVectors.Length);
    }

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1;
        smo.PositiveWeight = 1;
        smo.NegativeWeight = 100;
        smo.Tolerance = 0.001;

        double error = smo.Run();

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        var matrix = new ConfusionMatrix(actual, labels);

        Assert.AreEqual(25, matrix.TruePositives);
        Assert.AreEqual(50, matrix.TrueNegatives); // has more importance
        Assert.AreEqual(25, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives); // has more importance

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(0.01, smo.WeightRatio);
        Assert.AreEqual(100, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.25, error);
        Assert.AreEqual(0.001, smo.Tolerance);
        Assert.AreEqual(40, machine.SupportVectors.Length);
    }
}
public void ClassifyTest()
{
    // Create some sample input data instances. This is the same
    // data used in the Gutierrez-Osuna's example available on:
    // http://research.cs.tamu.edu/prism/lectures/pr/pr_l10.pdf
    double[][] inputs =
    {
        // Class 0
        new double[] { 4, 1 },
        new double[] { 2, 4 },
        new double[] { 2, 3 },
        new double[] { 3, 6 },
        new double[] { 4, 4 },

        // Class 1
        new double[] { 9, 10 },
        new double[] { 6, 8 },
        new double[] { 9, 5 },
        new double[] { 8, 7 },
        new double[] { 10, 8 }
    };

    int[] output =
    {
        0, 0, 0, 0, 0, // The first five are from class 0
        1, 1, 1, 1, 1  // The last five are from class 1
    };

    // Now we can choose a kernel function to
    // use, such as a linear kernel function.
    IKernel kernel = new Linear();

    // Then, we will create a KDA using this linear kernel.
    var kda = new KernelDiscriminantAnalysis(inputs, output, kernel);

    kda.Compute(); // Compute the analysis

    // Now we can project the data into KDA space:
    double[][] projection = kda.Transform(inputs);

    // Or perform classification using:
    int[] results = kda.Classify(inputs);

    // Test the classify method
    for (int i = 0; i < 5; i++)
    {
        int expected = 0;
        int actual = results[i];
        Assert.AreEqual(expected, actual);
    }

    for (int i = 5; i < 10; i++)
    {
        int expected = 1;
        int actual = results[i];
        Assert.AreEqual(expected, actual);
    }
}
public void ExpandDistanceTest()
{
    Linear kernel = new Linear(42);

    var x = new double[] { 0.5, 2.0 };
    var y = new double[] { 1.3, -0.2 };

    var phi_x = kernel.Transform(x);
    var phi_y = kernel.Transform(y);

    double phi_d = Distance.SquareEuclidean(phi_x, phi_y);
    double d = kernel.Distance(x, y);

    Assert.AreEqual(phi_d, d);
}
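// The identity being verified: for a kernel with an explicit feature map phi,
// ||phi(x) - phi(y)||^2 = k(x, x) - 2 k(x, y) + k(y, y). For Linear(c) one
// valid map simply appends sqrt(c) to the vector, since
// <x, y> + c = <(x, sqrt(c)), (y, sqrt(c))>, and the appended components cancel
// in the difference. A sketch of the kernel-side computation under that
// assumption (not the library implementation):
static double SquareDistanceViaKernel(IKernel k, double[] x, double[] y)
{
    return k.Function(x, x) - 2 * k.Function(x, y) + k.Function(y, y);
}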
public void FunctionTest2()
{
    double constant = 0.1;

    Linear target = new Linear(constant);

    double[] x = { 2.0, 3.1, 4.0 };
    double[] y = { 2.0, 3.1, 4.0 };

    double expected = Matrix.InnerProduct(x, y) + constant;
    double actual;

    // Since x and y hold the same values, all three
    // evaluations must produce the same result.
    actual = target.Function(x, y);
    Assert.AreEqual(expected, actual);

    actual = target.Function(x, x);
    Assert.AreEqual(expected, actual);

    actual = target.Function(y, y);
    Assert.AreEqual(expected, actual);
}
public void ReverseDistanceTest()
{
    var linear = new Linear(1);

    double[] x = { 1, 1 };
    double[] y = { 1, 1 };

    double actual = linear.ReverseDistance(x, y);
    double expected = 0;

    Assert.AreEqual(expected, actual);

    linear = new Linear(0);

    x = new double[] { 0.2, 0.5 };
    y = new double[] { 0.3, -0.7 };

    actual = linear.ReverseDistance(x, y);
    expected = Accord.Math.Distance.SquareEuclidean(x, y);

    Assert.AreEqual(expected, actual);
}
public void TransformTest_Linear()
{
    double[][] data =
    {
        new double[] { 5.1, 3.5, 1.4, 0.2 },
        new double[] { 5.0, 3.6, 1.4, 0.2 },
        new double[] { 4.9, 3.0, 1.4, 0.2 },
        new double[] { 5.8, 4.0, 1.2, 0.2 },
        new double[] { 4.7, 3.2, 1.3, 0.2 },
    };

    // A first-degree polynomial kernel should expand
    // features exactly as the linear kernel does.
    var target = new Polynomial(1);
    var linear = new Linear();

    double[][] expected = data.Apply(linear.Transform);
    double[][] actual = data.Apply(target.Transform);

    Assert.IsTrue(expected.IsEqual(actual, 1e-10));
}
public void ComplexityHeuristicTest()
{
    var dataset = yinyang;

    double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
    int[] labels = dataset.GetColumn(2).ToInt32();

    var linear = new SupportVectorMachine(inputs[0].Length);

    Linear kernel = new Linear(0);
    var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);

    var smo1 = new SequentialMinimalOptimization(machine, inputs, labels);
    smo1.UseClassProportions = true;
    smo1.UseComplexityHeuristic = true;
    double e1 = smo1.Run();

    var smo2 = new SequentialMinimalOptimization(linear, inputs, labels);
    smo2.UseClassProportions = true;
    smo2.UseComplexityHeuristic = true;
    double e2 = smo2.Run();

    Assert.AreEqual(smo1.Complexity, smo2.Complexity);
    Assert.AreEqual(e1, e2);
}
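// The two runs can only assert equal Complexity values because the heuristic is
// derived from the data through the kernel, and Linear(0) on a kernel machine
// computes the same dot products an implicitly linear machine uses. A sketch of
// the heuristic, assuming it estimates C from the kernel matrix diagonal as
// C = n / sum_i k(x_i, x_i):
static double EstimateComplexity(IKernel kernel, double[][] inputs)
{
    double sum = 0;
    for (int i = 0; i < inputs.Length; i++)
        sum += kernel.Function(inputs[i], inputs[i]); // diagonal entries only
    return inputs.Length / sum;
}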
public void SerializeTest1()
{
    double[][] inputs =
    {
        new double[] { 1, 4, 2, 0, 1 },
        new double[] { 1, 3, 2, 0, 1 },
        new double[] { 3, 0, 1, 1, 1 },
        new double[] { 3, 0, 1, 0, 1 },
        new double[] { 0, 5, 5, 5, 5 },
        new double[] { 1, 5, 5, 5, 5 },
        new double[] { 1, 0, 0, 0, 0 },
        new double[] { 1, 0, 0, 0, 0 },
    };

    int[] outputs =
    {
        0, 0,
        1, 1,
        2, 2,
        3, 3,
    };

    IKernel kernel = new Linear();
    var msvm = new MulticlassSupportVectorMachine(5, kernel, 4);
    var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);
    smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    double expected = smo.Run();

    MemoryStream stream = new MemoryStream();

    // Save the machines
    msvm.Save(stream);

    // Rewind
    stream.Seek(0, SeekOrigin.Begin);

    // Reload the machines
    var target = MulticlassSupportVectorMachine.Load(stream);

    double actual;
    int count = 0;

    // Compute errors
    for (int i = 0; i < inputs.Length; i++)
    {
        double y = target.Compute(inputs[i]);
        if (y != outputs[i])
            count++;
    }

    actual = (double)count / inputs.Length;

    Assert.AreEqual(expected, actual);

    Assert.AreEqual(msvm.Inputs, target.Inputs);
    Assert.AreEqual(msvm.Classes, target.Classes);

    for (int i = 0; i < msvm.Machines.Length; i++)
    {
        for (int j = 0; j < msvm.Machines.Length; j++)
        {
            var a = msvm[i, j];
            var b = target[i, j];

            if (i != j)
            {
                Assert.IsTrue(a.SupportVectors.IsEqual(b.SupportVectors));
            }
            else
            {
                Assert.IsNull(a);
                Assert.IsNull(b);
            }
        }
    }
}
public void RunTest2()
{
    double[][] inputs =
    {
        new double[] { 0, 1, 1, 0 }, // 0
        new double[] { 0, 1, 0, 0 }, // 0
        new double[] { 0, 0, 1, 0 }, // 0
        new double[] { 0, 1, 1, 0 }, // 0
        new double[] { 0, 1, 0, 0 }, // 0
        new double[] { 1, 0, 0, 0 }, // 1
        new double[] { 1, 0, 0, 0 }, // 1
        new double[] { 1, 0, 0, 1 }, // 1
        new double[] { 0, 0, 0, 1 }, // 1
        new double[] { 0, 0, 0, 1 }, // 1
        new double[] { 1, 1, 1, 1 }, // 2
        new double[] { 1, 0, 1, 1 }, // 2
        new double[] { 1, 1, 0, 1 }, // 2
        new double[] { 0, 1, 1, 1 }, // 2
        new double[] { 1, 1, 1, 1 }, // 2
    };

    int[] outputs =
    {
        0, 0, 0, 0, 0,
        1, 1, 1, 1, 1,
        2, 2, 2, 2, 2,
    };

    IKernel kernel = new Linear();
    MulticlassSupportVectorMachine machine = new MulticlassSupportVectorMachine(4, kernel, 3);
    MulticlassSupportVectorLearning target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

    // First, train the underlying machines with plain SMO...
    target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    double error1 = target.Run();
    Assert.AreEqual(0, error1);

    // ...then run again to calibrate the trained machines
    // so they can produce probabilistic outputs.
    target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new ProbabilisticOutputCalibration(svm, classInputs, classOutputs);

    double error2 = target.Run();
    Assert.AreEqual(0, error2);
}
public void ExpandReverseDistanceTest()
{
    Linear kernel = new Linear(42);

    var x = new double[] { 0.5, 2.0 };
    var y = new double[] { 1.3, -0.2 };

    var phi_x = kernel.Transform(x);
    var phi_y = kernel.Transform(y);

    double d = Distance.SquareEuclidean(x, y);
    double phi_d = kernel.ReverseDistance(phi_x, phi_y);

    Assert.AreEqual(phi_d, d, 1e-10);
    Assert.IsFalse(double.IsNaN(phi_d));
    Assert.IsFalse(double.IsNaN(d));
}
/// <summary>
///   Initializes a new instance of a Sequential Minimal Optimization (SMO) algorithm.
/// </summary>
///
/// <param name="machine">A Support Vector Machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The classification label for each data point, either -1 or +1.</param>
///
public SequentialMinimalOptimization(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    // Initial argument checking
    if (machine == null)
        throw new ArgumentNullException("machine");

    if (inputs == null)
        throw new ArgumentNullException("inputs");

    if (outputs == null)
        throw new ArgumentNullException("outputs");

    if (inputs.Length != outputs.Length)
        throw new ArgumentException("The number of inputs and outputs does not match.", "outputs");

    for (int i = 0; i < outputs.Length; i++)
    {
        if (outputs[i] != 1 && outputs[i] != -1)
            throw new ArgumentOutOfRangeException("outputs",
                "One of the labels in the output vector is neither +1 nor -1.");
    }

    if (machine.Inputs > 0)
    {
        // This machine has a fixed input vector size
        for (int i = 0; i < inputs.Length; i++)
            if (inputs[i].Length != machine.Inputs)
                throw new ArgumentException(
                    "The size of the input vectors does not match the expected number of inputs of the machine");
    }

    // Machine
    this.machine = machine;

    // Kernel (if applicable)
    KernelSupportVectorMachine ksvm = machine as KernelSupportVectorMachine;

    if (ksvm == null)
    {
        isLinear = true;
        Linear linear = new Linear();
        kernel = linear;
    }
    else
    {
        Linear linear = ksvm.Kernel as Linear;
        isLinear = linear != null;
        kernel = ksvm.Kernel;
    }

    // Learning data
    this.inputs = inputs;
    this.outputs = outputs;

    int samples = inputs.Length;
    int dimension = inputs[0].Length;

    // Lagrange multipliers
    this.alpha = new double[inputs.Length];

    if (isLinear) // Hyperplane weights
        this.weights = new double[dimension];

    // Error cache
    this.errors = new double[samples];
}
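// Typical usage of the constructor above, following the pattern the tests in
// this file use (a minimal sketch with toy one-dimensional data):
double[][] inputs =
{
    new double[] { -2 }, new double[] { -1 },
    new double[] { +1 }, new double[] { +2 },
};
int[] outputs = { -1, -1, +1, +1 }; // labels must be exactly -1 or +1

var machine = new SupportVectorMachine(inputs: 1);
var smo = new SequentialMinimalOptimization(machine, inputs, outputs);

double error = smo.Run(); // trains the machine in place and returns the training error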