/// <summary>
///   Creates a new <see cref="SupportVectorReduction"/> algorithm.
/// </summary>
/// 
/// <param name="machine">The machine to be reduced.</param>
/// 
public SupportVectorReduction(KernelSupportVectorMachine machine)
{
    this.machine = machine;
    this.supportVectors = machine.SupportVectors;
    this.outputs = machine.Weights.Sign().ToInt32();
    this.alpha = (double[])machine.Weights.Clone();
    this.kernel = machine.Kernel;
}
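// A minimal usage sketch (my own illustration assembled from the snippets in this listing,
// not an authoritative recipe): SupportVectorReduction is applied to an already trained
// KernelSupportVectorMachine and rewrites its support vector set in place. Assumes the
// usual Accord.MachineLearning.VectorMachines and Accord.Statistics.Kernels namespaces.
public static void SupportVectorReductionUsageSketch()
{
    double[][] inputs =
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 }
    };
    int[] labels = { -1, -1, -1, 1 };

    // Train a machine first, as in the other examples in this listing
    var machine = new KernelSupportVectorMachine(new Linear(0), inputs[0].Length);
    var smo = new SequentialMinimalOptimization(machine, inputs, labels);
    smo.Complexity = 100.0;
    smo.Run();

    // Then reduce the trained machine; afterwards it should produce the same signs
    // with fewer support vectors (see the ComputeTest example further below)
    var svr = new SupportVectorReduction(machine);
    double error = svr.Run();
}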
public void LargeLearningTest1()
{
    // Create large input vectors
    int rows = 1000;
    int dimension = 10000;

    double[][] inputs = new double[rows][];
    int[] outputs = new int[rows];

    Random rnd = new Random();

    for (int i = 0; i < inputs.Length; i++)
    {
        inputs[i] = new double[dimension];

        if (i > rows / 2)
        {
            for (int j = 0; j < dimension; j++)
                inputs[i][j] = rnd.NextDouble();
            outputs[i] = -1;
        }
        else
        {
            for (int j = 0; j < dimension; j++)
                inputs[i][j] = rnd.NextDouble() * 4.21 + 5;
            outputs[i] = +1;
        }
    }

    KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Polynomial(2), dimension);

    SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, outputs)
    {
        UseComplexityHeuristic = true
    };

    double error = smo.Run();

    Assert.AreEqual(0, error);
}
public static void GridSearch(double[][] inputs, int[] outputs)
{
    GridSearchRange[] ranges =
    {
        new GridSearchRange("complexity", new double[] { 0.00000001, 0.001, 5.20, 0.30, 0.50, 20, 50, 100, 100 }),
        new GridSearchRange("degree", new double[] { 1, 2, 3, 4, 5, 10 }),
        new GridSearchRange("constant", new double[] { 0, 1, 2 }),
        new GridSearchRange("sigma", new double[] { 0.1, 0.25, 0.5, 1, 2, 5 })
    };

    // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
    var gridsearch = new GridSearch<KernelSupportVectorMachine>(ranges);

    // Set the fitting function for the algorithm
    gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
    {
        // The parameters to be tried will be passed as a function parameter.
        int degree = (int)parameters["degree"].Value;
        double constant = parameters["constant"].Value;
        double complexity = parameters["complexity"].Value;
        double sigma = parameters["sigma"].Value;

        // Use the parameters to build the SVM model
        //Polynomial kernel = new Polynomial(degree, constant);
        //Gaussian kernel = new Gaussian(sigma);
        Gaussian kernel = new Gaussian(sigma);
        KernelSupportVectorMachine ksvm = new KernelSupportVectorMachine(kernel, 2);

        // Create a new learning algorithm for SVMs
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(ksvm, inputs, outputs);
        smo.Complexity = complexity;

        // Measure the model performance to return as an out parameter
        error = smo.Run();

        return ksvm; // Return the current model
    };

    // Declare some out variables to pass to the grid search algorithm
    GridSearchParameterCollection bestParameters;
    double minError;

    // Compute the grid search to find the best Support Vector Machine
    KernelSupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);
}
public void KernelTest2()
{
    var dataset = SequentialMinimalOptimizationTest.yinyang;
    var inputs = dataset.Submatrix(null, 0, 1).ToArray();
    var labels = dataset.GetColumn(2).ToInt32();

    var svm = new KernelSupportVectorMachine(new Linear(1), inputs: 2);

    bool thrown = false;

    try
    {
        new ProbabilisticCoordinateDescent(svm, inputs, labels);
    }
    catch (ArgumentException)
    {
        thrown = true;
    }

    Assert.IsTrue(thrown);
}
public void ComputeTest()
{
    // Example AND problem
    double[][] inputs =
    {
        new double[] { 0, 0 }, // 0 and 0: 0 (label -1)
        new double[] { 0, 1 }, // 0 and 1: 0 (label -1)
        new double[] { 1, 0 }, // 1 and 0: 0 (label -1)
        new double[] { 1, 1 }  // 1 and 1: 1 (label +1)
    };

    // Dichotomy SVM outputs should be given as [-1;+1]
    int[] labels =
    {
        // 0,  0,  0, 1
          -1, -1, -1, 1
    };

    // Create a Support Vector Machine for the given inputs
    KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length);

    // Instantiate a new learning algorithm for SVMs
    SequentialMinimalOptimization smo = new SequentialMinimalOptimization(machine, inputs, labels);

    // Set up the learning algorithm
    smo.Complexity = 1.0;

    // Run
    double error = smo.Run();

    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0])));
    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[1])));
    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[2])));
    Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[3])));

    Assert.AreEqual(error, 0);

    Assert.AreEqual(-0.6640625, machine.Threshold);
    Assert.AreEqual(1, machine.Weights[0]);
    Assert.AreEqual(-0.34375, machine.Weights[1]);
    Assert.AreEqual(-0.328125, machine.Weights[2]);
    Assert.AreEqual(-0.328125, machine.Weights[3]);
}
private static void testWeights(double[][] inputs, int[] labels, IKernel kernel)
{
    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.PositiveWeight = 100;
        smo.NegativeWeight = 1;

        double error = smo.Run();

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(50, matrix.TruePositives); // has more importance
        Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
    }

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.PositiveWeight = 1;
        smo.NegativeWeight = 100;

        double error = smo.Run();

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        var matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(50, matrix.TrueNegatives); // has more importance
        Assert.AreEqual(0, matrix.FalsePositives); // has more importance
    }
}
public void ComputeTest5()
{
    double[][] inputs = training.Submatrix(null, 0, 3);
    int[] labels = Tools.Scale(0, 1, -1, 1, training.GetColumn(4)).ToInt32();

    Gaussian kernel = Gaussian.Estimate(inputs);

    var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
    var smo = new SequentialMinimalOptimization(machine, inputs, labels);

    smo.Complexity = 1.0;
    smo.UseClassProportions = true;

    double error = smo.Run();

    Assert.AreEqual(1, smo.Complexity);
    Assert.AreEqual(0.4, smo.PositiveWeight);
    Assert.AreEqual(1.0, smo.NegativeWeight);
    Assert.AreEqual(0.4, smo.WeightRatio, 1e-10);
    Assert.AreEqual(0.38095238095238093, error);
    Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
    Assert.AreEqual(32, machine.SupportVectors.Length);

    int[] actual = new int[labels.Length];
    for (int i = 0; i < actual.Length; i++)
        actual[i] = Math.Sign(machine.Compute(inputs[i]));

    ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
    Assert.AreEqual(7, matrix.FalseNegatives);
    Assert.AreEqual(9, matrix.FalsePositives);
    Assert.AreEqual(5, matrix.TruePositives);
    Assert.AreEqual(21, matrix.TrueNegatives);

    Assert.AreEqual(0.41666666666666669, matrix.Sensitivity);
    Assert.AreEqual(0.7, matrix.Specificity);
}
public void LargeLearningTest1()
{
    // Create large input vectors
    int rows = 1000;
    int dimension = 10000;

    double[][] inputs = new double[rows][];
    int[] outputs = new int[rows];

    Random rnd = new Random();

    for (int i = 0; i < inputs.Length; i++)
    {
        inputs[i] = new double[dimension];

        if (i > rows / 2)
        {
            for (int j = 0; j < dimension; j++)
                inputs[i][j] = rnd.NextDouble();
            outputs[i] = -1;
        }
        else
        {
            for (int j = 0; j < dimension; j++)
                inputs[i][j] = rnd.NextDouble() * 4.21 + 5;
            outputs[i] = +1;
        }
    }

    KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Polynomial(2), dimension);

    LeastSquaresLearning smo = new LeastSquaresLearning(svm, inputs, outputs);

    double error = smo.Run();

    Assert.AreEqual(0, error);
}
public void KernelTest1()
{
    var dataset = SequentialMinimalOptimizationTest.GetYingYang();
    double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
    int[] labels = dataset.GetColumn(2).ToInt32();

    double e1, e2;
    double[] w1, w2;

    {
        Accord.Math.Random.Generator.Seed = 0;

        var svm = new SupportVectorMachine(inputs: 2);
        var teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels);

        teacher.Tolerance = 1e-10;
        teacher.Complexity = 1e+10;

        e1 = teacher.Run();
        w1 = svm.ToWeights();
    }

    {
        Accord.Math.Random.Generator.Seed = 0;

        var svm = new KernelSupportVectorMachine(new Linear(0), inputs: 2);
        var teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels);

        teacher.Tolerance = 1e-10;
        teacher.Complexity = 1e+10;

        e2 = teacher.Run();
        w2 = svm.ToWeights();
    }

    Assert.AreEqual(e1, e2);
    Assert.AreEqual(w1.Length, w2.Length);
    Assert.AreEqual(w1[0], w2[0], 1e-8);
    Assert.AreEqual(w1[1], w2[1], 1e-8);
    Assert.AreEqual(w1[2], w2[2], 1e-8);
}
public void different_lengths_same_error()
{
    // GH-191: Different accuracy by specifying KernelSupportVectorMachine input length
    // https://github.com/accord-net/framework/issues/191

    var dataset = SequentialMinimalOptimizationTest.GetYingYang();
    double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
    int[] outputs = dataset.GetColumn(2).ToInt32();

    var machine1 = new KernelSupportVectorMachine(new Linear(), inputs[0].Length);
    var teacher1 = new LinearDualCoordinateDescent(machine1, inputs, outputs);
    var error1 = teacher1.Run(true);

    var machine2 = new KernelSupportVectorMachine(new Linear(), 0);
    var teacher2 = new LinearDualCoordinateDescent(machine2, inputs, outputs);
    var error2 = teacher2.Run(true);

    Assert.AreEqual(error1, error2);
}
public void RunTest()
{
    Accord.Math.Tools.SetupGenerator(0);

    var dist = NormalDistribution.Standard;

    double[] x =
    {
        +1.0312479734420776,
        +0.99444115161895752,
        +0.21835240721702576,
        +0.47197291254997253,
        +0.68701112270355225,
        -0.58556461334228516,
        -0.64154046773910522,
        -0.66485315561294556,
        +0.37940266728401184,
        -0.61046308279037476
    };

    double[][] inputs = Jagged.ColumnVector(x);

    IKernel kernel = new Linear();
    var machine = new KernelSupportVectorMachine(kernel, inputs: 1);

    var teacher = new OneclassSupportVectorLearning(machine, inputs)
    {
        Nu = 0.1
    };

    // Run the learning algorithm
    double error = teacher.Run();

    Assert.AreEqual(2, machine.Weights.Length);
    Assert.AreEqual(0.39198910030993617, machine.Weights[0]);
    Assert.AreEqual(0.60801089969006383, machine.Weights[1]);
    Assert.AreEqual(inputs[0][0], machine.SupportVectors[0][0]);
    Assert.AreEqual(inputs[7][0], machine.SupportVectors[1][0]);
    Assert.AreEqual(0.0, error, 1e-10);
}
/// <summary>
///   Constructs a new Least Squares SVM (LS-SVM) learning algorithm.
/// </summary>
/// 
/// <param name="machine">A support vector machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
/// 
public LeastSquaresLearning(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    SupportVectorLearningHelper.CheckArgs(machine, inputs, outputs);

    // Set the machine
    this.machine = machine;

    // Grab the machine kernel
    KernelSupportVectorMachine ksvm = machine as KernelSupportVectorMachine;
    this.kernel = (ksvm == null) ? new Linear() : ksvm.Kernel;

    // Kernel cache
    this.cacheSize = inputs.Length;

    // Get learning data
    this.inputs = inputs;
    this.outputs = outputs;
    this.ones = Matrix.Vector(outputs.Length, 1);
}
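// A minimal usage sketch for this constructor (my own illustration, pieced together from
// the LearnTest examples further below rather than from official documentation): an LS-SVM
// teacher is created from a machine plus -1/+1 labelled data, tuned through its Complexity
// property, and trained with Run().
public static void LeastSquaresLearningUsageSketch()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };
    int[] xor = { -1, 1, 1, -1 };

    // A polynomial kernel machine, as in the XOR examples in this listing
    var machine = new KernelSupportVectorMachine(new Polynomial(2), inputs[0].Length);

    // The LS-SVM regularization constant plays the role of C in a standard SVM
    var teacher = new LeastSquaresLearning(machine, inputs, xor);
    teacher.Complexity = 10;

    double error = teacher.Run();
}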
public void SVMTestXOR()
{
    // XOR problem. The per-sample records were converted into the parallel input/label
    // arrays expected by the SequentialMinimalOptimization(machine, inputs, outputs)
    // constructor used throughout this listing.
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };
    int[] labels = { -1, 1, 1, -1 };

    KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(2), 2);

    var learn = new SequentialMinimalOptimization(machine, inputs, labels);
    double error = learn.Run();

    int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));
    int[] expected = { -1, 1, 1, -1 };

    CollectionAssert.AreEqual(expected, output);
}
public static TModel Create<TModel, TInput, TKernel>(int inputs, TKernel kernel)
    where TModel : class, ISupportVectorMachine<TInput>
    where TKernel : IKernel<TInput>
#if !NETSTANDARD1_4
    where TInput : ICloneable
#endif
{
    TModel result = null;
    var type = typeof(TModel);

    if (type == typeof(SupportVectorMachine))
    {
        result = new SupportVectorMachine(inputs) as TModel;
    }
    if (type == typeof(SupportVectorMachine<IKernel>))
    {
        result = new SupportVectorMachine<IKernel>(inputs, kernel as IKernel) as TModel;
    }
    if (type == typeof(SupportVectorMachine<IKernel<double[]>>))
    {
        result = new SupportVectorMachine<IKernel<double[]>>(inputs, kernel as IKernel<double[]>) as TModel;
    }
#if !NETSTANDARD1_4
#pragma warning disable 0618
    else if (type == typeof(KernelSupportVectorMachine))
    {
        result = new KernelSupportVectorMachine(kernel as IKernel, inputs) as TModel;
    }
#pragma warning restore 0618
#endif
    else if (type == typeof(SupportVectorMachine<TKernel, TInput>))
    {
        result = new SupportVectorMachine<TKernel, TInput>(inputs, kernel) as TModel;
    }

    if (result == null)
    {
        throw new NotSupportedException("If you are implementing your own support vector machine type, " +
            "please override the Create method in your learning algorithm to instruct the framework " +
            "how to instantiate a type of your new class.");
    }

    return result;
}
public void LearnTest()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] xor = { -1, 1, 1, -1 };

    // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
    KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(2), inputs[0].Length);

    // Create the Least Squares Support Vector Machine teacher
    LeastSquaresLearning learn = new LeastSquaresLearning(machine, inputs, xor);
    learn.Complexity = 10;

    // Run the learning algorithm
    learn.Run();

    int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));
    for (int i = 0; i < output.Length; i++)
        Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i]));
}
public void UseClassProportionsTest()
{
    var dataset = KernelSupportVectorMachineTest.training;
    var inputs = dataset.Submatrix(null, 0, 3);
    var labels = Accord.Math.Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();

    Gaussian kernel = Gaussian.Estimate(inputs);
    var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
    var smo = new SequentialMinimalOptimization(machine, inputs, labels);

    smo.Complexity = 1.0;
    smo.UseClassProportions = true;

    double error = smo.Run();

    Assert.AreEqual(1, smo.Complexity);
    Assert.AreEqual(0.4, smo.PositiveWeight);
    Assert.AreEqual(1.0, smo.NegativeWeight);
    Assert.AreEqual(0.4, smo.WeightRatio, 1e-10);
    Assert.AreEqual(0.2857142857142857, error);
    Assert.AreEqual(265.78327637381551, ((Gaussian)machine.Kernel).Sigma);
    Assert.AreEqual(26, machine.SupportVectors.Length);

    int[] actual = new int[labels.Length];
    for (int i = 0; i < actual.Length; i++)
        actual[i] = Math.Sign(machine.Compute(inputs[i]));

    ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
    Assert.AreEqual(12, matrix.FalseNegatives);
    Assert.AreEqual(0, matrix.FalsePositives);
    Assert.AreEqual(0, matrix.TruePositives);
    Assert.AreEqual(30, matrix.TrueNegatives);
}
public void LearnTest2()
{
    double[][] inputs = yinyang.Submatrix(null, 0, 1).ToJagged();
    int[] outputs = yinyang.GetColumn(2).ToInt32();

    // Create Kernel Support Vector Machine with a Polynomial Kernel of 3rd degree
    KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(3), inputs[0].Length);

    // Create the Least Squares Support Vector Machine teacher
    LeastSquaresLearning learn = new LeastSquaresLearning(machine, inputs, outputs);
    learn.Complexity = 1 / 0.1;

    // Run the learning algorithm
    learn.Run();

    int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));
    for (int i = 0; i < output.Length; i++)
        Assert.AreEqual(System.Math.Sign(outputs[i]), System.Math.Sign(output[i]));
}
private void createSVM()
{
    createKernel();

    // Create SVM for n input variables, where n is the number of features (columns)
    _svm = new KernelSupportVectorMachine(_kernel, inputs: _inputs[0].Length);

    // Create an instance of the SMO learning algorithm
    _smo = new SequentialMinimalOptimization(_svm, _inputs, _outputs)
    {
        // Set learning parameters
        Tolerance = _configuration.Tolerance,
        PositiveWeight = _configuration.WeightPositiveClass,
        NegativeWeight = _configuration.WeightNegativeClass,
        UseClassProportions = _configuration.UseComputedWeights,
        UseComplexityHeuristic = true
    };

    //if (!_configuration.UseHeuristicalComplexity) _smo.Complexity = _configuration.Complexity;

    Console.WriteLine("SVM> C=" + _smo.Complexity + ", Tolerance=" + _smo.Tolerance +
        ", PosW=" + _smo.PositiveWeight + ", NegW: " + _smo.NegativeWeight);
}
public void ComputeTest5()
{
    var dataset = yinyang;

    double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
    int[] labels = dataset.GetColumn(2).ToInt32();

    {
        Linear kernel = new Linear();
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1.0;

        double error = smo.Run();

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(1.0, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.14, error);
        Assert.AreEqual(30, machine.SupportVectors.Length);

        double[] actualWeights = machine.Weights;
        double[] expectedWeights =
        {
            -1, -1, 1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 0.337065120144639, -1, 1,
            -0.337065120144639, -1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1
        };
        Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(7, matrix.FalseNegatives);
        Assert.AreEqual(7, matrix.FalsePositives);
        Assert.AreEqual(43, matrix.TruePositives);
        Assert.AreEqual(43, matrix.TrueNegatives);
    }

    {
        Linear kernel = new Linear();
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1.0;
        smo.PositiveWeight = 0.3;
        smo.NegativeWeight = 1.0;

        double error = smo.Run();

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(0.3 / 1.0, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(0.3, smo.PositiveWeight);
        Assert.AreEqual(0.21, error);
        Assert.AreEqual(24, machine.SupportVectors.Length);

        double[] actualWeights = machine.Weights;
        //string str = actualWeights.ToString(Accord.Math.Formats.CSharpArrayFormatProvider.InvariantCulture);
        double[] expectedWeights =
        {
            -0.771026323762095, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -0.928973676237905,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
        };
        Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = (int)machine.Compute(inputs[i]);

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(50, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives);
        Assert.AreEqual(0, matrix.TruePositives);
        Assert.AreEqual(50, matrix.TrueNegatives);
    }

    {
        Linear kernel = new Linear();
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1.0;
        smo.PositiveWeight = 1.0;
        smo.NegativeWeight = 0.3;

        double error = smo.Run();

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(1.0 / 0.3, smo.WeightRatio);
        Assert.AreEqual(0.3, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.15, error);
        Assert.AreEqual(19, machine.SupportVectors.Length);

        double[] actualWeights = machine.Weights;
        double[] expectedWeights = new double[]
        {
            1, 1, -0.3, 1, -0.3, 1, 1, -0.3, 1, 1, 1, 1, 1, 1, 1, 1,
            0.129080057278249, 1, 0.737797469918795
        };
        Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(0, matrix.FalseNegatives);
        Assert.AreEqual(50, matrix.FalsePositives);
        Assert.AreEqual(50, matrix.TruePositives);
        Assert.AreEqual(0, matrix.TrueNegatives);
    }
}
public void ComputeTest()
{
    // Example AND problem
    double[][] inputs =
    {
        new double[] { 0, 0 }, // 0 and 0: 0 (label -1)
        new double[] { 0, 1 }, // 0 and 1: 0 (label -1)
        new double[] { 1, 0 }, // 1 and 0: 0 (label -1)
        new double[] { 1, 1 }  // 1 and 1: 1 (label +1)
    };

    // Dichotomy SVM outputs should be given as [-1;+1]
    int[] labels =
    {
        // 0,  0,  0, 1
          -1, -1, -1, 1
    };

    // Create a Support Vector Machine for the given inputs
    KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Linear(0), inputs[0].Length);

    // Instantiate a new learning algorithm for SVMs
    SequentialMinimalOptimization smo = new SequentialMinimalOptimization(machine, inputs, labels);

    // Set up the learning algorithm
    smo.Complexity = 100.0;

    // Run
    double error = smo.Run();

    Assert.AreEqual(0, error);

    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0])));
    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[1])));
    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[2])));
    Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[3])));

    // At this point we have the weighted support vectors
    //
    //    w     sv       b
    //  (+4) * (1,1)    -3
    //  (-2) * (1,0)
    //  (-2) * (0,1)
    //
    // However, it can be seen that the last SV can be written
    // as a linear combination of the two first vectors:
    //
    //   (0,1) = (1,1) - (1,0)
    //
    // Since we have a linear space (we are using a linear kernel)
    // this vector could be removed from the support vector set.
    //
    // f(x) = sum(alpha_i * x * x_i) + b
    //      = 4*(1,1)*x - 2*(1,0)*x - 2*(0,1)*x - 3
    //      = 4*(1,1)*x - 2*(1,0)*x - 2*((1,1) - (1,0))*x - 3
    //      = 4*(1,1)*x - 2*(1,0)*x - 2*(1,1)*x + 2*(1,0)*x - 3
    //      = 2*(1,1)*x - 3
    //      = 2*x1 + 2*x2 - 3
    //

    SupportVectorReduction svr = new SupportVectorReduction(machine);

    double error2 = svr.Run();

    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0])));
    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[1])));
    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[2])));
    Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[3])));
}
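// A small sanity check of the derivation in the comment above (my own addition, not part
// of the original test): the reduced linear decision function f(x) = 2*x1 + 2*x2 - 3
// reproduces the AND labels by sign alone.
public static void ReducedDecisionFunctionSketch()
{
    double[][] inputs =
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 }
    };
    int[] expected = { -1, -1, -1, +1 };

    for (int i = 0; i < inputs.Length; i++)
    {
        // f(x) = 2*x1 + 2*x2 - 3, the single-support-vector form derived above
        double f = 2 * inputs[i][0] + 2 * inputs[i][1] - 3;
        System.Diagnostics.Debug.Assert(System.Math.Sign(f) == expected[i]);
    }
}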
public void WeightRatioTest()
{
    var dataset = KernelSupportVectorMachineTest.training;
    var inputs = dataset.Submatrix(null, 0, 3);
    var labels = Accord.Math.Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();

    Gaussian kernel = Gaussian.Estimate(inputs);

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1.0;
        smo.WeightRatio = 10;

        double error = smo.Run();

        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.1, smo.NegativeWeight);
        Assert.AreEqual(0.7142857142857143, error);
        Assert.AreEqual(265.78327637381551, ((Gaussian)machine.Kernel).Sigma);
        Assert.AreEqual(39, machine.SupportVectors.Length);

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(12, matrix.TruePositives); // has more importance
        Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
        Assert.AreEqual(30, matrix.FalsePositives);
        Assert.AreEqual(0, matrix.TrueNegatives);

        Assert.AreEqual(1.0, matrix.Sensitivity);
        Assert.AreEqual(0.0, matrix.Specificity);
        Assert.AreEqual(0.44444444444444448, matrix.FScore);
        Assert.AreEqual(0.0, matrix.MatthewsCorrelationCoefficient);
    }

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1.0;
        smo.WeightRatio = 0.1;

        double error = smo.Run();

        Assert.AreEqual(0.1, smo.PositiveWeight);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(0.21428571428571427, error);
        Assert.AreEqual(265.78327637381551, ((Gaussian)machine.Kernel).Sigma);
        Assert.AreEqual(18, machine.SupportVectors.Length);

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(8, matrix.FalseNegatives);
        Assert.AreEqual(1, matrix.FalsePositives);  // has more importance
        Assert.AreEqual(4, matrix.TruePositives);
        Assert.AreEqual(29, matrix.TrueNegatives);  // has more importance

        Assert.AreEqual(0.33333333333333331, matrix.Sensitivity);
        Assert.AreEqual(0.96666666666666667, matrix.Specificity);
        Assert.AreEqual(0.47058823529411764, matrix.FScore);
        Assert.AreEqual(0.41849149947774944, matrix.MatthewsCorrelationCoefficient);
    }
}
public void ComputeTest5()
{
    var dataset = SequentialMinimalOptimizationTest.GetYingYang();
    double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
    int[] labels = dataset.GetColumn(2).ToInt32();

    var kernel = new Polynomial(2, 0);

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.UseComplexityHeuristic = true;

        double error = smo.Run();

        Assert.AreEqual(0.2, error);
        Assert.AreEqual(0.11714451552090824, smo.Complexity);

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(20, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives);
        Assert.AreEqual(30, matrix.TruePositives);
        Assert.AreEqual(50, matrix.TrueNegatives);
    }

    {
        Accord.Math.Tools.SetupGenerator(0);

        var projection = inputs.Apply(kernel.Transform);
        var machine = new SupportVectorMachine(projection[0].Length);
        var smo = new LinearDualCoordinateDescent(machine, projection, labels);

        smo.UseComplexityHeuristic = true;
        smo.Tolerance = 0.01;

        double error = smo.Run();

        Assert.AreEqual(0.18, error);
        Assert.AreEqual(0.11714451552090821, smo.Complexity, 1e-15);

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(projection[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(17, matrix.FalseNegatives);
        Assert.AreEqual(1, matrix.FalsePositives);
        Assert.AreEqual(33, matrix.TruePositives);
        Assert.AreEqual(49, matrix.TrueNegatives);
    }

    {
        Accord.Math.Random.Generator.Seed = 0;

        var projection = inputs.Apply(kernel.Transform);
        var machine = new SupportVectorMachine(projection[0].Length);
        var smo = new LinearDualCoordinateDescent(machine, projection, labels);

        smo.UseComplexityHeuristic = true;
        smo.Loss = Loss.L1;

        double error = smo.Run();

        Assert.AreEqual(0.2, error);
        Assert.AreEqual(0.11714451552090821, smo.Complexity, 1e-15);

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(kernel.Transform(inputs[i])));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(20, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives);
        Assert.AreEqual(30, matrix.TruePositives);
        Assert.AreEqual(50, matrix.TrueNegatives);
    }
}
public void weight_test_homogeneous_linear_kernel()
{
    var dataset = yinyang;

    double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
    int[] labels = dataset.GetColumn(2).ToInt32();

    Accord.Math.Tools.SetupGenerator(0);

    var kernel = new Linear();
    Assert.AreEqual(kernel.Constant, 0);

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1.0;
        smo.PositiveWeight = 1;
        smo.NegativeWeight = 1;
        smo.Tolerance = 0.001;

        double error = smo.Run();

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = machine.Decide(inputs[i]) ? 1 : 0;

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(43, matrix.TruePositives); // both classes are
        Assert.AreEqual(43, matrix.TrueNegatives); // well equilibrated
        Assert.AreEqual(7, matrix.FalseNegatives);
        Assert.AreEqual(7, matrix.FalsePositives);

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(1.0, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.14, error);
        Assert.AreEqual(0.001, smo.Tolerance);
        Assert.AreEqual(31, machine.SupportVectors.Length);

        machine.Compress();
        Assert.AreEqual(1, machine.Weights[0]);
        Assert.AreEqual(1, machine.SupportVectors.Length);
        Assert.AreEqual(-1.3107402300323954, machine.SupportVectors[0][0]);
        Assert.AreEqual(-0.5779471529948812, machine.SupportVectors[0][1]);
        Assert.AreEqual(-0.53366022455811646, machine.Threshold);

        for (int i = 0; i < actual.Length; i++)
        {
            int expected = actual[i];
            int y = machine.Decide(inputs[i]) ? 1 : 0;
            Assert.AreEqual(expected, y);
        }
    }

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1;
        smo.PositiveWeight = 100;
        smo.NegativeWeight = 1;
        smo.Tolerance = 0.001;

        double error = smo.Run();

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = machine.Decide(inputs[i]) ? 1 : 0;

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(50, matrix.TruePositives); // has more importance
        Assert.AreEqual(23, matrix.TrueNegatives);
        Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
        Assert.AreEqual(27, matrix.FalsePositives);

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(100, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(100, smo.PositiveWeight);
        Assert.AreEqual(0.001, smo.Tolerance);
        Assert.AreEqual(0.27, error);
        Assert.AreEqual(42, machine.SupportVectors.Length);
    }

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);

        smo.Complexity = 1;
        smo.PositiveWeight = 1;
        smo.NegativeWeight = 100;
        smo.Tolerance = 0.001;

        double error = smo.Run();

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = machine.Decide(inputs[i]) ? 1 : 0;

        var matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(25, matrix.TruePositives);
        Assert.AreEqual(50, matrix.TrueNegatives); // has more importance
        Assert.AreEqual(25, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives); // has more importance

        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(0.01, smo.WeightRatio);
        Assert.AreEqual(100, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.25, error);
        Assert.AreEqual(0.001, smo.Tolerance);
        Assert.AreEqual(40, machine.SupportVectors.Length);
    }
}
public OneclassSupportVectorLearning(KernelSupportVectorMachine model, double[][] input)
    : base(model, input)
{
}
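// A minimal usage sketch for this constructor (my own illustration based on the RunTest
// example earlier in this listing): one-class learning receives only input points, no
// labels, and is tuned through the Nu property before calling Run().
public static void OneclassLearningUsageSketch()
{
    // A few one-dimensional observations describing the "normal" class
    double[][] inputs =
    {
        new double[] {  1.03 },
        new double[] {  0.99 },
        new double[] {  0.21 },
        new double[] {  0.47 },
        new double[] { -0.58 }
    };

    var machine = new KernelSupportVectorMachine(new Linear(), inputs: 1);

    var teacher = new OneclassSupportVectorLearning(machine, inputs)
    {
        Nu = 0.1 // roughly, the fraction of points allowed to fall outside the learned region
    };

    double error = teacher.Run();
}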
public void RunTest1()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] outputs = { -1, 1, 1, -1 };

    KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Gaussian(3.6), 2);
    var smo = new SequentialMinimalOptimization(svm, inputs, outputs);

    double error1 = smo.Run();
    Assert.AreEqual(0, error1);

    double[] distances = new double[outputs.Length];
    for (int i = 0; i < outputs.Length; i++)
    {
        int y = svm.Compute(inputs[i], out distances[i]);
        Assert.AreEqual(outputs[i], y);
    }

    var target = new ProbabilisticOutputCalibration(svm, inputs, outputs);

    double ll0 = target.LogLikelihood(inputs, outputs);
    double ll1 = target.Run();
    double ll2 = target.LogLikelihood(inputs, outputs);

    Assert.AreEqual(5.5451735748694571, ll1);
    Assert.AreEqual(ll1, ll2);
    Assert.IsTrue(ll1 > ll0);

    double[] newdistances = new double[outputs.Length];
    for (int i = 0; i < outputs.Length; i++)
    {
        int y = svm.Compute(inputs[i], out newdistances[i]);
        Assert.AreEqual(outputs[i], y);
    }

    double[] probs = new double[outputs.Length];
    for (int i = 0; i < outputs.Length; i++)
    {
        int y;
        probs[i] = svm.ToMulticlass().Probability(inputs[i], out y);
        Assert.AreEqual(outputs[i], y == 1 ? 1 : -1);
    }

    Assert.AreEqual(0.25, probs[0], 1e-5);
    Assert.AreEqual(0.75, probs[1], 1e-5);
    Assert.AreEqual(0.75, probs[2], 1e-5);
    Assert.AreEqual(0.25, probs[3], 1e-5);

    foreach (var p in probs)
        Assert.IsFalse(Double.IsNaN(p));
}
public void DynamicalTimeWarpingConstructorTest()
{
    double[][] sequences =
    {
        new double[] // -1
        {
            0, 0, 0,
            1, 1, 1,
            2, 2, 2,
        },
        new double[] // -1
        {
            0, 1, 0,
            0, 2, 0,
            0, 3, 0
        },
        new double[] // +1
        {
            1, 1, 0,
            1, 2, 0,
            2, 1, 0,
        },
        new double[] // +1
        {
            0, 0, 1,
            0, 0, 2,
            0, 1, 3,
        },
    };

    int[] outputs = { -1, -1, +1, +1 };

    // Set the parameters of the kernel
    double alpha = 0.85;
    int innerVectorLength = 3;

    // Create the kernel. Note that the input vector will be given out automatically
    DynamicTimeWarping target = new DynamicTimeWarping(innerVectorLength, alpha);

    // When using variable-length kernels, specify 0 as the input length.
    KernelSupportVectorMachine svm = new KernelSupportVectorMachine(target, 0);

    // Create the Sequential Minimal Optimization as usual
    SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, sequences, outputs);
    smo.Complexity = 1.5;
    double error = smo.Run();

    // Computing the training values
    var a0 = svm.Compute(sequences[0]);
    var a1 = svm.Compute(sequences[1]);
    var a2 = svm.Compute(sequences[2]);
    var a3 = svm.Compute(sequences[3]);

    Assert.AreEqual(-1, System.Math.Sign(a0));
    Assert.AreEqual(-1, System.Math.Sign(a1));
    Assert.AreEqual(+1, System.Math.Sign(a2));
    Assert.AreEqual(+1, System.Math.Sign(a3));

    // Computing a new testing value
    double[] test =
    {
        1, 0, 1,
        0, 0, 2,
        0, 1, 3,
    };

    var a4 = svm.Compute(test);
}
public void GridsearchConstructorTest()
{
    Accord.Math.Random.Generator.Seed = 0;

    // Example binary data
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] xor = // xor labels
    {
        -1, 1, 1, -1
    };

    // Declare the parameters and ranges to be searched
    GridSearchRange[] ranges =
    {
        new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 0.50 }),
        new GridSearchRange("degree", new double[] { 1, 10, 2, 3, 4, 5 }),
        new GridSearchRange("constant", new double[] { 0, 1, 2 })
    };

    // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
    var gridsearch = new GridSearch<KernelSupportVectorMachine>(ranges);

#if DEBUG
    gridsearch.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

    // Set the fitting function for the algorithm
    gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
    {
        // The parameters to be tried will be passed as a function parameter.
        int degree = (int)parameters["degree"].Value;
        double constant = parameters["constant"].Value;
        double complexity = parameters["complexity"].Value;

        // Use the parameters to build the SVM model
        Polynomial kernel = new Polynomial(degree, constant);
        KernelSupportVectorMachine ksvm = new KernelSupportVectorMachine(kernel, 2);

        // Create a new learning algorithm for SVMs
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(ksvm, inputs, xor);
        smo.Complexity = complexity;

        // Measure the model performance to return as an out parameter
        error = smo.Run();

        return ksvm; // Return the current model
    };

    // Declare some out variables to pass to the grid search algorithm
    GridSearchParameterCollection bestParameters;
    double minError;

    // Compute the grid search to find the best Support Vector Machine
    KernelSupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);

    // A linear kernel can't solve the xor problem.
    Assert.AreEqual(1, bestParameters["degree"].Value);
    Assert.AreEqual(1, bestParameters["constant"].Value);
    Assert.AreEqual(1e-8, bestParameters["complexity"].Value);

    // The minimum error should be zero because the problem is well-known.
    Assert.AreEqual(minError, 0.0);

    Assert.IsNotNull(bestModel);
    Assert.IsNotNull(bestParameters);
    Assert.AreEqual(bestParameters.Count, 3);
}
/// <summary>
///   Runs the one-against-one learning algorithm.
/// </summary>
/// 
/// <param name="computeError">
///   True to compute error after the training
///   process completes, false otherwise. Default is true.
/// </param>
/// <param name="token">
///   A <see cref="CancellationToken"/> which can be used
///   to request the cancellation of the learning algorithm
///   when it is being run in another thread.
/// </param>
/// 
/// <returns>
///   The sum of squares error rate for
///   the resulting support vector machine.
/// </returns>
/// 
public double Run(bool computeError, CancellationToken token)
{
    if (configure == null)
    {
        var excp = new InvalidOperationException("Please specify the algorithm configuration function " +
            "by setting the Algorithm property for this class. Examples are available in the " +
            "documentation for Multiclass Support Vector Learning class (given in the help link).");
        excp.HelpLink = "http://accord-framework.net/svn/docs/html/T_Accord_MachineLearning_VectorMachines_MulticlassSupportVectorMachine.htm";
        throw excp;
    }

    int classes = msvm.Classes;
    int total = (classes * (classes - 1)) / 2;
    int progress = 0;

    var pairs = new Tuple<int, int>[total];
    for (int i = 0, k = 0; i < classes; i++)
        for (int j = 0; j < i; j++, k++)
            pairs[k] = Tuple.Create(i, j);

    msvm.Reset();

    // Save exceptions but process all machines
    var exceptions = new ConcurrentBag<Exception>();

    // For each class i
    Parallel.For(0, total, k =>
    {
        if (token.IsCancellationRequested)
            return;

        int i = pairs[k].Item1;
        int j = pairs[k].Item2;

        // We will start the binary sub-problem
        var args = new SubproblemEventArgs(i, j);
        OnSubproblemStarted(args);

        // Retrieve the associated machine
        KernelSupportVectorMachine machine = msvm[i, j];

        // Retrieve the associated classes
        int[] idx = outputs.Find(x => x == i || x == j);

        double[][] subInputs = inputs.Submatrix(idx);
        int[] subOutputs = outputs.Submatrix(idx);

        // Transform it into a two-class problem
        subOutputs.ApplyInPlace(x => x = (x == i) ? -1 : +1);

        // Train the machine on the two-class problem.
        var subproblem = configure(machine, subInputs, subOutputs, i, j);

        var canCancel = (subproblem as ISupportCancellation);

        try
        {
            if (canCancel != null)
                canCancel.Run(false, token);
            else
                subproblem.Run(false);
        }
        catch (Exception ex)
        {
            exceptions.Add(ex);
        }

        // Update and report progress
        args.Progress = Interlocked.Increment(ref progress);
        args.Maximum = total;

        OnSubproblemFinished(args);
    });

    if (exceptions.Count > 0)
    {
        throw new AggregateException("One or more exceptions were thrown when teaching " +
            "the machines. Please check the InnerException property of this AggregateException " +
            "to discover what exactly caused this error.", exceptions);
    }

    // Compute error if required.
    return (computeError) ? ComputeError(inputs, outputs) : 0.0;
}
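// A small, self-contained sketch (my own addition) of the pair indexing used above:
// one-against-one trains one binary machine per unordered class pair, so for `classes`
// classes there are classes * (classes - 1) / 2 sub-problems, enumerated exactly as in Run().
public static void PairEnumerationSketch()
{
    int classes = 4;
    int total = (classes * (classes - 1)) / 2; // 6 pairs for 4 classes

    var pairs = new Tuple<int, int>[total];
    for (int i = 0, k = 0; i < classes; i++)
        for (int j = 0; j < i; j++, k++)
            pairs[k] = Tuple.Create(i, j);

    // pairs now holds (1,0), (2,0), (2,1), (3,0), (3,1), (3,2); each entry k names the
    // sub-problem where class i is relabelled -1 and class j is relabelled +1
    foreach (var p in pairs)
        Console.WriteLine(p);
}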
public LinearCoordinateDescent(KernelSupportVectorMachine model, double[][] input, int[] output)
    : base(model, input, output)
{
}
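// A minimal usage sketch for this constructor (an assumption on my part: the teacher is
// assumed to follow the same pattern as the other linear teachers in this listing, such as
// LinearDualCoordinateDescent, exposing a Run() method that returns the training error).
public static void LinearCoordinateDescentUsageSketch()
{
    double[][] inputs =
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 }
    };
    int[] outputs = { -1, -1, -1, 1 };

    // A linear kernel machine trained with the coordinate descent teacher
    var machine = new KernelSupportVectorMachine(new Linear(), inputs[0].Length);
    var teacher = new LinearCoordinateDescent(machine, inputs, outputs);

    double error = teacher.Run();
}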
private void btnCreate_Click(object sender, EventArgs e)
{
    if (dgvLearningSource.DataSource == null)
    {
        MessageBox.Show("Please load some data first.");
        return;
    }

    // Finishes and saves any pending changes to the given data
    dgvLearningSource.EndEdit();

    // Create the specified Kernel
    IKernel kernel = getKernel();

    double[,] sourceMatrix;
    double[,] inputs;
    int[] labels;
    getData(out sourceMatrix, out inputs, out labels);

    // Perform classification
    SequentialMinimalOptimization smo;

    // Creates the Support Vector Machine using the selected kernel
    svm = new KernelSupportVectorMachine(kernel, 2);

    // Creates a new instance of the SMO learning algorithm
    smo = new SequentialMinimalOptimization(svm, inputs.ToArray(), labels);

    // Set learning parameters
    smo.Complexity = (double)numC.Value;
    smo.Tolerance = (double)numT.Value;

    // Run
    double error = smo.Run();

    numC.Value = (decimal)smo.Complexity;

    // Show support vectors
    double[,] supportVectors = svm.SupportVectors.ToMatrix();
    double[,] supportVectorsWeights = supportVectors.InsertColumn(
        svm.Weights, supportVectors.GetLength(1));

    if (supportVectors.GetLength(0) == 0)
    {
        dgvSupportVectors.DataSource = null;
        graphSupportVectors.GraphPane.CurveList.Clear();
        return;
    }

    dgvSupportVectors.DataSource = new ArrayDataView(supportVectorsWeights,
        sourceColumns.Submatrix(0, supportVectors.GetLength(1) - 1).Concatenate("Weight"));

    double[,] graph = supportVectors;

    int[] idx = new int[svm.SupportVectors.Length];
    double[] a = sourceMatrix.GetColumn(0);
    double[] o = sourceMatrix.GetColumn(2);
    for (int i = 0; i < idx.Length; i++)
        idx[i] = Matrix.Find(a, x => x == svm.SupportVectors[i][0], true)[0];

    graph = graph.InsertColumn(o.Submatrix(idx), 2);

    // Plot support vectors
    CreateScatterplot(graphSupportVectors, graph);

    var ranges = Matrix.Range(sourceMatrix);
    double[][] map = Matrix.CartesianProduct(
        Matrix.Interval(ranges[0], 0.05),
        Matrix.Interval(ranges[1], 0.05));

    var result = map.Apply(svm.Compute).Apply(Math.Sign);

    var graph2 = map.ToMatrix().InsertColumn(result.ToDouble());
    CreateScatterplot(zedGraphControl2, graph2);
}
public void DynamicalTimeWarpingConstructorTest3()
{
    // Suppose you have sequences of multivariate observations, and that
    // those sequences could be of arbitrary length. On the other hand,
    // each observation has a fixed, delimited number of dimensions.

    // In this example, we have sequences of 3-dimensional observations.
    // Each sequence can have an arbitrary length, but each observation
    // will always have length 3:

    double[][][] sequences =
    {
        new double[][] // first sequence
        {
            new double[] { 1, 1, 1 }, // first observation of the first sequence
            new double[] { 1, 2, 1 }, // second observation of the first sequence
            new double[] { 1, 4, 2 }, // third observation of the first sequence
            new double[] { 2, 2, 2 }, // fourth observation of the first sequence
        },

        new double[][] // second sequence (note that this sequence has a different length)
        {
            new double[] { 1, 1, 1 }, // first observation of the second sequence
            new double[] { 1, 5, 6 }, // second observation of the second sequence
            new double[] { 2, 7, 1 }, // third observation of the second sequence
        },

        new double[][] // third sequence
        {
            new double[] { 8, 2, 1 }, // first observation of the third sequence
        },

        new double[][] // fourth sequence
        {
            new double[] { 8, 2, 5 }, // first observation of the fourth sequence
            new double[] { 1, 5, 4 }, // second observation of the fourth sequence
        }
    };

    // Now, we will also have different class labels associated with each
    // sequence. We will assign -1 to sequences whose observations start
    // with { 1, 1, 1 } and +1 to those that do not:

    int[] outputs =
    {
        -1, -1, // First two sequences are of class -1 (those start with {1,1,1})
         1,  1, // Last two sequences are of class +1 (don't start with {1,1,1})
    };

    // At this point, we will have to "flat" out the input sequences from double[][][]
    // to a double[][] so they can be properly understood by the SVMs. The problem is
    // that SVMs usually expect the data to be comprised of fixed-length input vectors
    // and associated class labels. But in this case, we will be feeding them
    // arbitrary-length sequences of input vectors and class labels associated with
    // each sequence, instead of each vector.

    double[][] inputs = new double[sequences.Length][];
    for (int i = 0; i < sequences.Length; i++)
        inputs[i] = Matrix.Concatenate(sequences[i]);

    // Now we have to setup the Dynamic Time Warping kernel. We will have to
    // inform the length of the fixed-length observations contained in each
    // arbitrary-length sequence:
    DynamicTimeWarping kernel = new DynamicTimeWarping(length: 3);

    // Now we can create the machine. When using variable-length
    // kernels, we will need to pass zero as the input length:
    var svm = new KernelSupportVectorMachine(kernel, inputs: 0);

    // Create the Sequential Minimal Optimization learning algorithm
    var smo = new SequentialMinimalOptimization(svm, inputs, outputs)
    {
        Complexity = 1.5
    };

    // And start learning it!
    double error = smo.Run(); // error will be 0.0

    // At this point, we should have obtained a useful machine. Let's
    // see if it can understand a few examples it hasn't seen before:

    double[][] a =
    {
        new double[] { 1, 1, 1 },
        new double[] { 7, 2, 5 },
        new double[] { 2, 5, 1 },
    };

    double[][] b =
    {
        new double[] { 7, 5, 2 },
        new double[] { 4, 2, 5 },
        new double[] { 1, 1, 1 },
    };

    // Following the aforementioned logic, sequence (a) should be
    // classified as -1, and sequence (b) should be classified as +1.

    int resultA = System.Math.Sign(svm.Compute(Matrix.Concatenate(a))); // -1
    int resultB = System.Math.Sign(svm.Compute(Matrix.Concatenate(b))); // +1

    Assert.AreEqual(0, error);
    Assert.AreEqual(-1, resultA);
    Assert.AreEqual(+1, resultB);
}