public void LearnTest() { double[][] inputs = { new double[] { -1, -1 }, new double[] { -1, 1 }, new double[] { 1, -1 }, new double[] { 1, 1 } }; int[] xor = { -1, 1, 1, -1 }; // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(2), inputs[0].Length); // Create the Least Squares Support Vector Machine teacher LeastSquaresLearning learn = new LeastSquaresLearning(machine, inputs, xor); learn.Complexity = 10; // Run the learning algorithm learn.Run(); int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p))); for (int i = 0; i < output.Length; i++) Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i])); }
public void LearnTest() { double[][] inputs = { new double[] { -1, -1 }, new double[] { -1, 1 }, new double[] { 1, -1 }, new double[] { 1, 1 } }; int[] xor = { -1, 1, 1, -1 }; // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(2), inputs[0].Length); // Create the sequential minimal optimization teacher SequentialMinimalOptimization learn = new SequentialMinimalOptimization(machine, inputs, xor); // Run the learning algorithm learn.Run(); int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p))); for (int i = 0; i < output.Length; i++) Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i])); }
public double v3_0_1() { var ksvm = new KernelSupportVectorMachine(new Polynomial(2), 2); var smo = new SequentialMinimalOptimization(ksvm, inputs, outputs); return smo.Run(computeError: false); }
public override Func<double[], double> Learn(LearningData learningData) { var svm = new KernelSupportVectorMachine(_kernel, learningData.Variables.Count); var smo = new SequentialMinimalOptimization( svm, learningData.Inputs, learningData.Outputs); smo.Run(); return svm.Compute; }
static KernelSupportVectorMachine LearnSVM(HSL[] positives, HSL[] negatives, double throwExceptionWhenErrorGreaterThan) { int[] classes = new int[positives.Length + negatives.Length]; double[][] vectors = new double[classes.Length][]; int index = 0; for (int c = 0; c < positives.Length; c++, index++) { classes[index] = 1; vectors[index] = HSLToDouble(positives[c]); } for (int c = 0; c < negatives.Length; c++, index++) { classes[index] = -1; vectors[index] = HSLToDouble(negatives[c]); } KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Gaussian(.1), vectors[0].Length); SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, vectors.ToArray(), classes); //smo.Complexity = 1.0; double error = smo.Run(); if (error > throwExceptionWhenErrorGreaterThan) { throw new Exception("Failed to get reasonable error value."); } return svm; }
//public SupportVectorMachine SVM //{ // get { return svm; } // private set { svm = value; } //} public override void TrainningModel(TrainningData trainningData) { ContinuousDataTableAdapter continuousDataTableAdapter = new ContinuousDataTableAdapter(); DataTable continuousDataTable = continuousDataTableAdapter.GetData(); DataTable dataTable = continuousDataTable.DefaultView.ToTable(false, TableMetaData.TestingAttributes); string[] columnNames; double[][] inputs = dataTable.ToArray(out columnNames); int[] outputs = (int[])trainningData.ClassificationAttribute.Clone(); // Create output for SVM (-1 or 1) for (int index = 0; index < outputs.Length; index++) { if (outputs[index] == 0) { outputs[index] = -1; } } // Create a Support Vector Machine for the given inputs //this.svm = new SupportVectorMachine(inputs[0].Length); //// Create a Kernel Support Vector Machine for the given inputs this.svm = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length); // Instantiate a new learning algorithm for SVMs SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, outputs); // Set up the learning algorithm smo.Complexity = 1.0; // Run the learning algorithm double error = smo.Run(); }
public void TrainTest() { Accord.Math.Tools.SetupGenerator(0); // Example regression problem. Suppose we are trying // to model the following equation: f(x, y) = 2x + y double[][] inputs = // (x, y) { new double[] { 0, 1 }, // 2*0 + 1 = 1 new double[] { 4, 3 }, // 2*4 + 3 = 11 new double[] { 8, -8 }, // 2*8 - 8 = 8 new double[] { 2, 2 }, // 2*2 + 2 = 6 new double[] { 6, 1 }, // 2*6 + 1 = 13 new double[] { 5, 4 }, // 2*5 + 4 = 14 new double[] { 9, 1 }, // 2*9 + 1 = 19 new double[] { 1, 6 }, // 2*1 + 6 = 8 }; double[] outputs = // f(x, y) { 1, 11, 8, 6, 13, 14, 19, 8 }; // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree var machine = new KernelSupportVectorMachine(new Polynomial(2), inputs: 2); // Create the sequential minimal optimization teacher var learn = new SequentialMinimalOptimizationRegression(machine, inputs, outputs) { Complexity = 100 }; // Run the learning algorithm double error = learn.Run(); // Compute the answer for one particular example double fxy = machine.Compute(inputs[0]); // 1.0003849827673186 // Check for correct answers double[] answers = new double[inputs.Length]; for (int i = 0; i < answers.Length; i++) answers[i] = machine.Compute(inputs[i]); Assert.AreEqual(1.0, fxy, 1e-2); for (int i = 0; i < outputs.Length; i++) Assert.AreEqual(outputs[i], answers[i], 1e-2); }
/// <summary> /// Constructs a new Multi-class Kernel Support Vector Machine /// </summary> /// <param name="kernel">The chosen kernel for the machine.</param> /// <param name="inputs">The number of inputs for the machine.</param> /// <param name="classes">The number of classes in the classification problem.</param> /// <remarks> /// If the number of inputs is zero, this means the machine /// accepts an indefinite number of inputs. This is often the /// case for kernel vector machines using a sequence kernel. /// </remarks> public MulticlassSupportVectorMachine(int inputs, IKernel kernel, int classes) { if (classes <= 1) { throw new ArgumentException("The machine must have at least two classes.", "classes"); } // Create the kernel machines machines = new KernelSupportVectorMachine[classes - 1][]; for (int i = 0; i < classes - 1; i++) { machines[i] = new KernelSupportVectorMachine[i + 1]; for (int j = 0; j <= i; j++) { machines[i][j] = new KernelSupportVectorMachine(kernel, inputs); } } }
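// The constructor above only allocates the lower-triangular array of pairwise (one-vs-one)
// machines; a teacher is still needed to train them. The sketch below is a hypothetical usage
// example (the "inputs"/"outputs" variables and the Gaussian parameters are placeholders, not
// part of the original snippet), assuming the MulticlassSupportVectorLearning API that matches
// the Accord.NET version used throughout these snippets.
var machine = new MulticlassSupportVectorMachine(inputs: 2, kernel: new Gaussian(0.1), classes: 3);
var teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs)
{
    // Each pairwise subproblem gets its own SMO instance
    Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs)
};
double error = teacher.Run();
int[] votes;
int decision = machine.Compute(inputs[0], out votes); // votes come from the pairwise machines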
public void ComputeTest() { // Example AND problem double[][] inputs = { new double[] { 0, 0 }, // 0 and 0: 0 (label -1) new double[] { 0, 1 }, // 0 and 1: 0 (label -1) new double[] { 1, 0 }, // 1 and 0: 0 (label -1) new double[] { 1, 1 } // 1 and 1: 1 (label +1) }; // Dichotomy SVM outputs should be given as [-1;+1] int[] labels = { // 0, 0, 0, 1 -1, -1, -1, 1 }; // Create a Support Vector Machine for the given inputs KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length); // Instantiate a new learning algorithm for SVMs SequentialMinimalOptimization smo = new SequentialMinimalOptimization(machine, inputs, labels); // Set up the learning algorithm smo.Complexity = 1.0; // Run double error = smo.Run(); Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0]))); Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[1]))); Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[2]))); Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[3]))); Assert.AreEqual(error, 0); Assert.AreEqual(-0.6640625, machine.Threshold); Assert.AreEqual(1, machine.Weights[0]); Assert.AreEqual(-0.34375, machine.Weights[1]); Assert.AreEqual(-0.328125, machine.Weights[2]); Assert.AreEqual(-0.328125, machine.Weights[3]); }
public void RunTest() { Accord.Math.Tools.SetupGenerator(0); var dist = NormalDistribution.Standard; double[] x = { +1.0312479734420776, +0.99444115161895752, +0.21835240721702576, +0.47197291254997253, +0.68701112270355225, -0.58556461334228516, -0.64154046773910522, -0.66485315561294556, +0.37940266728401184, -0.61046308279037476 }; double[][] inputs = Jagged.ColumnVector(x); IKernel kernel = new Linear(); var machine = new KernelSupportVectorMachine(kernel, inputs: 1); var teacher = new OneclassSupportVectorLearning(machine, inputs) { Nu = 0.1 }; // Run the learning algorithm double error = teacher.Run(); Assert.AreEqual(2, machine.Weights.Length); Assert.AreEqual(0.39198910030993617, machine.Weights[0]); Assert.AreEqual(0.60801089969006383, machine.Weights[1]); Assert.AreEqual(inputs[0][0], machine.SupportVectors[0][0]); Assert.AreEqual(inputs[7][0], machine.SupportVectors[1][0]); }
void MainWindow_Loaded(object sender, RoutedEventArgs e) { _green = KernelSupportVectorMachine.Load("resources/green.svm"); _purple = KernelSupportVectorMachine.Load("resources/purple.svm"); _red = KernelSupportVectorMachine.Load("resources/red.svm"); FilterInfoCollection filter = new FilterInfoCollection(FilterCategory.VideoInputDevice); FilterInfo desired = null; foreach (FilterInfo info in filter) { if (info.Name == "QuickCam for Notebooks Deluxe") { desired = info; break; } } _device = new VideoCaptureDevice(desired.MonikerString); _device.NewFrame += _device_NewFrame; _device.Start(); return; }
static void PrintAccuracy(string colorName, KernelSupportVectorMachine svm, HSL[] positives, HSL[] negatives) { int numberCorrect = 0; for (int c = 0; c < positives.Length; c++) { double result = svm.Compute(HSLToDouble(positives[c])); if (Math.Sign(result) == 1) { numberCorrect++; } } for (int c = 0; c < negatives.Length; c++) { double result = svm.Compute(HSLToDouble(negatives[c])); if (Math.Sign(result) == -1) { numberCorrect++; } } Console.WriteLine(colorName + " accuracy is " + (numberCorrect / (positives.Length + negatives.Length * 1.0)).ToString()); }
/// <summary> /// Computes the given input to produce the corresponding output. /// </summary> /// <param name="inputs">An input vector.</param> /// <param name="votes">A vector containing the number of votes for each class.</param> /// <returns>The output for the given input.</returns> public int Compute(double[] inputs, out int[] votes) { // out variables cannot be passed into delegates, // so we will be creating a copy for the vote array. int[] voting = new int[this.Classes]; // For each class AForge.Parallel.For(0, Classes, i => { // For each other class for (int j = 0; j < i; j++) { KernelSupportVectorMachine machine = this[i, j]; double answer = machine.Compute(inputs); // Compute the two-class problem if (answer < 0) { voting[i] += 1; // Class i has won } else { voting[j] += 1; // Class j has won } } }); // Voting finished. votes = voting; // Select the class with the maximum number of votes int output; Matrix.Max(votes, out output); return(output); // Return it as the output. }
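// Note on the voting scheme above: the nested loops evaluate every pair (i, j) with j < i,
// i.e. Classes * (Classes - 1) / 2 two-class machines, and each comparison casts exactly one
// vote. For example, with 4 classes there are 6 pairwise machines, so the entries of "votes"
// always sum to 6 and the predicted class is the index holding the largest share of them.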
public void KernelTest2() { var dataset = SequentialMinimalOptimizationTest.yinyang; var inputs = dataset.Submatrix(null, 0, 1).ToArray(); var labels = dataset.GetColumn(2).ToInt32(); var svm = new KernelSupportVectorMachine(new Linear(1), inputs: 2); bool thrown = false; try { new ProbabilisticCoordinateDescent(svm, inputs, labels); } catch (ArgumentException) { thrown = true; } Assert.IsTrue(thrown); }
public void KernelTest1() { var dataset = SequentialMinimalOptimizationTest.yinyang; double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray(); int[] labels = dataset.GetColumn(2).ToInt32(); double e1, e2; double[] w1, w2; { Accord.Math.Tools.SetupGenerator(0); var svm = new SupportVectorMachine(inputs: 2); var teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels); teacher.Tolerance = 1e-10; teacher.Complexity = 1e+10; e1 = teacher.Run(); w1 = svm.ToWeights(); } { Accord.Math.Tools.SetupGenerator(0); var svm = new KernelSupportVectorMachine(new Linear(0), inputs: 2); var teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels); teacher.Tolerance = 1e-10; teacher.Complexity = 1e+10; e2 = teacher.Run(); w2 = svm.ToWeights(); } Assert.AreEqual(e1, e2); Assert.AreEqual(w1.Length, w2.Length); Assert.AreEqual(w1[0], w2[0]); Assert.AreEqual(w1[1], w2[1]); Assert.AreEqual(w1[2], w2[2]); }
public void BootstrapConstructorTest3() { Accord.Math.Tools.SetupGenerator(0); // This is sample code on how to use the 0.632 Bootstrap // to assess the performance of Support Vector Machines. // Consider the example binary data. We will be trying // to learn an XOR problem and see how well SVMs // perform on this data. double[][] data = { new double[] { -1, -1 }, new double[] { 1, -1 }, new double[] { -1, 1 }, new double[] { 1, 1 }, new double[] { -1, -1 }, new double[] { 1, -1 }, new double[] { -1, 1 }, new double[] { 1, 1 }, new double[] { -1, -1 }, new double[] { 1, -1 }, new double[] { -1, 1 }, new double[] { 1, 1 }, new double[] { -1, -1 }, new double[] { 1, -1 }, new double[] { -1, 1 }, new double[] { 1, 1 }, }; int[] xor = // result of xor for the sample input data { -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, }; // Create a new Bootstrap algorithm passing the set size and the number of resamplings var bootstrap = new Bootstrap(size: data.Length, subsamples: 50); // Define a fitting function using Support Vector Machines. The objective of this // function is to learn an SVM on the subset of the data indicated by the bootstrap. bootstrap.Fitting = delegate(int[] indicesTrain, int[] indicesValidation) { // The fitting function is passed the indices of the original set which // should be considered training data and the indices of the original set // which should be considered validation data. // Let's now grab the training data: var trainingInputs = data.Submatrix(indicesTrain); var trainingOutputs = xor.Submatrix(indicesTrain); // And now the validation data: var validationInputs = data.Submatrix(indicesValidation); var validationOutputs = xor.Submatrix(indicesValidation); // Create a Kernel Support Vector Machine to operate on the set var svm = new KernelSupportVectorMachine(new Polynomial(2), 2); // Create a training algorithm and learn the training data var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs); double trainingError = smo.Run(); // Now we can compute the validation error on the validation data: double validationError = smo.ComputeError(validationInputs, validationOutputs); // Return a new information structure containing the model and the errors achieved. return new BootstrapValues(trainingError, validationError); }; // Compute the bootstrap estimate var result = bootstrap.Compute(); // Finally, access the measured performance. double trainingErrors = result.Training.Mean; double validationErrors = result.Validation.Mean; // And compute the 0.632 estimate double estimate = result.Estimate; Assert.AreEqual(50, bootstrap.B); Assert.AreEqual(0, trainingErrors); Assert.AreEqual(0.021428571428571429, validationErrors); Assert.AreEqual(50, bootstrap.Subsamples.Length); Assert.AreEqual(0.013542857142857143, estimate); }
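// The final assertion is consistent with the usual 0.632 bootstrap weighting (stated here as a
// sanity check, assuming result.Estimate follows the standard formula):
//   estimate = 0.368 * training + 0.632 * validation
//            = 0.368 * 0 + 0.632 * 0.021428571428571429
//            ≈ 0.013542857142857143, matching the asserted value.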
public void ComputeTest() { // Example AND problem double[][] inputs = { new double[] { 0, 0 }, // 0 and 0: 0 (label -1) new double[] { 0, 1 }, // 0 and 1: 0 (label -1) new double[] { 1, 0 }, // 1 and 0: 0 (label -1) new double[] { 1, 1 } // 1 and 1: 1 (label +1) }; // Dichotomy SVM outputs should be given as [-1;+1] int[] labels = { // 0, 0, 0, 1 -1, -1, -1, 1 }; // Create a Support Vector Machine for the given inputs KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Linear(0), inputs[0].Length); // Instantiate a new learning algorithm for SVMs SequentialMinimalOptimization smo = new SequentialMinimalOptimization(machine, inputs, labels); // Set up the learning algorithm smo.Complexity = 100.0; // Run double error = smo.Run(); Assert.AreEqual(0, error); Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0]))); Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[1]))); Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[2]))); Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[3]))); // At this point we have the weighted support vectors // w sv b // (+4) * (1,1) -3 // (-2) * (1,0) // (-2) * (0,1) // // However, it can be seen that the last SV can be written // as a linear combination of the first two vectors: // // (0,1) = (1,1) - (1,0) // // Since we have a linear space (we are using a linear kernel) // this vector could be removed from the support vector set. // // f(x) = sum(alpha_i * x * x_i) + b // = 4*(1,1)*x - 2*(1,0)*x - 2*(0,1)*x - 3 // = 4*(1,1)*x - 2*(1,0)*x - 2*((1,1) - (1,0))*x - 3 // = 4*(1,1)*x - 2*(1,0)*x - 2*(1,1)*x + 2*(1,0)*x - 3 // = 2*(1,1)*x - 3 // = 2*x1 + 2*x2 - 3 // SupportVectorReduction svr = new SupportVectorReduction(machine); double error2 = svr.Run(); Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0]))); Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[1]))); Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[2]))); Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[3]))); }
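// Quick numeric check (illustration only) of the reduced decision function derived in the
// comments above, f(x) = 2*x1 + 2*x2 - 3, over the four AND inputs:
Func<double[], double> f = x => 2 * x[0] + 2 * x[1] - 3; // reduced form, for illustration
// f(0,0) = -3, f(0,1) = -1, f(1,0) = -1, f(1,1) = +1,
// so Math.Sign(f(x)) reproduces exactly the labels { -1, -1, -1, +1 } asserted both before
// and after the SupportVectorReduction step.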
/// <summary> /// Compute SVM output with support vector sharing. /// </summary> /// private int computeParallel(int classA, int classB, double[] input, out double output, Cache cache) { // Get the machine for this problem KernelSupportVectorMachine machine = machines[classA - 1][classB]; // Get the vectors shared among all machines int[] vectors = cache.Vectors[classA - 1][classB]; double[] values = cache.Products; #if !NET35 SpinLock[] locks = cache.SyncObjects; #endif double sum = machine.Threshold; if (machine.IsCompact) { if (machine.Weights == null) { throw new Exception(); } // For linear machines, computation is simpler for (int i = 0; i < machine.Weights.Length; i++) { sum += machine.Weights[i] * input[i]; } } else { #if NET35 #region Backward compatibility for (int i = 0; i < vectors.Length; i++) { double value; // Check if it is a shared vector int j = vectors[i]; if (j >= 0) { // This is a shared vector. Check // if it has already been computed if (!Double.IsNaN(values[j])) { // Yes, it has. Retrieve the value from the cache value = values[j]; } else { // No, it has not. Compute and store the computed value in the cache value = values[j] = machine.Kernel.Function(machine.SupportVectors[i], input); Interlocked.Increment(ref cache.Evaluations); } } else { // This vector is not shared by any other machine. No need to cache value = machine.Kernel.Function(machine.SupportVectors[i], input); Interlocked.Increment(ref cache.Evaluations); } sum += machine.Weights[i] * value; } #endregion #else // For each support vector in the machine Parallel.For <double>(0, vectors.Length, // Init () => 0.0, // Map (i, state, partialSum) => { double value; // Check if it is a shared vector int j = vectors[i]; if (j >= 0) { // This is a shared vector. Check // if it has already been computed bool taken = false; locks[j].Enter(ref taken); if (!Double.IsNaN(values[j])) { // Yes, it has. Retrieve the value from the cache value = values[j]; } else { // No, it has not. Compute and store the computed value in the cache value = values[j] = machine.Kernel.Function(machine.SupportVectors[i], input); Interlocked.Increment(ref cache.Evaluations); } locks[j].Exit(); } else { // This vector is not shared by any other machine. No need to cache value = machine.Kernel.Function(machine.SupportVectors[i], input); Interlocked.Increment(ref cache.Evaluations); } return(partialSum + machine.Weights[i] * value); }, // Reduce (partialSum) => { lock (locks) sum += partialSum; } ); #endif } // Produce probabilities if required if (machine.IsProbabilistic) { output = machine.Link.Inverse(sum); return(output >= 0.5 ? +1 : -1); } else { output = sum; return(output >= 0 ? +1 : -1); } }
public void ComputeTest5() { var dataset = yinyang; double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray(); int[] labels = dataset.GetColumn(2).ToInt32(); { Linear kernel = new Linear(); var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length); var smo = new SequentialMinimalOptimization(machine, inputs, labels); smo.Complexity = 1.0; double error = smo.Run(); Assert.AreEqual(1.0, smo.Complexity); Assert.AreEqual(1.0, smo.WeightRatio); Assert.AreEqual(1.0, smo.NegativeWeight); Assert.AreEqual(1.0, smo.PositiveWeight); Assert.AreEqual(0.14, error); Assert.AreEqual(30, machine.SupportVectors.Length); double[] actualWeights = machine.Weights; double[] expectedWeights = { -1, -1, 1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 0.337065120144639, -1, 1, -0.337065120144639, -1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1 }; Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10)); int[] actual = new int[labels.Length]; for (int i = 0; i < actual.Length; i++) actual[i] = Math.Sign(machine.Compute(inputs[i])); ConfusionMatrix matrix = new ConfusionMatrix(actual, labels); Assert.AreEqual(7, matrix.FalseNegatives); Assert.AreEqual(7, matrix.FalsePositives); Assert.AreEqual(43, matrix.TruePositives); Assert.AreEqual(43, matrix.TrueNegatives); } { Linear kernel = new Linear(); var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length); var smo = new SequentialMinimalOptimization(machine, inputs, labels); smo.Complexity = 1.0; smo.PositiveWeight = 0.3; smo.NegativeWeight = 1.0; double error = smo.Run(); Assert.AreEqual(1.0, smo.Complexity); Assert.AreEqual(0.3 / 1.0, smo.WeightRatio); Assert.AreEqual(1.0, smo.NegativeWeight); Assert.AreEqual(0.3, smo.PositiveWeight); Assert.AreEqual(0.21, error); Assert.AreEqual(24, machine.SupportVectors.Length); double[] actualWeights = machine.Weights; //string str = actualWeights.ToString(Accord.Math.Formats.CSharpArrayFormatProvider.InvariantCulture); double[] expectedWeights = { -0.771026323762095, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -0.928973676237905, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }; Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10)); int[] actual = new int[labels.Length]; for (int i = 0; i < actual.Length; i++) actual[i] = (int)machine.Compute(inputs[i]); ConfusionMatrix matrix = new ConfusionMatrix(actual, labels); Assert.AreEqual(50, matrix.FalseNegatives); Assert.AreEqual(0, matrix.FalsePositives); Assert.AreEqual(0, matrix.TruePositives); Assert.AreEqual(50, matrix.TrueNegatives); } { Linear kernel = new Linear(); var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length); var smo = new SequentialMinimalOptimization(machine, inputs, labels); smo.Complexity = 1.0; smo.PositiveWeight = 1.0; smo.NegativeWeight = 0.3; double error = smo.Run(); Assert.AreEqual(1.0, smo.Complexity); Assert.AreEqual(1.0 / 0.3, smo.WeightRatio); Assert.AreEqual(0.3, smo.NegativeWeight); Assert.AreEqual(1.0, smo.PositiveWeight); Assert.AreEqual(0.15, error); Assert.AreEqual(19, machine.SupportVectors.Length); double[] actualWeights = machine.Weights; double[] expectedWeights = new double[] { 1, 1, -0.3, 1, -0.3, 1, 1, -0.3, 1, 1, 1, 1, 1, 1, 1, 1, 0.129080057278249, 1, 0.737797469918795 }; Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10)); int[] actual = new int[labels.Length]; for (int i = 0; i < actual.Length; i++) actual[i] = Math.Sign(machine.Compute(inputs[i])); ConfusionMatrix matrix = new ConfusionMatrix(actual, labels); Assert.AreEqual(0, matrix.FalseNegatives); Assert.AreEqual(50, matrix.FalsePositives); Assert.AreEqual(50, matrix.TruePositives); Assert.AreEqual(0, matrix.TrueNegatives); } }
private void btnCreate_Click(object sender, EventArgs e) { if (dgvLearningSource.DataSource == null) { MessageBox.Show("Please load some data first."); return; } // Finish and save any pending changes to the data dgvLearningSource.EndEdit(); // Creates a matrix from the entire source data table double[,] table = (dgvLearningSource.DataSource as DataTable).ToMatrix(out columnNames); // Get only the input vector values (first column) double[][] inputs = table.GetColumns(0).ToArray(); // Get only the outputs (last column) double[] outputs = table.GetColumn(1); // Create the specified Kernel IKernel kernel = createKernel(); // Create the Support Vector Machine for 1 input variable svm = new KernelSupportVectorMachine(kernel, inputs: 1); // Creates a new instance of the SMO regression learning algorithm var smo = new SequentialMinimalOptimizationRegression(svm, inputs, outputs) { // Set learning parameters Complexity = (double)numC.Value, Tolerance = (double)numT.Value, Epsilon = (double)numEpsilon.Value }; try { // Run double error = smo.Run(); lbStatus.Text = "Training complete!"; } catch (ConvergenceException) { lbStatus.Text = "Convergence could not be attained. " + "The learned machine might still be usable."; } // Check if we got support vectors if (svm.SupportVectors.Length == 0) { dgvSupportVectors.DataSource = null; graphSupportVectors.GraphPane.CurveList.Clear(); return; } // Show support vectors on the Support Vectors tab page double[][] supportVectorsWeights = svm.SupportVectors.InsertColumn(svm.Weights); string[] supportVectorNames = columnNames.RemoveAt(columnNames.Length - 1).Concatenate("Weight"); dgvSupportVectors.DataSource = new ArrayDataView(supportVectorsWeights, supportVectorNames); // Show the support vector labels on the scatter plot double[] supportVectorLabels = new double[svm.SupportVectors.Length]; for (int i = 0; i < supportVectorLabels.Length; i++) { int j = inputs.Find(sv => sv == svm.SupportVectors[i])[0]; supportVectorLabels[i] = outputs[j]; } double[][] graph = svm.SupportVectors.InsertColumn(supportVectorLabels); CreateScatterplot(graphSupportVectors, graph.ToMatrix()); // Get the range of the input variable (X) DoubleRange range = Matrix.Range(table.GetColumn(0)); double[][] map = Matrix.Interval(range, 0.05).ToArray(); // Compute the regression output for each point in the input range double[] result = map.Apply(svm.Compute); double[,] surface = map.ToMatrix().InsertColumn(result); CreateScatterplot(zedGraphControl2, surface); }
private Accord.MachineLearning.VectorMachines.Learning.ISupportVectorMachineLearning getAlg(KernelSupportVectorMachine svm, double[][] classInputs, int[] classOutputs) { Accord.MachineLearning.VectorMachines.Learning.SequentialMinimalOptimization smo = new Accord.MachineLearning.VectorMachines.Learning.SequentialMinimalOptimization(svm, classInputs, classOutputs); double c = Accord.MachineLearning.VectorMachines.Learning.SequentialMinimalOptimization.EstimateComplexity(svm.Kernel, classInputs); smo.Complexity = c; smo.Tolerance = 0.01; return (Accord.MachineLearning.VectorMachines.Learning.ISupportVectorMachineLearning)smo; }
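// Note: EstimateComplexity computes the C heuristic explicitly before training. Other snippets
// in this collection (e.g. LargeLearningTest1 and TransformTest) obtain the same effect by
// setting smo.UseComplexityHeuristic = true, which lets the teacher estimate C from the kernel
// and the training inputs on its own. Treat the equivalence as an assumption based on the usage
// shown here, not as verified library behavior.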
/// <summary> /// Constructs a new Multi-class Kernel Support Vector Machine /// </summary> /// /// <param name="kernel">The chosen kernel for the machine.</param> /// <param name="inputs">The number of inputs for the machine.</param> /// <param name="classes">The number of classes in the classification problem.</param> /// <remarks> /// If the number of inputs is zero, this means the machine /// accepts an indefinite number of inputs. This is often the /// case for kernel vector machines using a sequence kernel. /// </remarks> /// public MulticlassSupportVectorMachine(int inputs, IKernel kernel, int classes) { if (classes <= 1) throw new ArgumentException("The machine must have at least two classes.", "classes"); // Create the kernel machines machines = new KernelSupportVectorMachine[classes - 1][]; for (int i = 0; i < machines.Length; i++) { machines[i] = new KernelSupportVectorMachine[i + 1]; for (int j = 0; j <= i; j++) machines[i][j] = new KernelSupportVectorMachine(kernel, inputs); } }
/// <summary> /// Constructs a new Multi-class Kernel Support Vector Machine /// </summary> /// /// <param name="machines"> /// The machines to be used in each of the pairwise class subproblems. /// </param> /// public MulticlassSupportVectorMachine(KernelSupportVectorMachine[][] machines) { if (machines == null) throw new ArgumentNullException("machines"); this.machines = machines; }
public void DynamicalTimeWarpingConstructorTest() { double[][] sequences = { new double[] // -1 { 0, 0, 0, 1, 1, 1, 2, 2, 2, }, new double[] // -1 { 0, 1, 0, 0, 2, 0, 0, 3, 0 }, new double[] // +1 { 1, 1, 0, 1, 2, 0, 2, 1, 0, }, new double[] // +1 { 0, 0, 1, 0, 0, 2, 0, 1, 3, }, }; int[] outputs = { -1, -1, +1, +1 }; // Set the parameters of the kernel double alpha = 0.85; int innerVectorLength = 3; // Create the kernel. Note that the input vector will be given out automatically DynamicTimeWarping target = new DynamicTimeWarping(innerVectorLength, alpha); // When using variable-length kernels, specify 0 as the input length. KernelSupportVectorMachine svm = new KernelSupportVectorMachine(target, 0); // Create the Sequential Minimal Optimization as usual SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, sequences, outputs); smo.Complexity = 1.5; double error = smo.Run(); // Computing the training values var a0 = svm.Compute(sequences[0]); var a1 = svm.Compute(sequences[1]); var a2 = svm.Compute(sequences[2]); var a3 = svm.Compute(sequences[3]); Assert.AreEqual(-1, System.Math.Sign(a0)); Assert.AreEqual(-1, System.Math.Sign(a1)); Assert.AreEqual(+1, System.Math.Sign(a2)); Assert.AreEqual(+1, System.Math.Sign(a3)); // Computing a new testing value double[] test = { 1, 0, 1, 0, 0, 2, 0, 1, 3, }; var a4 = svm.Compute(test); }
public void LargeLearningTest1() { // Create large input vectors int rows = 1000; int dimension = 10000; double[][] inputs = new double[rows][]; int[] outputs = new int[rows]; Random rnd = new Random(); for (int i = 0; i < inputs.Length; i++) { inputs[i] = new double[dimension]; if (i > rows / 2) { for (int j = 0; j < dimension; j++) inputs[i][j] = rnd.NextDouble(); outputs[i] = -1; } else { for (int j = 0; j < dimension; j++) inputs[i][j] = rnd.NextDouble() * 4.21 + 5; outputs[i] = +1; } } KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Polynomial(2), dimension); SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, outputs) { UseComplexityHeuristic = true }; double error = smo.Run(); Assert.AreEqual(0, error); }
/// <summary> /// Creates a new linear <see cref="KernelSupportVectorMachine"/> /// with the given set of linear <paramref name="weights"/>. /// </summary> /// /// <param name="weights">The machine's linear coefficients. The first element is used as the threshold (intercept); the remaining elements become the linear weights.</param> /// /// <returns> /// A <see cref="KernelSupportVectorMachine"/> whose linear coefficients /// are defined by the given <paramref name="weights"/> vector. /// </returns> /// public new static KernelSupportVectorMachine FromWeights(double[] weights) { var svm = new KernelSupportVectorMachine(new Linear(0), weights.Length - 1); for (int i = 0; i < svm.Weights.Length; i++) svm.Weights[i] = weights[i + 1]; svm.Threshold = weights[0]; return svm; }
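// Minimal usage sketch with hypothetical values: the first element of the weights vector
// becomes the threshold (intercept) and the remaining elements become the linear coefficients,
// mirroring the loop above.
var linear = KernelSupportVectorMachine.FromWeights(new double[] { -3, 2, 2 });
// linear.Threshold == -3; linear.Weights == { 2, 2 }  (a machine over 2 inputs)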
public void UseClassProportionsTest() { var dataset = KernelSupportVectorMachineTest.training; var inputs = dataset.Submatrix(null, 0, 3); var labels = Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32(); Gaussian kernel = Gaussian.Estimate(inputs); var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length); var smo = new SequentialMinimalOptimization(machine, inputs, labels); smo.Complexity = 1.0; smo.UseClassProportions = true; double error = smo.Run(); Assert.AreEqual(1, smo.Complexity); Assert.AreEqual(0.4, smo.PositiveWeight); Assert.AreEqual(1.0, smo.NegativeWeight); Assert.AreEqual(0.4, smo.WeightRatio, 1e-10); Assert.AreEqual(0.2857142857142857, error); Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma); Assert.AreEqual(26, machine.SupportVectors.Length); int[] actual = new int[labels.Length]; for (int i = 0; i < actual.Length; i++) actual[i] = Math.Sign(machine.Compute(inputs[i])); ConfusionMatrix matrix = new ConfusionMatrix(actual, labels); Assert.AreEqual(12, matrix.FalseNegatives); Assert.AreEqual(0, matrix.FalsePositives); Assert.AreEqual(0, matrix.TruePositives); Assert.AreEqual(30, matrix.TrueNegatives); }
/// <summary> /// Compute SVM output with support vector sharing. /// </summary> /// private int computeSequential(int classA, int classB, double[] input, out double output, Cache cache) { // Get the machine for this problem KernelSupportVectorMachine machine = machines[classA - 1][classB]; // Get the vectors shared among all machines int[] vectors = cache.Vectors[classA - 1][classB]; double[] values = cache.Products; double sum = machine.Threshold; if (machine.IsCompact) { // For linear machines, computation is simpler for (int i = 0; i < machine.Weights.Length; i++) { sum += machine.Weights[i] * input[i]; } } else { // For each support vector in the machine for (int i = 0; i < vectors.Length; i++) { double value; // Check if it is a shared vector int j = vectors[i]; if (j >= 0) { // This is a shared vector. Check // if it has already been computed if (!Double.IsNaN(values[j])) { // Yes, it has. Retrieve the value from the cache value = values[j]; } else { // No, it has not. Compute and store the computed value in the cache value = values[j] = machine.Kernel.Function(machine.SupportVectors[i], input); Interlocked.Increment(ref cache.Evaluations); } } else { // This vector is not shared by any other machine. No need to cache value = machine.Kernel.Function(machine.SupportVectors[i], input); Interlocked.Increment(ref cache.Evaluations); } sum += machine.Weights[i] * value; } } // Produce probabilities if required if (machine.IsProbabilistic) { output = machine.Link.Inverse(sum); return(output >= 0.5 ? +1 : -1); } else { output = sum; return(output >= 0 ? +1 : -1); } }
public void DynamicalTimeWarpingConstructorTest2() { // Declare some testing data double[][] inputs = { // Class -1 new double[] { 0,1,1,0 }, new double[] { 0,0,1,0 }, new double[] { 0,1,1,1,0 }, new double[] { 0,1,0 }, // Class +1 new double[] { 1,0,0,1 }, new double[] { 1,1,0,1 }, new double[] { 1,0,0,0,1 }, new double[] { 1,0,1 }, new double[] { 1,0,0,0,1,1 } }; int[] outputs = { -1,-1,-1,-1, // First four sequences are of class -1 1, 1, 1, 1, 1 // Last five sequences are of class +1 }; // Set the parameters of the kernel double alpha = 1.0; int degree = 1; int innerVectorLength = 1; // Create the kernel. Note that the input vector will be given out automatically DynamicTimeWarping target = new DynamicTimeWarping(innerVectorLength, alpha, degree); // When using variable-length kernels, specify 0 as the input length. KernelSupportVectorMachine svm = new KernelSupportVectorMachine(target, 0); // Create the Sequential Minimal Optimization as usual SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, outputs); smo.Complexity = 1.5; double error = smo.Run(); // Check if the model has learnt the sequences correctly. for (int i = 0; i < inputs.Length; i++) { int expected = outputs[i]; int actual = System.Math.Sign(svm.Compute(inputs[i])); Assert.AreEqual(expected, actual); } // Testing new sequences Assert.AreEqual(-1,System.Math.Sign(svm.Compute(new double[] { 0, 1, 1, 0, 0 }))); Assert.AreEqual(+1,System.Math.Sign(svm.Compute(new double[] { 1, 1, 0, 0, 1, 1 }))); }
public void WeightRatioTest() { var dataset = KernelSupportVectorMachineTest.training; var inputs = dataset.Submatrix(null, 0, 3); var labels = Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32(); Gaussian kernel = Gaussian.Estimate(inputs); { var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length); var smo = new SequentialMinimalOptimization(machine, inputs, labels); smo.Complexity = 1.0; smo.WeightRatio = 10; double error = smo.Run(); Assert.AreEqual(1.0, smo.PositiveWeight); Assert.AreEqual(0.1, smo.NegativeWeight); Assert.AreEqual(0.7142857142857143, error); Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma); Assert.AreEqual(39, machine.SupportVectors.Length); int[] actual = new int[labels.Length]; for (int i = 0; i < actual.Length; i++) actual[i] = Math.Sign(machine.Compute(inputs[i])); ConfusionMatrix matrix = new ConfusionMatrix(actual, labels); Assert.AreEqual(12, matrix.TruePositives); // has more importance Assert.AreEqual(0, matrix.FalseNegatives); // has more importance Assert.AreEqual(30, matrix.FalsePositives); Assert.AreEqual(0, matrix.TrueNegatives); Assert.AreEqual(1.0, matrix.Sensitivity); Assert.AreEqual(0.0, matrix.Specificity); Assert.AreEqual(0.44444444444444448, matrix.FScore); Assert.AreEqual(0.0, matrix.MatthewsCorrelationCoefficient); } { var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length); var smo = new SequentialMinimalOptimization(machine, inputs, labels); smo.Complexity = 1.0; smo.WeightRatio = 0.1; double error = smo.Run(); Assert.AreEqual(0.1, smo.PositiveWeight); Assert.AreEqual(1.0, smo.NegativeWeight); Assert.AreEqual(0.21428571428571427, error); Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma); Assert.AreEqual(18, machine.SupportVectors.Length); int[] actual = new int[labels.Length]; for (int i = 0; i < actual.Length; i++) actual[i] = Math.Sign(machine.Compute(inputs[i])); ConfusionMatrix matrix = new ConfusionMatrix(actual, labels); Assert.AreEqual(8, matrix.FalseNegatives); Assert.AreEqual(1, matrix.FalsePositives); // has more importance Assert.AreEqual(4, matrix.TruePositives); Assert.AreEqual(29, matrix.TrueNegatives); // has more importance Assert.AreEqual(0.33333333333333331, matrix.Sensitivity); Assert.AreEqual(0.96666666666666667, matrix.Specificity); Assert.AreEqual(0.47058823529411764, matrix.FScore); Assert.AreEqual(0.41849149947774944, matrix.MatthewsCorrelationCoefficient); } }
/// <summary> /// Creates a new <see cref="KernelSupportVectorMachine"/> that is /// completely equivalent to a <see cref="LogisticRegression"/>. /// </summary> /// /// <param name="regression">The <see cref="LogisticRegression"/> to be converted.</param> /// /// <returns> /// A <see cref="KernelSupportVectorMachine"/> whose linear weights /// are equivalent to the given <see cref="LogisticRegression"/>'s /// <see cref="GeneralizedLinearRegression.Coefficients"> linear /// coefficients</see>, properly configured with a <see cref="LogitLinkFunction"/>. /// </returns> /// public new static KernelSupportVectorMachine FromLogisticRegression(LogisticRegression regression) { double[] weights = regression.Coefficients; var svm = new KernelSupportVectorMachine(new Linear(), regression.Inputs); for (int i = 0; i < svm.Weights.Length; i++) svm.Weights[i] = weights[i + 1]; svm.Threshold = regression.Intercept; svm.Link = new LogitLinkFunction(1, 0); return svm; }
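// Usage sketch, assuming an already-fitted LogisticRegression named "regression": the
// conversion copies its coefficients and intercept and attaches a LogitLinkFunction, so the
// resulting machine can produce probabilistic outputs (compare the machine.IsProbabilistic
// branches in the multiclass Compute methods elsewhere in this collection).
var probabilisticSvm = KernelSupportVectorMachine.FromLogisticRegression(regression);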
public void FixedWeightsTest() { var dataset = KernelSupportVectorMachineTest.training; var inputs = dataset.Submatrix(null, 0, 3); var labels = Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32(); KernelSupportVectorMachine machine = new KernelSupportVectorMachine( Gaussian.Estimate(inputs), inputs[0].Length); var smo = new SequentialMinimalOptimization(machine, inputs, labels); smo.Complexity = 10; double error = smo.Run(); Assert.AreEqual(0.19047619047619047, error); Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma); Assert.AreEqual(29, machine.SupportVectors.Length); double[] expectedWeights = { 1.65717694716503, 1.20005456611466, -5.70824245415995, 10, 10, -2.38755497916487, 10, -8.15723436363058, 10, -10, 10, 10, -0.188634936781317, -5.4354281009458, -8.48341139483265, -5.91105702760141, -5.71489190049223, 10, -2.37289205235858, -3.33031262413522, -1.97545116517677, 10, -10, -9.563186799279, -3.917941544845, -0.532584110773336, 4.81951847548326, 0.343668292727091, -4.34159482731336 }; Assert.IsTrue(expectedWeights.IsEqual(machine.Weights, 1e-6)); int[] actual = new int[labels.Length]; for (int i = 0; i < actual.Length; i++) actual[i] = Math.Sign(machine.Compute(inputs[i])); ConfusionMatrix matrix = new ConfusionMatrix(actual, labels); Assert.AreEqual(8, matrix.FalseNegatives); Assert.AreEqual(0, matrix.FalsePositives); Assert.AreEqual(4, matrix.TruePositives); Assert.AreEqual(30, matrix.TrueNegatives); Assert.AreEqual(1 / 3.0, matrix.Sensitivity); Assert.AreEqual(1, matrix.Specificity); Assert.AreEqual(0.5, matrix.FScore); Assert.AreEqual(0.5129891760425771, matrix.MatthewsCorrelationCoefficient); }
private void button3_Click(object sender, EventArgs e) { string[] Second = File.ReadAllLines(textBox14.Text); string[] First = File.ReadAllLines(textBox13.Text); List<double[]> F = new List<double[]>(); List<double[]> S = new List<double[]>(); double Alpha1Thresh = int.MaxValue; //2000;// int.MaxValue;// double Alpha2Thresh = int.MaxValue; //2000; // for (int i=0;i<First.Length;i++) { string[] s1 = First[i].Split(' '); if ((double.Parse(s1[2]) < Alpha1Thresh) && (double.Parse(s1[3]) < Alpha2Thresh)) { double[] ar = new double[VectorSize]; double sum = 0; for (int j = 0; j < VectorSize; j++) { ar[j] = double.Parse(s1[j]); if ((j < VectorSize - 2) && (2<=j )) sum += ar[j]; } for (int j = 2; j < VectorSize-2; j++) ar[j] = ar[j] / 1000; if (ar[0] > 2000) { ar[0] = 2000; } if (ar[1] > 2000) { ar[1] = 2000; } ar[0] = ar[0] / 2000; ar[1] = ar[1] / 2000; ar[VectorSize - 2] = ar[VectorSize - 2] / 100; ar[VectorSize - 1] = ar[VectorSize - 1] / 100; F.Add(ar); } } for (int i = 0; i < Second.Length; i++) { string[] s1 = Second[i].Split(' '); if ((double.Parse(s1[2]) < Alpha1Thresh) && (double.Parse(s1[3]) < Alpha2Thresh)) { double[] ar = new double[VectorSize]; double sum = 0; for (int j = 0; j < VectorSize; j++) { ar[j] = double.Parse(s1[j]); if ((j < VectorSize - 2) && (2 <= j)) sum += ar[j]; } for (int j = 2; j < VectorSize - 2; j++) ar[j] = ar[j] / 1000; if (ar[0]>2000) { ar[0] = 2000; } if (ar[1] > 2000) { ar[1] = 2000; } ar[0] = ar[0] / 2000; ar[1] = ar[1] / 2000; ar[VectorSize - 2] = ar[VectorSize - 2] / 100; ar[VectorSize - 1] = ar[VectorSize - 1] / 100; S.Add(ar); } } int min = Math.Min(F.Count, S.Count); double[][] inputs = new double[2*min][]; int[] outputs = new int[2*min]; int VS = VectorSize; //HERE for (int j=0;j<min;j++) { inputs[j] = new double[VS]; inputs[j + min] = new double[VS]; for (int i = 0; i < VS; i++) // for (int i = VectorSize - 2; i < VectorSize; i++)//HERE { inputs[j][i] = F[j][i];//HERE inputs[j + min][i] = S[j][i];//HERE // inputs[j][i - VectorSize + 2] = F[j][i];//HERE // inputs[j + min][i - VectorSize + 2] = S[j][i];//HERE } outputs[j] = -1; outputs[j + min] = 1; } // Create the specified Kernel IKernel kernel = new Gaussian((double)0.560); // IKernel kernel = new Polynomial(5, 500.0); // Creates the Support Vector Machine for VS input variables svm = new KernelSupportVectorMachine(kernel, inputs: VS); // Creates a new instance of the SMO learning algorithm var smo = new SequentialMinimalOptimization(svm, inputs, outputs) { // Set learning parameters Complexity = (double)1.50, Tolerance = (double)0.001, PositiveWeight = (double)1.00, NegativeWeight = (double)1.00, }; try { // Run double error = smo.Run(); } catch (ConvergenceException) { } // double d = svm.Compute(inputs[10]); points.Clear(); Points = 0; points_mid.Clear(); timer3.Enabled = true; }
public void TransformTest() { var inputs = yinyang.Submatrix(null, 0, 1).ToArray(); var labels = yinyang.GetColumn(2).ToInt32(); ConfusionMatrix actual, expected; SequentialMinimalOptimization a, b; var kernel = new Polynomial(2, 0); { var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length); a = new SequentialMinimalOptimization(machine, inputs, labels); a.UseComplexityHeuristic = true; a.Run(); int[] values = new int[labels.Length]; for (int i = 0; i < values.Length; i++) values[i] = Math.Sign(machine.Compute(inputs[i])); expected = new ConfusionMatrix(values, labels); } { var projection = inputs.Apply(kernel.Transform); var machine = new SupportVectorMachine(projection[0].Length); b = new SequentialMinimalOptimization(machine, projection, labels); b.UseComplexityHeuristic = true; b.Run(); int[] values = new int[labels.Length]; for (int i = 0; i < values.Length; i++) values[i] = Math.Sign(machine.Compute(projection[i])); actual = new ConfusionMatrix(values, labels); } Assert.AreEqual(a.Complexity, b.Complexity, 1e-15); Assert.AreEqual(expected.TrueNegatives, actual.TrueNegatives); Assert.AreEqual(expected.TruePositives, actual.TruePositives); Assert.AreEqual(expected.FalseNegatives, actual.FalseNegatives); Assert.AreEqual(expected.FalsePositives, actual.FalsePositives); }