public SupportVectorMachine<Polynomial> v3_1_0()
{
    ksvm = new SupportVectorMachine<Polynomial>(inputs: 2, kernel: new Polynomial(2));

    smo = new SequentialMinimalOptimization<Polynomial>()
    {
        Model = ksvm
    };

    smo.Learn(inputs, outputs);

    return ksvm;
}
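// A minimal usage sketch for the machine trained above. It assumes the same
// class-level fields (inputs, outputs, ksvm, smo) that v3_1_0() relies on;
// the query vector below is illustrative only.
public void v3_1_0_usage_sketch()
{
    SupportVectorMachine<Polynomial> machine = v3_1_0();

    // In Accord.NET 3.x, Decide returns a boolean class label for binary SVMs
    bool decision = machine.Decide(new double[] { 0, 1 });
}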
/// <summary>
///   Creates a Support Vector Machine and teaches it to recognize
///   the previously loaded dataset using the current UI settings.
/// </summary>
/// 
private void btnCreate_Click(object sender, EventArgs e)
{
    if (dgvLearningSource.DataSource == null)
    {
        MessageBox.Show("Please load some data first.");
        return;
    }

    // Finish and save any pending changes to the given data
    dgvLearningSource.EndEdit();

    // Create a matrix from the entire source data table
    double[,] table = (dgvLearningSource.DataSource as DataTable).ToMatrix(out columnNames);

    // Get only the input vector values (first two columns)
    double[][] inputs = table.GetColumns(0, 1).ToJagged();

    // Get only the output labels (last column)
    int[] outputs = table.GetColumn(2).ToInt32();

    // Create a new instance of the SMO learning algorithm
    var smo = new SequentialMinimalOptimization<IKernel>()
    {
        // Set learning parameters
        Complexity = (double)numC.Value,
        Tolerance = (double)numT.Value,
        PositiveWeight = (double)numPositiveWeight.Value,
        NegativeWeight = (double)numNegativeWeight.Value,
        Kernel = createKernel()
    };

    try
    {
        // Run the learning algorithm
        svm = smo.Learn(inputs, outputs);

        lbStatus.Text = "Training complete!";
    }
    catch (ConvergenceException)
    {
        lbStatus.Text = "Convergence could not be attained. " +
            "The learned machine might still be usable.";
    }

    createSurface(table);

    // Check if we got support vectors
    if (svm.SupportVectors == null || svm.SupportVectors.Length == 0)
    {
        dgvSupportVectors.DataSource = null;
        graphSupportVectors.GraphPane.CurveList.Clear();
        return;
    }

    // Show support vectors on the Support Vectors tab page
    double[][] supportVectorsWeights = svm.SupportVectors.InsertColumn(svm.Weights);

    string[] supportVectorNames = columnNames.RemoveAt(columnNames.Length - 1).Concatenate("Weight");

    dgvSupportVectors.DataSource = new ArrayDataView(supportVectorsWeights, supportVectorNames);

    // Show the support vector labels on the scatter plot
    double[] supportVectorLabels = new double[svm.SupportVectors.Length];
    for (int i = 0; i < supportVectorLabels.Length; i++)
    {
        int j = inputs.Find(sv => sv == svm.SupportVectors[i])[0];
        supportVectorLabels[i] = outputs[j];
    }

    double[][] graph = svm.SupportVectors.InsertColumn(supportVectorLabels);

    CreateScatterplot(graphSupportVectors, graph.ToMatrix());
}
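// createKernel() is referenced above but not shown in this excerpt. Below is
// a minimal sketch of what such a helper might look like; the control names
// (rbGaussian, numSigma, numDegree, numConstant) are hypothetical stand-ins
// for whatever the form actually exposes.
private IKernel createKernel()
{
    if (rbGaussian.Checked)
        return new Gaussian((double)numSigma.Value);

    // Otherwise fall back to a polynomial kernel with the selected
    // degree and constant term
    return new Polynomial((int)numDegree.Value, (double)numConstant.Value);
}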
public void learn_linear()
{
    #region doc_xor_linear
    // As an example, we will try to learn a linear machine that can
    // replicate the "exclusive-or" logical function. However, since we
    // will be using a linear SVM, we will not be able to solve this
    // problem perfectly as the XOR is a non-linear classification problem:
    double[][] inputs =
    {
        new double[] { 0, 0 }, // the XOR function takes two booleans
        new double[] { 0, 1 }, // and computes their exclusive or: the
        new double[] { 1, 0 }, // output is true only if the two booleans
        new double[] { 1, 1 }  // are different
    };

    int[] xor = // this is the output of the xor function
    {
        0, // 0 xor 0 = 0 (inputs are equal)
        1, // 0 xor 1 = 1 (inputs are different)
        1, // 1 xor 0 = 1 (inputs are different)
        0, // 1 xor 1 = 0 (inputs are equal)
    };

    // Now, we can create the sequential minimal optimization teacher
    var learn = new SequentialMinimalOptimization()
    {
        UseComplexityHeuristic = true,
        UseKernelEstimation = false
    };

    // And then we can obtain a trained SVM by calling its Learn method
    SupportVectorMachine svm = learn.Learn(inputs, xor);

    // Finally, we can obtain the decisions predicted by the machine:
    bool[] prediction = svm.Decide(inputs);
    #endregion

    // The linear machine cannot separate XOR, so all predictions
    // collapse to the same class:
    Assert.AreEqual(prediction[0], false);
    Assert.AreEqual(prediction[1], false);
    Assert.AreEqual(prediction[2], false);
    Assert.AreEqual(prediction[3], false);

    int[] or = // this is the output of the or function
    {
        0, // 0 or 0 = 0
        1, // 0 or 1 = 1
        1, // 1 or 0 = 1
        1, // 1 or 1 = 1
    };

    // Unlike XOR, the OR function is linearly separable:
    learn = new SequentialMinimalOptimization()
    {
        Complexity = 1e+8,
        UseKernelEstimation = false
    };

    svm = learn.Learn(inputs, or);

    prediction = svm.Decide(inputs);

    Assert.AreEqual(prediction[0], false);
    Assert.AreEqual(prediction[1], true);
    Assert.AreEqual(prediction[2], true);
    Assert.AreEqual(prediction[3], true);
}
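// For contrast, a non-linear kernel can solve XOR exactly. This is a minimal
// sketch using the generic SMO teacher with a Gaussian kernel; the two
// heuristics below let the teacher estimate suitable parameters from the data.
public void learn_gaussian_sketch()
{
    double[][] inputs =
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 }
    };

    int[] xor = { 0, 1, 1, 0 };

    var learn = new SequentialMinimalOptimization<Gaussian>()
    {
        UseComplexityHeuristic = true, // estimate C from the data
        UseKernelEstimation = true     // estimate the kernel's sigma as well
    };

    SupportVectorMachine<Gaussian> svm = learn.Learn(inputs, xor);

    // With a Gaussian kernel, all four XOR cases can be predicted correctly
    bool[] prediction = svm.Decide(inputs);
}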
public void linear_without_threshold_doesnt_solve_xor()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] xor = { -1, 1, 1, -1 };

    // Create the sequential minimal optimization teacher
    var learn = new SequentialMinimalOptimization()
    {
        Complexity = 1e-5
    };

    // Run the learning algorithm
    SupportVectorMachine machine = learn.Learn(inputs, xor);

    bool[] output = machine.Decide(inputs);

    // The linear machine still cannot separate XOR: every sample
    // ends up on the same side of the hyperplane
    for (int i = 0; i < output.Length; i++)
        Assert.AreEqual(false, output[i]);
}
public void learn_sparse_kernel()
{
    #region doc_xor_sparse
    // As an example, we will try to learn a decision machine
    // that can replicate the "exclusive-or" logical function:
    Sparse<double>[] inputs =
    {
        Sparse.FromDense(new double[] { 0, 0 }), // the XOR function takes two booleans
        Sparse.FromDense(new double[] { 0, 1 }), // and computes their exclusive or: the
        Sparse.FromDense(new double[] { 1, 0 }), // output is true only if the two booleans
        Sparse.FromDense(new double[] { 1, 1 })  // are different
    };

    int[] xor = // this is the output of the xor function
    {
        0, // 0 xor 0 = 0 (inputs are equal)
        1, // 0 xor 1 = 1 (inputs are different)
        1, // 1 xor 0 = 1 (inputs are different)
        0, // 1 xor 1 = 0 (inputs are equal)
    };

    // Now, we can create the sequential minimal optimization teacher
    var learn = new SequentialMinimalOptimization<Gaussian, Sparse<double>>()
    {
        UseComplexityHeuristic = true,
        UseKernelEstimation = true
    };

    // And then we can obtain a trained SVM by calling its Learn method
    var svm = learn.Learn(inputs, xor);

    // Finally, we can obtain the decisions predicted by the machine:
    bool[] prediction = svm.Decide(inputs);
    #endregion

    Assert.AreEqual(prediction, Classes.Decide(xor));
}
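// Sparse inputs do not have to come from dense arrays. A minimal sketch of
// building a vector directly from index/value pairs follows; it assumes the
// Sparse<T>(int[] indices, T[] values) constructor and a ToDense(int) overload
// are available, as in recent Accord.Math versions.
public void sparse_construction_sketch()
{
    // Only nonzero positions are stored; { 1, 0 } keeps just index 0
    var v = new Sparse<double>(new[] { 0 }, new[] { 1.0 });

    // Converting back to a dense vector recovers the original layout
    double[] dense = v.ToDense(2); // { 1, 0 }
}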
public void learn_precomputed()
{
    #region doc_precomputed
    // As an example, we will try to learn a decision machine
    // that can replicate the "exclusive-or" logical function:
    double[][] inputs =
    {
        new double[] { 0, 0 }, // the XOR function takes two booleans
        new double[] { 0, 1 }, // and computes their exclusive or: the
        new double[] { 1, 0 }, // output is true only if the two booleans
        new double[] { 1, 1 }  // are different
    };

    int[] xor = // this is the output of the xor function
    {
        0, // 0 xor 0 = 0 (inputs are equal)
        1, // 0 xor 1 = 1 (inputs are different)
        1, // 1 xor 0 = 1 (inputs are different)
        0, // 1 xor 1 = 0 (inputs are equal)
    };

    // Let's use a Gaussian kernel
    var kernel = new Gaussian(0.1);

    // Create a pre-computed Gaussian kernel matrix
    var precomputed = new Precomputed(kernel.ToJagged(inputs));

    // Now, we can create the sequential minimal optimization teacher
    var learn = new SequentialMinimalOptimization<Precomputed, int>()
    {
        Kernel = precomputed // set the precomputed kernel we created
    };

    // And then we can obtain the SVM by using Learn
    var svm = learn.Learn(precomputed.Indices, xor);

    // Finally, we can obtain the decisions predicted by the machine:
    bool[] prediction = svm.Decide(precomputed.Indices);

    // We can also compute the machine's predictions for new samples
    double[][] sample =
    {
        new double[] { 0, 1 }
    };

    // Update the precomputed kernel with the new samples
    precomputed = new Precomputed(kernel.ToJagged2(inputs, sample));

    // Update the SVM kernel
    svm.Kernel = precomputed;

    // Compute the predictions for the new samples
    bool[] newPrediction = svm.Decide(precomputed.Indices);
    #endregion

    Assert.AreEqual(prediction, Classes.Decide(xor));
    Assert.AreEqual(newPrediction.Length, 1);
    Assert.AreEqual(newPrediction[0], true);
}
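// The precomputed matrix simply caches every pairwise kernel evaluation, so
// training indexes into it instead of re-evaluating the kernel. A minimal
// sketch of that equivalence (Gaussian implements IKernel.Function):
public void precomputed_matrix_sketch()
{
    double[][] inputs =
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 }
    };

    var kernel = new Gaussian(0.1);
    double[][] matrix = kernel.ToJagged(inputs);

    // Each entry [i][j] holds the kernel applied to samples i and j
    double expected = kernel.Function(inputs[0], inputs[1]);
    double actual = matrix[0][1]; // same value, looked up instead of computed
}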
public void learn_test()
{
    #region doc_learn
    // Example problem: note that the labels below assign the positive
    // class to equal inputs, i.e. the complement of the XOR function:
    double[][] inputs =
    {
        new double[] { 0, 0 }, // 0 xor 0 = 0 (label +1)
        new double[] { 0, 1 }, // 0 xor 1 = 1 (label -1)
        new double[] { 1, 0 }, // 1 xor 0 = 1 (label -1)
        new double[] { 1, 1 }  // 1 xor 1 = 0 (label +1)
    };

    int[] outputs = // class labels (0 is treated as the negative class)
    {
        1, 0, 0, 1
    };

    // Instantiate a new SMO learning algorithm for SVMs
    var smo = new SequentialMinimalOptimization<Gaussian>()
    {
        Kernel = new Gaussian(0.1),
        Complexity = 1.0
    };

    // Learn a SVM using the algorithm
    var svm = smo.Learn(inputs, outputs);

    // Predict labels for each input sample
    bool[] predicted = svm.Decide(inputs);

    // Compute classification error
    double error = new ZeroOneLoss(outputs).Loss(predicted);

    // Instantiate the probabilistic calibration (using Platt's scaling)
    var calibration = new ProbabilisticOutputCalibration<Gaussian>(svm);

    // Run the calibration algorithm
    calibration.Learn(inputs, outputs); // returns the same machine

    // Predict probabilities for each input sample
    double[] probabilities = svm.Probability(inputs);

    // Compute the log-loss (binary cross-entropy) of the probabilities
    double loss = new BinaryCrossEntropyLoss(outputs).Loss(probabilities);

    // Compute the decision output for one of the input vectors,
    // while also retrieving the probability of the answer
    bool decision;
    double probability = svm.Probability(inputs[0], out decision);
    #endregion

    // At this point, decision is true with a probability of about 75%
    Assert.AreEqual(true, decision);
    Assert.AreEqual(0, error);
    Assert.AreEqual(5.5451735748925355, loss);
    Assert.AreEqual(0.74999975815069375, probability, 1e-10);
    Assert.IsTrue(svm.IsProbabilistic);
    Assert.AreEqual(-1.0986109988055595, svm.Weights[0]);
    Assert.AreEqual(1.0986109988055595, svm.Weights[1]);
    Assert.AreEqual(-1.0986109988055595, svm.Weights[2]);
    Assert.AreEqual(1.0986109988055595, svm.Weights[3]);
}
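// Besides hard decisions and calibrated probabilities, binary SVMs in
// Accord.NET also expose a raw Score: the signed distance from the decision
// boundary. A minimal self-contained sketch, assuming the same usings as the
// tests above:
public void decision_values_sketch()
{
    double[][] inputs =
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 }
    };

    int[] outputs = { 1, 0, 0, 1 };

    var svm = new SequentialMinimalOptimization<Gaussian>()
    {
        Kernel = new Gaussian(0.1)
    }.Learn(inputs, outputs);

    // Raw decision value: the signed distance from the separating boundary
    double score = svm.Score(inputs[0]);

    // Decide simply thresholds the score at zero
    bool decision = svm.Decide(inputs[0]); // same sign as `score`
}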