public void init(List<double[]> patterns, List<int> labels)
{
    IKernel kernel = createKernel(3);
    double complexity = 0.00001;
    double tolerance = 0.2;
    int cacheSize = 500;
    SelectionStrategy strategy = SelectionStrategy.Sequential;

    // Create the Multi-class Support Vector Machine using the selected Kernel
    ksvm = new MulticlassSupportVectorMachine(128, kernel, 4);

    // Create the learning algorithm using the machine and the training data
    MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(ksvm,
        patterns.ToArray(), labels.ToArray())
    {
        // Configure the learning algorithm
        Algorithm = (svm, classInputs, classOutputs, i, j) =>
            // Use Platt's Sequential Minimal Optimization algorithm
            new SequentialMinimalOptimization(svm, classInputs, classOutputs)
            {
                Complexity = complexity,
                Tolerance = tolerance,
                CacheSize = cacheSize,
                Strategy = strategy,
                Compact = (kernel is Linear)
            }
    };

    double error = ml.Run();
    Console.WriteLine(error);
}
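// Note: the createKernel helper called above (and again in a later snippet) is
// not shown in the original code. The following is only a minimal, hypothetical
// sketch, assuming the integer argument selects a polynomial degree; the real
// helper may build a different kernel from user parameters.
private static IKernel createKernel(int degree)
{
    // Degree 1 collapses to a linear kernel; higher degrees use a polynomial kernel.
    if (degree <= 1)
        return new Linear();
    return new Polynomial(degree);
}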
public double v3_0_1()
{
    var ksvm = new MulticlassSupportVectorMachine(784, new Polynomial(2), 10);
    var smo = new MulticlassSupportVectorLearning(ksvm,
        problem.Training.Inputs, problem.Training.Output);

    smo.Algorithm = (svm, x, y, i, j) =>
        new SequentialMinimalOptimization(svm, x, y);

    return smo.Run(computeError: false);
}
public void Setup()
{
    ksvm = new MulticlassSupportVectorMachine<Polynomial>(
        inputs: 2, kernel: new Polynomial(2), classes: 10);

    smo = new MulticlassSupportVectorLearning<Polynomial>()
    {
        Model = ksvm
    };
}
// Train on all existing training data
public SVM(string TrainedDataInputFile)
{
    _engine = new TesseractEngine(@"./tessdata3", "eng", EngineMode.TesseractAndCube);
    _engine.SetVariable("tessedit_char_whitelist", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ");
    _engine.SetVariable("tessedit_char_blacklist", "¢§+~»~`!@#$%^&*()_+-={}[]|\\:\";\'<>?,./");

    string[] TrainedData = Directory.GetFiles(TrainedDataInputFile, "*.png");

    double[][] inputs = new double[TrainedData.Length][];
    double[] InputArray = new double[784];
    int[] Outputs = new int[TrainedData.Length];

    for (int i = 0; i < TrainedData.Length; i++)
    {
        string filename = Path.GetFileNameWithoutExtension(TrainedData[i]);
        Bitmap TrainingImage = new Bitmap(TrainedData[i]);
        string[] split = filename.Split('.');

        // Binarize the 28x28 image: 1 for non-white pixels, 0 for white ones.
        for (int j = 0; j < 28; j++)
        {
            for (int k = 0; k < 28; k++)
            {
                if (!TrainingImage.GetPixel(j, k).Name.Equals("ffffffff"))
                    InputArray[j * 28 + k] = 1;
                else
                    InputArray[j * 28 + k] = 0;
            }
        }

        inputs[i] = InputArray;
        Outputs[i] = Convert.ToInt32(split[0]);
        InputArray = new double[784];
    }

    IKernel kernel = new Polynomial(2, 0);
    ksvm = new MulticlassSupportVectorMachine(784, kernel, 2);
    MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(ksvm, inputs, Outputs);

    // Set these three parameters carefully later.
    double complexity = 1;
    double epsilon = 0.001;
    double tolerance = 0.2;

    ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
    {
        var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs);
        smo.Complexity = complexity; // cost parameter for the SVM
        smo.Epsilon = epsilon;
        smo.Tolerance = tolerance;
        return smo;
    };

    // Train the machines. It should take a while.
    double error = ml.Run();
}
public MulticlassSupportVectorMachine<Polynomial> v3_1_0()
{
    ksvm = new MulticlassSupportVectorMachine<Polynomial>(
        inputs: 2, kernel: new Polynomial(2), classes: 10);

    smo = new MulticlassSupportVectorLearning<Polynomial>()
    {
        Model = ksvm
    };

    // Learn from the training inputs and their matching training outputs
    smo.Learn(problem.Training.Inputs, problem.Training.Output);

    return ksvm;
}
public void Aprender(IDadosSinaisEstaticos dados)
{
    var kernel = new Polynomial(degree: 3, constant: 1);

    svm = new MulticlassSupportVectorMachine(
        QuantidadeIndeterminadaDeCaracteristicas, kernel, dados.QuantidadeClasses);

    var teacher = new MulticlassSupportVectorLearning(svm,
        dados.CaracteristicasSinais, dados.IdentificadoresSinais)
    {
        Algorithm = (machine, classInputs, classOutputs, j, k) =>
            new SequentialMinimalOptimization(machine, classInputs, classOutputs)
            {
                Complexity = 1
            }
    };

    teacher.Run();
}
public void RunTest()
{
    Accord.Math.Tools.SetupGenerator(0);

    // Sample data: the following is a simple auto-association function
    // in which each input corresponds to its own class. This problem
    // should be easily solved using a Linear kernel.

    // Sample input data
    double[][] inputs =
    {
        new double[] { 0 },
        new double[] { 3 },
        new double[] { 1 },
        new double[] { 2 },
    };

    // Output for each of the inputs
    int[] outputs = { 0, 3, 1, 2 };

    // Create a new Linear kernel
    IKernel kernel = new Linear();

    // Create a new Multi-class Support Vector Machine for one input,
    // using the linear kernel and four disjoint classes.
    var machine = new MulticlassSupportVectorMachine(1, kernel, 4);

    // Create the Multi-class learning algorithm for the machine
    var teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs);

    // Configure the learning algorithm to use SMO to train the
    // underlying SVMs in each of the binary class subproblems.
    teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    // Run the learning algorithm
    double error = teacher.Run();

    Assert.AreEqual(0, error);
    Assert.AreEqual(0, machine.Compute(inputs[0]));
    Assert.AreEqual(3, machine.Compute(inputs[1]));
    Assert.AreEqual(1, machine.Compute(inputs[2]));
    Assert.AreEqual(2, machine.Compute(inputs[3]));
}
private void btnLearn_Click(object sender, EventArgs e)
{
    if (gridSamples.Rows.Count == 0)
    {
        MessageBox.Show("Please load or insert some data first.");
        return;
    }

    BindingList<Sequence> samples = database.Samples;
    BindingList<String> classes = database.Classes;

    double[][] inputs = new double[samples.Count][];
    int[] outputs = new int[samples.Count];

    for (int i = 0; i < inputs.Length; i++)
    {
        inputs[i] = samples[i].Input;
        outputs[i] = samples[i].Output;
    }

    svm = new MulticlassSupportVectorMachine(0, new DynamicTimeWarping(2), classes.Count);

    // Create the learning algorithm for the ensemble classifier
    var teacher = new MulticlassSupportVectorLearning(svm, inputs, outputs);

    teacher.Algorithm = (machine, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(machine, classInputs, classOutputs);

    // Run the learning algorithm
    double error = teacher.Run();

    // Classify all training instances
    foreach (var sample in database.Samples)
    {
        sample.RecognizedAs = svm.Compute(sample.Input);
    }

    foreach (DataGridViewRow row in gridSamples.Rows)
    {
        var sample = row.DataBoundItem as Sequence;
        row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
            Color.LightGreen : Color.White;
    }
}
public void Treinar(DadosTreinamento dadosTreinamento)
{
    var kernel = new Linear(1);
    var quantidadeCaracteristicas = dadosTreinamento.Entradas[0].Length;
    var quantidadeClasses = dadosTreinamento.Saidas.Distinct().Count();

    svm = new MulticlassSupportVectorMachine(quantidadeCaracteristicas, kernel, quantidadeClasses);

    var learning = new MulticlassSupportVectorLearning(svm,
        dadosTreinamento.Entradas, dadosTreinamento.Saidas)
    {
        Algorithm = (machine, inputs, outputs, a, b) =>
            new SequentialMinimalOptimization(machine, inputs, outputs)
            {
                Complexity = 1.0
            }
    };

    learning.Run();
}
/*************************** Primary Methods *******************************/

public double learning(DataSet trainSet)
{
    // Convert the training data
    var ingredient = convertToTrainIntputTable(trainSet);
    trainInputArray = (double[][])ingredient.Item1;
    trainOutputVector = (int[])ingredient.Item2;

    // Create the Multi-class learning algorithm for the SVM machine
    teacher = new MulticlassSupportVectorLearning(machine, trainInputArray, trainOutputVector);

    // Configure the learning algorithm to use SMO (Sequential Minimal Optimization)
    // to train the underlying SVMs in each of the binary class subproblems
    teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    // Run the learning algorithm and return the learning error
    return teacher.Run();
}
public ClassificationAlgorithmBuildResult Build(TrainingExample[] trainingSet,
    ClassificationAlgorithmParams classificationAlgorithmParams)
{
    var featuresDimensionality = trainingSet[0].Features.Length;
    var outputClassesCount = trainingSet.Max(x => x.ExpectedResult) + 1;

    var inputs = trainingSet.Select(example => example.Features.ToDoubleArray()).ToArray();
    var outputs = trainingSet.Select(example => example.ExpectedResult).ToArray();

    var classifier = new MulticlassSupportVectorMachine(featuresDimensionality, new Linear(), outputClassesCount);

    var teacher = new MulticlassSupportVectorLearning(classifier, inputs, outputs)
    {
        Algorithm = (svm, classInputs, classOutputs, i, j) =>
            new SequentialMinimalOptimization(svm, classInputs, classOutputs)
    };

    teacher.Run();

    var result = new SupportVectorMachineAlgorithm(classifier);
    return ClassificationAlgorithmBuildResult.Create(result, trainingSet);
}
private void CreateAndTrainKSVM(IList<double[]> inputs, IList<int> outputs)
{
    _ksvm = new MulticlassSupportVectorMachine(inputs[0].Length, Kernel, TargetDirectories.Count);

    MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(_ksvm,
        inputs.ToArray(), outputs.ToArray());

    double complexity = SequentialMinimalOptimization.EstimateComplexity(Kernel, inputs.ToArray());
    SelectionStrategy strategy = SelectionStrategy.Sequential;

    ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
    {
        return new SequentialMinimalOptimization(svm, classInputs, classOutputs)
        {
            Complexity = complexity,
            Tolerance = Tolerance,
            CacheSize = CacheSize,
            Strategy = strategy,
        };
    };

    Console.WriteLine("Starting SVM training");
    ml.Run();
    Console.WriteLine("SVM trained");
}
/// <summary>
///   Creates a Support Vector Machine and estimates
///   its parameters using a learning algorithm.
/// </summary>
///
private void btnRunTraining_Click(object sender, EventArgs e)
{
    if (dgvTrainingSource.Rows.Count == 0)
    {
        MessageBox.Show("Please load the training data before clicking this button");
        return;
    }

    lbStatus.Text = "Gathering data. This may take a while...";
    Application.DoEvents();

    // Extract inputs and outputs
    int rows = dgvTrainingSource.Rows.Count;
    double[][] input = new double[rows][];
    int[] output = new int[rows];
    for (int i = 0; i < rows; i++)
    {
        input[i] = (double[])dgvTrainingSource.Rows[i].Cells["colTrainingFeatures"].Value;
        output[i] = (int)dgvTrainingSource.Rows[i].Cells["colTrainingLabel"].Value;
    }

    // Create the chosen kernel function
    // using the user interface parameters
    //
    IKernel kernel = createKernel();

    // Extract training parameters from the interface
    double complexity = (double)numComplexity.Value;
    double tolerance = (double)numTolerance.Value;
    int cacheSize = (int)numCache.Value;
    SelectionStrategy strategy = (SelectionStrategy)cbStrategy.SelectedItem;

    // Create the Multi-class Support Vector Machine using the selected Kernel
    ksvm = new MulticlassSupportVectorMachine(1024, kernel, 10);

    // Create the learning algorithm using the machine and the training data
    MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(ksvm, input, output)
    {
        // Configure the learning algorithm
        Algorithm = (svm, classInputs, classOutputs, i, j) =>
            // Use Platt's Sequential Minimal Optimization algorithm
            new SequentialMinimalOptimization(svm, classInputs, classOutputs)
            {
                Complexity = complexity,
                Tolerance = tolerance,
                CacheSize = cacheSize,
                Strategy = strategy,
                Compact = (kernel is Linear)
            }
    };

    lbStatus.Text = "Training the classifiers. This may take a (very) significant amount of time...";
    Application.DoEvents();

    Stopwatch sw = Stopwatch.StartNew();

    // Train the machines. It should take a while.
    double error = ml.Run();

    sw.Stop();

    lbStatus.Text = String.Format(
        "Training complete ({0}ms, {1}er). Click Classify to test the classifiers.",
        sw.ElapsedMilliseconds, error);

    // Update the interface status
    btnClassifyVoting.Enabled = true;
    btnClassifyElimination.Enabled = true;
    btnCalibration.Enabled = true;

    // Populate the information tab with the machines
    dgvMachines.Rows.Clear();
    int k = 1;
    for (int i = 0; i < 10; i++)
    {
        for (int j = 0; j < i; j++, k++)
        {
            var machine = ksvm[i, j];
            int sv = machine.SupportVectors == null ? 0 : machine.SupportVectors.Length;
            int c = dgvMachines.Rows.Add(k, i + "-vs-" + j, sv, machine.Threshold);
            dgvMachines.Rows[c].Tag = machine;
        }
    }

    // approximate size in bytes =
    //   number of support vectors * number of doubles in a support vector * size of double
    int bytes = ksvm.SupportVectorUniqueCount * 1024 * sizeof(double);
    float megabytes = bytes / (1024f * 1024f); // use float literals to avoid integer division
    lbSize.Text = String.Format("{0} ({1} MB)", ksvm.SupportVectorUniqueCount, megabytes);
}
public void RunTest3()
{
    double[][] inputs =
    {
        // Tickets with the following structure should be assigned to location 0
        new double[] { 1, 4, 2, 0, 1 },
        new double[] { 1, 3, 2, 0, 1 },

        // Tickets with the following structure should be assigned to location 1
        new double[] { 3, 0, 1, 1, 1 },
        new double[] { 3, 0, 1, 0, 1 },

        // Tickets with the following structure should be assigned to location 2
        new double[] { 0, 5, 5, 5, 5 },
        new double[] { 1, 5, 5, 5, 5 },

        // Tickets with the following structure should be assigned to location 3
        new double[] { 1, 0, 0, 0, 0 },
        new double[] { 1, 0, 0, 0, 0 },
    };

    int[] outputs =
    {
        0, 0, // locations for the first two vectors above
        1, 1, // locations for the next two vectors above
        2, 2, // locations for the next two vectors above
        3, 3, // locations for the last two vectors above
    };

    // Since this is a simplification, a linear machine will suffice:
    IKernel kernel = new Linear();

    // Create the machine for feature vectors of length 5, for 4 possible locations
    MulticlassSupportVectorMachine machine = new MulticlassSupportVectorMachine(5, kernel, 4);

    // Create a new learning algorithm to train the machine
    MulticlassSupportVectorLearning target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

    // Use the standard SMO algorithm
    target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    // Train the machines
    double actual = target.Run();

    // Compute the answer for all training samples
    for (int i = 0; i < inputs.Length; i++)
    {
        double[] answersWeights;
        double answer = machine.Compute(inputs[i], MulticlassComputeMethod.Voting, out answersWeights);

        // Assert it has been classified correctly
        Assert.AreEqual(outputs[i], answer);

        // Assert the most probable answer is indeed the correct one
        int imax;
        Matrix.Max(answersWeights, out imax);
        Assert.AreEqual(answer, imax);
    }
}
public void multiclass_linear_smo_new_usage()
{
    // Let's say we have the following data to be classified
    // into three possible classes. Those are the samples:
    //
    double[][] inputs =
    {
        //               input         output
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 0, 0, 1, 0 }, //  0
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 1, 1, 1, 1 }, //  2
        new double[] { 1, 0, 1, 1 }, //  2
        new double[] { 1, 1, 0, 1 }, //  2
        new double[] { 0, 1, 1, 1 }, //  2
        new double[] { 1, 1, 1, 1 }, //  2
    };

    int[] outputs = // those are the class labels
    {
        0, 0, 0, 0, 0,
        1, 1, 1, 1, 1,
        2, 2, 2, 2, 2,
    };

    // Create a one-vs-one learning algorithm using SMO
    var teacher = new MulticlassSupportVectorLearning<Linear>();
    teacher.Learner = (p) => new SequentialMinimalOptimization<Linear>()
    {
        UseComplexityHeuristic = true
    };

#if DEBUG
    teacher.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

    // Learn a machine
    var machine = teacher.Learn(inputs, outputs);

    for (int i = 0; i < inputs.Length; i++)
    {
        double actual = machine.Decide(inputs[i]);
        double expected = outputs[i];
        Assert.AreEqual(expected, actual);
    }
}
public void ApplyTest2()
{
    // Suppose we have a data table relating the age of a person to their
    // categorical classification, such as "child", "adult" or "elder".

    // The Codification filter is able to extract those string labels and
    // transform them into discrete symbols, assigning integer labels to
    // each of them, such as "child" = 0, "adult" = 1, and "elder" = 2.

    // Create the aforementioned sample table
    DataTable table = new DataTable("Sample data");
    table.Columns.Add("Age", typeof(int));
    table.Columns.Add("Label", typeof(string));

    //            age  label
    table.Rows.Add(10, "child");
    table.Rows.Add(07, "child");
    table.Rows.Add(04, "child");
    table.Rows.Add(21, "adult");
    table.Rows.Add(27, "adult");
    table.Rows.Add(12, "child");
    table.Rows.Add(79, "elder");
    table.Rows.Add(40, "adult");
    table.Rows.Add(30, "adult");

    // Now, let's say we need to translate those text labels
    // into integer symbols. Let's use a Codification filter:
    Codification codebook = new Codification(table);

    // After that, we can use the codebook to "translate"
    // the text labels into discrete symbols, such as:
    int a = codebook.Translate("Label", "child"); // returns 0
    int b = codebook.Translate("Label", "adult"); // returns 1
    int c = codebook.Translate("Label", "elder"); // returns 2

    // We can also do the reverse:
    string labela = codebook.Translate("Label", 0); // returns "child"
    string labelb = codebook.Translate("Label", 1); // returns "adult"
    string labelc = codebook.Translate("Label", 2); // returns "elder"

    // We can also process an entire data table at once:
    DataTable result = codebook.Apply(table);

    // The resulting table can be transformed to a jagged array:
    double[][] matrix = Matrix.ToArray(result);

    // and the resulting matrix will be given by
    string str = matrix.ToString(CSharpJaggedMatrixFormatProvider.InvariantCulture);

    // str == new double[][]
    // {
    //     new double[] { 10, 0 },
    //     new double[] {  7, 0 },
    //     new double[] {  4, 0 },
    //     new double[] { 21, 1 },
    //     new double[] { 27, 1 },
    //     new double[] { 12, 0 },
    //     new double[] { 79, 2 },
    //     new double[] { 40, 1 },
    //     new double[] { 30, 1 }
    // };

    // Now we will be able to feed this matrix to any machine learning
    // algorithm without having to worry about text labels in our data:
    int classes = codebook["Label"].Symbols; // 3 classes (child, adult, elder)

    // Use the first column as input variables,
    // and the second column as output classes
    //
    double[][] inputs = matrix.GetColumns(0);
    int[] outputs = matrix.GetColumn(1).ToInt32();

    // Create a multi-class SVM for 1 input (Age) and 3 classes (Label)
    var machine = new MulticlassSupportVectorMachine(inputs: 1, classes: classes);

    // Create a Multi-class learning algorithm for the machine
    var teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs);

    // Configure the learning algorithm to use SMO to train the
    // underlying SVMs in each of the binary class subproblems.
    teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
    {
        return new SequentialMinimalOptimization(svm, classInputs, classOutputs)
        {
            Complexity = 1
        };
    };

    // Run the learning algorithm
    double error = teacher.Run();

    // After we have learned the machine, we can use it to classify
    // new data points, and use the codebook to translate the machine
    // outputs back to the original text labels:
    string result1 = codebook.Translate("Label", machine.Compute(10)); // child
    string result2 = codebook.Translate("Label", machine.Compute(40)); // adult
    string result3 = codebook.Translate("Label", machine.Compute(70)); // elder

    Assert.AreEqual(0, a);
    Assert.AreEqual(1, b);
    Assert.AreEqual(2, c);
    Assert.AreEqual("child", labela);
    Assert.AreEqual("adult", labelb);
    Assert.AreEqual("elder", labelc);
    Assert.AreEqual("child", result1);
    Assert.AreEqual("adult", result2);
    Assert.AreEqual("elder", result3);
}
public void RunTest2()
{
    double[][] inputs =
    {
        new double[] { 0, 1, 1, 0 }, // 0
        new double[] { 0, 1, 0, 0 }, // 0
        new double[] { 0, 0, 1, 0 }, // 0
        new double[] { 0, 1, 1, 0 }, // 0
        new double[] { 0, 1, 0, 0 }, // 0
        new double[] { 1, 0, 0, 0 }, // 1
        new double[] { 1, 0, 0, 0 }, // 1
        new double[] { 1, 0, 0, 1 }, // 1
        new double[] { 0, 0, 0, 1 }, // 1
        new double[] { 0, 0, 0, 1 }, // 1
        new double[] { 1, 1, 1, 1 }, // 2
        new double[] { 1, 0, 1, 1 }, // 2
        new double[] { 1, 1, 0, 1 }, // 2
        new double[] { 0, 1, 1, 1 }, // 2
        new double[] { 1, 1, 1, 1 }, // 2
    };

    int[] outputs =
    {
        0, 0, 0, 0, 0,
        1, 1, 1, 1, 1,
        2, 2, 2, 2, 2,
    };

    IKernel kernel = new Linear();
    MulticlassSupportVectorMachine machine = new MulticlassSupportVectorMachine(4, kernel, 3);
    MulticlassSupportVectorLearning target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

    target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    double actual = target.Run();
    double expected = 0;
    Assert.AreEqual(expected, actual);

    for (int i = 0; i < inputs.Length; i++)
    {
        actual = machine.Compute(inputs[i]);
        expected = outputs[i];
        Assert.AreEqual(expected, actual);
    }
}
public void learn_test()
{
    #region doc_learn
    // Generate always the same random numbers
    Accord.Math.Random.Generator.Seed = 0;

    // The following is a simple auto-association function in which
    // the last column of each input corresponds to its own class. This
    // problem should be easily solved using a Linear kernel.

    // Sample input data
    double[][] inputs =
    {
        new double[] { 1, 2, 0 },
        new double[] { 6, 2, 3 },
        new double[] { 1, 1, 1 },
        new double[] { 7, 6, 2 },
    };

    // Output for each of the inputs
    int[] outputs = { 0, 3, 1, 2 };

    // Create the multi-class learning algorithm for the machine
    var teacher = new MulticlassSupportVectorLearning<Linear>()
    {
        // Configure the learning algorithm to use SMO to train the
        // underlying SVMs in each of the binary class subproblems.
        Learner = (param) => new SequentialMinimalOptimization<Linear>()
        {
            // If you would like to use other kernels, simply replace
            // the generic parameter with the desired kernel class, such
            // as, for example, Polynomial or Gaussian:
            Kernel = new Linear() // use the Linear kernel
        }
    };

    // Estimate the multi-class support vector machine using the one-vs-one method
    MulticlassSupportVectorMachine<Linear> ovo = teacher.Learn(inputs, outputs);

    // Obtain class predictions for each sample
    int[] predicted = ovo.Decide(inputs);

    // Compute the classification error
    double error = new ZeroOneLoss(outputs).Loss(predicted);
    #endregion

    Assert.AreEqual(0, error);
    Assert.IsTrue(predicted.IsEqual(outputs));
    Assert.IsTrue(ovo.Scores(inputs[0]).IsEqual(new double[] { 0.62, -0.25, -0.59, -0.62 }, 1e-2));
    Assert.IsTrue(ovo.Scores(inputs[1]).IsEqual(new double[] { -0.62, -0.57, -0.13, 0.62 }, 1e-2));
    Assert.IsTrue(ovo.Scores(inputs[2]).IsEqual(new double[] { -0.25, 0.63, -0.63, -0.51 }, 1e-2));
}
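// The machine learned in the snippet above can be persisted and reloaded.
// A minimal sketch, assuming the Accord.IO.Serializer helpers available in
// Accord.NET 3.x; the file name "ovo-machine.bin" is purely illustrative.
public void save_and_reload_sketch(MulticlassSupportVectorMachine<Linear> ovo, double[][] inputs)
{
    // Persist the trained machine to disk
    Accord.IO.Serializer.Save(ovo, "ovo-machine.bin");

    // Later, reload the machine and classify exactly as before
    var reloaded = Accord.IO.Serializer.Load<MulticlassSupportVectorMachine<Linear>>("ovo-machine.bin");
    int[] predicted = reloaded.Decide(inputs);
}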
public void multiclass_precomputed_matrix_smo()
{
    #region doc_precomputed
    // Let's say we have the following data to be classified
    // into three possible classes. Those are the samples:
    //
    double[][] trainInputs =
    {
        //               input         output
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 0, 0, 1, 0 }, //  0
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 1, 1, 1, 1 }, //  2
        new double[] { 1, 0, 1, 1 }, //  2
        new double[] { 1, 1, 0, 1 }, //  2
        new double[] { 0, 1, 1, 1 }, //  2
        new double[] { 1, 1, 1, 1 }, //  2
    };

    int[] trainOutputs = // those are the training set class labels
    {
        0, 0, 0, 0, 0,
        1, 1, 1, 1, 1,
        2, 2, 2, 2, 2,
    };

    // Let's choose a kernel function
    Polynomial kernel = new Polynomial(2);

    // Get the kernel matrix for the training set
    double[][] K = kernel.ToJagged(trainInputs);

    // Create a pre-computed kernel
    var pre = new Precomputed(K);

    // Create a one-vs-one learning algorithm using SMO
    var teacher = new MulticlassSupportVectorLearning<Precomputed, int>()
    {
        Learner = (p) => new SequentialMinimalOptimization<Precomputed, int>()
        {
            Kernel = pre
        }
    };

#if DEBUG
    teacher.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

    // Learn a machine
    var machine = teacher.Learn(pre.Indices, trainOutputs);

    // Compute the machine's prediction for the training set
    int[] trainPrediction = machine.Decide(pre.Indices);

    // Evaluate prediction error for the training set using mean accuracy (mAcc)
    double trainingError = new ZeroOneLoss(trainOutputs).Loss(trainPrediction);

    // Now let's compute the machine's prediction for a test set
    double[][] testInputs = // test-set inputs
    {
        //               input         output
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 1, 1, 1, 1 }, //  2
    };

    int[] testOutputs = // those are the test set class labels
    {
        0, 0, 1, 2,
    };

    // Compute the precomputed matrix between the training and testing sets
    pre.Values = kernel.ToJagged2(trainInputs, testInputs);

    // Update the kernel
    machine.Kernel = pre;

    // Compute the machine's prediction for the test set
    int[] testPrediction = machine.Decide(pre.Indices);

    // Evaluate prediction error for the test set using mean accuracy (mAcc)
    double testError = new ZeroOneLoss(testOutputs).Loss(testPrediction);
    #endregion

    Assert.AreEqual(0, trainingError);
    Assert.AreEqual(0, testError);

    // Create a one-vs-one learning algorithm using SMO
    var teacher2 = new MulticlassSupportVectorLearning<Polynomial>()
    {
        Learner = (p) => new SequentialMinimalOptimization<Polynomial>()
        {
            Kernel = kernel
        }
    };

#if DEBUG
    teacher2.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

    // Learn a machine
    var expected = teacher2.Learn(trainInputs, trainOutputs);

    Assert.AreEqual(4, expected.NumberOfInputs);
    Assert.AreEqual(3, expected.NumberOfOutputs);
    Assert.AreEqual(0, machine.NumberOfInputs);
    Assert.AreEqual(3, machine.NumberOfOutputs);

    var machines = Enumerable.Zip(machine, expected, (a, b) => Tuple.Create(a.Value, b.Value));

    foreach (var pair in machines)
    {
        var a = pair.Item1;
        var e = pair.Item2;

        Assert.AreEqual(0, a.NumberOfInputs);
        Assert.AreEqual(2, a.NumberOfOutputs);
        Assert.AreEqual(4, e.NumberOfInputs);
        Assert.AreEqual(2, e.NumberOfOutputs);

        Assert.IsTrue(a.Weights.IsEqual(e.Weights));
    }
}
public void SparseLinearTest()
{
    MulticlassSupportVectorMachine<Linear> svm1;
    MulticlassSupportVectorMachine<Linear, Sparse<double>> svm2;

    {
        Accord.Math.Random.Generator.Seed = 0;

        MemoryStream file = new MemoryStream(
            Encoding.Default.GetBytes(Resources.iris_scale));

        var reader = new SparseReader(file, Encoding.Default);

        var samples = reader.ReadDenseToEnd();
        double[][] x = samples.Item1;
        int[] y = samples.Item2.ToMulticlass();

        var learner = new MulticlassSupportVectorLearning<Linear>()
        {
            Learner = (p) => new LinearDualCoordinateDescent<Linear>()
        };

        svm1 = learner.Learn(x, y);
    }

    {
        Accord.Math.Random.Generator.Seed = 0;

        MemoryStream file = new MemoryStream(
            Encoding.Default.GetBytes(Resources.iris_scale));

        // Create a new Sparse Sample Reader to read the given file
        var reader = new SparseReader(file, Encoding.Default);

        var samples = reader.ReadSparseToEnd();
        Sparse<double>[] x = samples.Item1;
        int[] y = samples.Item2.ToMulticlass();

        var learner = new MulticlassSupportVectorLearning<Linear, Sparse<double>>()
        {
            Learner = (p) => new LinearDualCoordinateDescent<Linear, Sparse<double>>()
        };

        svm2 = learner.Learn(x, y);
    }

    Assert.AreEqual(svm1.Models.Length, svm2.Models.Length);

    for (int i = 0; i < svm1.Models.Length; i++)
    {
        var ma = svm1[i].Value;
        var mb = svm2[i].Value;

        Assert.IsTrue(ma.Weights.IsEqual(mb.Weights));
        Assert.AreEqual(ma.SupportVectors.Length, mb.SupportVectors.Length);
        for (int j = 0; j < ma.SupportVectors.Length; j++)
        {
            double[] expected = ma.SupportVectors[j];
            double[] actual = mb.SupportVectors[j].ToDense(4);
            Assert.IsTrue(expected.IsEqual(actual, 1e-5));
        }
    }
}
/// <summary>
/// Core machine learning method for parsing csv data, training the svm, and calculating the accuracy.
/// </summary>
/// <param name="path">string - path to the csv file (training, cv, test).</param>
/// <param name="count">int - max number of rows to process. This is useful for preparing learning curves, using gradually increasing values. Use Int32.MaxValue to read all rows.</param>
/// <param name="machine">MulticlassSupportVectorMachine - leave null for initial training.</param>
/// <returns>MulticlassSupportVectorMachine</returns>
private static MulticlassSupportVectorMachine RunSvm(string path, int count,
    MulticlassSupportVectorMachine machine = null)
{
    double[][] inputs;
    int[] outputs;

    // Parse the csv file to get inputs and outputs.
    ReadData(path, count, out inputs, out outputs, new FrontLabelParser());

    if (machine == null)
    {
        // Training.
        MulticlassSupportVectorLearning teacher = null;

        // Create the svm.
        machine = new MulticlassSupportVectorMachine(_pixelCount, new Gaussian(_sigma), _classCount);

        teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs);
        teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            new SequentialMinimalOptimization(svm, classInputs, classOutputs)
            {
                CacheSize = 0
            };

        // Train the svm.
        Utility.ShowProgressFor(() => teacher.Run(), "Training");
    }

    // Calculate accuracy.
    double accuracy = Utility.ShowProgressFor<double>(
        () => Accuracy.CalculateAccuracy(machine, inputs, outputs), "Calculating Accuracy");

    Console.WriteLine("Accuracy: " + Math.Round(accuracy * 100, 2) + "%");

    return machine;
}
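// Utility.ShowProgressFor and Accuracy.CalculateAccuracy above are project-specific
// helpers whose implementations are not shown. A minimal sketch of what the accuracy
// computation might look like, assuming it is simply the fraction of correctly
// classified samples:
public static double CalculateAccuracy(MulticlassSupportVectorMachine machine,
    double[][] inputs, int[] outputs)
{
    int correct = 0;
    for (int i = 0; i < inputs.Length; i++)
        if (machine.Compute(inputs[i]) == outputs[i])
            correct++; // count exact label matches

    return correct / (double)inputs.Length;
}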
/// <summary>
///   Calibrates the current Support Vector Machine to produce
///   probabilistic outputs using ProbabilisticOutputLearning.
/// </summary>
///
private void btnRunCalibration_Click(object sender, EventArgs e)
{
    if (ksvm == null)
    {
        MessageBox.Show("Please train the machines first.");
        return;
    }

    // Extract inputs and outputs
    int rows = dgvTrainingSource.Rows.Count;
    double[][] input = new double[rows][];
    int[] output = new int[rows];
    for (int i = 0; i < rows; i++)
    {
        input[i] = (double[])dgvTrainingSource.Rows[i].Cells["colTrainingFeatures"].Value;
        output[i] = (int)dgvTrainingSource.Rows[i].Cells["colTrainingLabel"].Value;
    }

    // Create the calibration algorithm using the training data
    var ml = new MulticlassSupportVectorLearning(ksvm, input, output)
    {
        // Configure the calibration algorithm
        Algorithm = (svm, classInputs, classOutputs, i, j) =>
            new ProbabilisticOutputLearning(svm, classInputs, classOutputs)
    };

    lbStatus.Text = "Calibrating the classifiers. This may take a (very) significant amount of time...";
    Application.DoEvents();

    Stopwatch sw = Stopwatch.StartNew();

    // Calibrate the machines. It should take a while.
    double error = ml.Run();

    sw.Stop();

    lbStatus.Text = String.Format(
        "Calibration complete ({0}ms, {1}er). Click Classify to test the classifiers.",
        sw.ElapsedMilliseconds, error);

    btnClassifyVoting.Enabled = true;
}
// Creates the learning function for the MC-SVM: LinearDualCoordinateDescent
// with a hard-coded Complexity of 0.007
public double MCSVMLearn()
{
    mcsvmLearning = new MulticlassSupportVectorLearning(mcsvm, Inputs, Outputs);

    mcsvmLearning.Algorithm = (machine, inputs, outputs, class1, class2) =>
        new LinearDualCoordinateDescent(machine, inputs, outputs)
        {
            Complexity = 0.007
        };

    return mcsvmLearning.Run();
}
public void SerializeTest1()
{
    double[][] inputs =
    {
        new double[] { 1, 4, 2, 0, 1 },
        new double[] { 1, 3, 2, 0, 1 },
        new double[] { 3, 0, 1, 1, 1 },
        new double[] { 3, 0, 1, 0, 1 },
        new double[] { 0, 5, 5, 5, 5 },
        new double[] { 1, 5, 5, 5, 5 },
        new double[] { 1, 0, 0, 0, 0 },
        new double[] { 1, 0, 0, 0, 0 },
    };

    int[] outputs =
    {
        0, 0,
        1, 1,
        2, 2,
        3, 3,
    };

    IKernel kernel = new Linear();
    var msvm = new MulticlassSupportVectorMachine(5, kernel, 4);
    var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);

    smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    double expected = smo.Run();

    MemoryStream stream = new MemoryStream();

    // Save the machines
    msvm.Save(stream);

    // Rewind
    stream.Seek(0, SeekOrigin.Begin);

    // Reload the machines
    var target = MulticlassSupportVectorMachine.Load(stream);

    // Compute errors
    int count = 0;
    for (int i = 0; i < inputs.Length; i++)
    {
        double y = target.Compute(inputs[i]);
        if (y != outputs[i])
            count++;
    }

    double actual = (double)count / inputs.Length;

    Assert.AreEqual(expected, actual);
    Assert.AreEqual(msvm.Inputs, target.Inputs);
    Assert.AreEqual(msvm.Classes, target.Classes);

    for (int i = 0; i < msvm.Machines.Length; i++)
    {
        for (int j = 0; j < msvm.Machines.Length; j++)
        {
            var a = msvm[i, j];
            var b = target[i, j];

            if (i != j)
            {
                Assert.IsTrue(a.SupportVectors.IsEqual(b.SupportVectors));
            }
            else
            {
                Assert.IsNull(a);
                Assert.IsNull(b);
            }
        }
    }
}
public void ComputeTest1()
{
    double[][] inputs =
    {
        new double[] { 1, 4, 2, 0, 1 },
        new double[] { 1, 3, 2, 0, 1 },
        new double[] { 3, 0, 1, 1, 1 },
        new double[] { 3, 0, 1, 0, 1 },
        new double[] { 0, 5, 5, 5, 5 },
        new double[] { 1, 5, 5, 5, 5 },
        new double[] { 1, 0, 0, 0, 0 },
        new double[] { 1, 0, 0, 0, 0 },
    };

    int[] outputs =
    {
        0, 0,
        1, 1,
        2, 2,
        3, 3,
    };

    IKernel kernel = new Polynomial(2);
    var msvm = new MulticlassSupportVectorMachine(5, kernel, 4);
    var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);

    smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs)
        {
            Complexity = 1
        };

    Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

    double error = smo.Run();

    Assert.AreEqual(6, msvm.GetLastKernelEvaluations());

    int[] evals = new int[inputs.Length];
    int[] evalexp = { 8, 8, 7, 7, 7, 7, 6, 6 };

    for (int i = 0; i < inputs.Length; i++)
    {
        double expected = outputs[i];
        double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Elimination);
        Assert.AreEqual(expected, actual);
        evals[i] = msvm.GetLastKernelEvaluations();
    }

    for (int i = 0; i < evals.Length; i++)
        Assert.AreEqual(evals[i], evalexp[i]);

    for (int i = 0; i < inputs.Length; i++)
    {
        double expected = outputs[i];
        double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Voting);
        Assert.AreEqual(expected, actual);
        evals[i] = msvm.GetLastKernelEvaluations();
    }

    for (int i = 0; i < evals.Length; i++)
        Assert.AreEqual(msvm.SupportVectorUniqueCount, evals[i], 1);
}
public void LinearTest()
{
    // Let's say we have the following data to be classified
    // into three possible classes. Those are the samples:
    //
    double[][] inputs =
    {
        //               input         output
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 0, 0, 1, 0 }, //  0
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 1, 1, 1, 1 }, //  2
        new double[] { 1, 0, 1, 1 }, //  2
        new double[] { 1, 1, 0, 1 }, //  2
        new double[] { 0, 1, 1, 1 }, //  2
        new double[] { 1, 1, 1, 1 }, //  2
    };

    int[] outputs = // those are the class labels
    {
        0, 0, 0, 0, 0,
        1, 1, 1, 1, 1,
        2, 2, 2, 2, 2,
    };

    // Create a new multi-class linear support vector machine for 3 classes
    var machine = new MulticlassSupportVectorMachine(inputs: 4, classes: 3);

    // Create a one-vs-one learning algorithm using LIBLINEAR's L2-loss SVC dual
    var teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs)
    {
        Algorithm = (svm, classInputs, classOutputs, i, j) =>
            new LinearDualCoordinateDescent(svm, classInputs, classOutputs)
            {
                Loss = Loss.L2
            }
    };

    // Teach the machine
    double error = teacher.Run(); // should be 0.
    Assert.AreEqual(0, error);

    for (int i = 0; i < inputs.Length; i++)
    {
        double actual = machine.Compute(inputs[i]);
        double expected = outputs[i];
        Assert.AreEqual(expected, actual);
    }
}
public void ComputeTest2()
{
    double[][] input =
    {
        new double[] { 1, 4, 2, 0, 1 },
        new double[] { 1, 3, 2, 0, 1 },
        new double[] { 3, 0, 1, 1, 1 },
        new double[] { 3, 0, 1, 0, 1 },
        new double[] { 0, 5, 5, 5, 5 },
        new double[] { 1, 5, 5, 5, 5 },
        new double[] { 1, 0, 0, 0, 0 },
        new double[] { 1, 0, 0, 0, 0 },
    };

    int[] output =
    {
        0, 0,
        1, 1,
        2, 2,
        3, 3,
    };

    IKernel kernel = new Polynomial(2);
    int classes = 4;
    int inputs = 5;

    // Create the Multi-class Support Vector Machine using the selected Kernel
    var msvm = new MulticlassSupportVectorMachine(inputs, kernel, classes);

    // Create the learning algorithm using the machine and the training data
    var ml = new MulticlassSupportVectorLearning(msvm, input, output);

    // Configure the learning algorithm
    ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
    {
        var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs)
        {
            Complexity = 1
        };
        return smo;
    };

    Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

    // Execute the training algorithm
    double error = ml.Run();

    Assert.AreEqual(6, msvm.GetLastKernelEvaluations());

    int[] evals = new int[input.Length];
    int[] evalexp = { 8, 8, 7, 7, 7, 7, 6, 6 };

#if NET35
    AForge.Parallel.For(0, input.Length, i =>
#else
    Parallel.For(0, input.Length, i =>
#endif
    {
        double[] data = input[i];
        double[] responses;

        int num = msvm.Compute(data, MulticlassComputeMethod.Elimination, out responses);
        Assert.AreEqual(output[i], num);

        evals[i] = msvm.GetLastKernelEvaluations();
    });

    for (int i = 0; i < evals.Length; i++)
        Assert.AreEqual(evals[i], evalexp[i]);

#if NET35
    AForge.Parallel.For(0, input.Length, i =>
#else
    Parallel.For(0, input.Length, i =>
#endif
    {
        double[] data = input[i];
        double[] responses;

        int num = msvm.Compute(data, MulticlassComputeMethod.Voting, out responses);
        Assert.AreEqual(output[i], num);

        evals[i] = msvm.GetLastKernelEvaluations();
    });

    for (int i = 0; i < evals.Length; i++)
        Assert.AreEqual(msvm.SupportVectorUniqueCount, evals[i]);
}
/// <summary>
///   Loads the stored gestures and learns an SVM using those data.
/// </summary>
///
private void btnLearn_Click(object sender, EventArgs e)
{
    if (gridSamples.Rows.Count == 0)
    {
        MessageBox.Show("Please load or insert some data first.");
        return;
    }

    BindingList<Sequence> samples = database.Samples;
    BindingList<String> classes = database.Classes;

    double[][] inputs = new double[samples.Count][];
    int[] outputs = new int[samples.Count];

    for (int i = 0; i < inputs.Length; i++)
    {
        inputs[i] = samples[i].Input;
        outputs[i] = samples[i].Output;
    }

    // Creates a new learning machine. Please note how the number of inputs is given
    // as zero: this means the machine will accept variable-length sequences as input.
    //
    svm = new MulticlassSupportVectorMachine(inputs: 0,
        kernel: new DynamicTimeWarping(2), classes: classes.Count);

    // Create the learning algorithm to teach the multiple class classifier
    var teacher = new MulticlassSupportVectorLearning(svm, inputs, outputs)
    {
        // Setup the learning algorithm for each one-vs-one subproblem
        Algorithm = (machine, classInputs, classOutputs, i, j) =>
            new SequentialMinimalOptimization(machine, classInputs, classOutputs)
    };

    // Run the learning algorithm
    double error = teacher.Run();

    // Classify all training instances
    foreach (var sample in database.Samples)
    {
        sample.RecognizedAs = svm.Compute(sample.Input);
    }

    foreach (DataGridViewRow row in gridSamples.Rows)
    {
        var sample = row.DataBoundItem as Sequence;
        row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
            Color.LightGreen : Color.White;
    }
}
/// <summary>
///   Creates the Support Vector Machines that will identify images based on
///   their Bag-of-Visual-Words feature vector representation.
/// </summary>
///
private void btnCreateVectorMachines_Click(object sender, EventArgs e)
{
    double[][] inputs;
    int[] outputs;

    getData(out inputs, out outputs);

    int classes = outputs.Distinct().Count();

    var kernel = getKernel();

    // Create the Multi-class Support Vector Machine using the selected Kernel
    ksvm = new MulticlassSupportVectorMachine(inputs[0].Length, kernel, classes);

    // Create the learning algorithm using the machine and the training data
    MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(ksvm, inputs, outputs);

    // Extract training parameters from the interface
    double complexity = (double)numComplexity.Value;
    double tolerance = (double)numTolerance.Value;
    int cacheSize = (int)numCache.Value;
    SelectionStrategy strategy = (SelectionStrategy)cbStrategy.SelectedItem;

    // Configure the learning algorithm
    ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
    {
        return new SequentialMinimalOptimization(svm, classInputs, classOutputs)
        {
            Complexity = complexity,
            Tolerance = tolerance,
            CacheSize = cacheSize,
            Strategy = strategy,
        };
    };

    lbStatus.Text = "Training the classifiers. This may take a (very) significant amount of time...";
    Application.DoEvents();

    Stopwatch sw = Stopwatch.StartNew();

    // Train the machines. It should take a while.
    double error = ml.Run();

    sw.Stop();

    lbStatus.Text = String.Format(
        "Training complete ({0}ms, {1}er). Click Classify to test the classifiers.",
        sw.ElapsedMilliseconds, error);

    btnClassifyElimination.Enabled = true;

    // Populate the information tab with the machines
    dgvMachines.Rows.Clear();
    int k = 1;
    for (int i = 0; i < classes; i++)
    {
        for (int j = 0; j < i; j++, k++)
        {
            var machine = ksvm[i, j];
            int sv = machine.SupportVectors == null ? 0 : machine.SupportVectors.Length;
            int c = dgvMachines.Rows.Add(k, i + "-vs-" + j, sv, machine.Threshold);
            dgvMachines.Rows[c].Tag = machine;
        }
    }

    // approximate size in bytes =
    //   number of support vectors *
    //   number of doubles in a support vector *
    //   size of double
    int bytes = ksvm.SupportVectorUniqueCount * 1024 * sizeof(double);
    float megabytes = bytes / (1024f * 1024f); // use float literals to avoid integer division
    lbSize.Text = String.Format("{0} ({1} MB)", ksvm.SupportVectorUniqueCount, megabytes);
}
public void RunTest2()
{
    double[][] inputs =
    {
        new double[] { 0, 1, 1, 0 }, // 0
        new double[] { 0, 1, 0, 0 }, // 0
        new double[] { 0, 0, 1, 0 }, // 0
        new double[] { 0, 1, 1, 0 }, // 0
        new double[] { 0, 1, 0, 0 }, // 0
        new double[] { 1, 0, 0, 0 }, // 1
        new double[] { 1, 0, 0, 0 }, // 1
        new double[] { 1, 0, 0, 1 }, // 1
        new double[] { 0, 0, 0, 1 }, // 1
        new double[] { 0, 0, 0, 1 }, // 1
        new double[] { 1, 1, 1, 1 }, // 2
        new double[] { 1, 0, 1, 1 }, // 2
        new double[] { 1, 1, 0, 1 }, // 2
        new double[] { 0, 1, 1, 1 }, // 2
        new double[] { 1, 1, 1, 1 }, // 2
    };

    int[] outputs =
    {
        0, 0, 0, 0, 0,
        1, 1, 1, 1, 1,
        2, 2, 2, 2, 2,
    };

    IKernel kernel = new Linear();
    MulticlassSupportVectorMachine machine = new MulticlassSupportVectorMachine(4, kernel, 3);
    MulticlassSupportVectorLearning target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

    target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    double error1 = target.Run();
    Assert.AreEqual(0, error1);

    target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new ProbabilisticOutputCalibration(svm, classInputs, classOutputs);

    double error2 = target.Run();
    Assert.AreEqual(0, error2);
}