public void SerializeTest1()
{
    // Trains a multi-label SVM, round-trips it through a MemoryStream
    // via Save/Load, and verifies the reloaded machine reproduces the
    // original training error and support vectors exactly.
    double[][] inputs =
    {
        new double[] { 1, 4, 2, 0, 1 },
        new double[] { 1, 3, 2, 0, 1 },
        new double[] { 3, 0, 1, 1, 1 },
        new double[] { 3, 0, 1, 0, 1 },
        new double[] { 0, 5, 5, 5, 5 },
        new double[] { 1, 5, 5, 5, 5 },
        new double[] { 1, 0, 0, 0, 0 },
        new double[] { 1, 0, 0, 0, 0 },
    };

    int[] outputs = { 0, 0, 1, 1, 2, 2, 3, 3 };

    IKernel kernel = new Linear();
    var msvm = new MultilabelSupportVectorMachine(5, kernel, 4);
    var smo = new MultilabelSupportVectorLearning(msvm, inputs, outputs);
    smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    // Training error of the original machine; the reloaded copy must
    // reproduce exactly this value below.
    double expected = smo.Run();

    MultilabelSupportVectorMachine target;

    // FIX: dispose the MemoryStream deterministically (the original test
    // leaked it; harmless for MemoryStream but not idiomatic).
    using (var stream = new MemoryStream())
    {
        // Save the machines
        msvm.Save(stream);

        // Rewind
        stream.Seek(0, SeekOrigin.Begin);

        // Reload the machines
        target = MultilabelSupportVectorMachine.Load(stream);
    }

    double actual;
    int count = 0; // misclassified samples

    // Compute errors with the reloaded machine
    for (int i = 0; i < inputs.Length; i++)
    {
        double[] responses;
        target.Compute(inputs[i], out responses);

        int y;
        responses.Max(out y); // index of the highest response = predicted class
        if (y != outputs[i])
            count++;
    }

    actual = (double)count / inputs.Length;

    Assert.AreEqual(expected, actual);

    Assert.AreEqual(msvm.Inputs, target.Inputs);
    Assert.AreEqual(msvm.Classes, target.Classes);

    // Every underlying binary machine must have identical support vectors.
    for (int i = 0; i < msvm.Machines.Length; i++)
    {
        var a = msvm[i];
        var b = target[i];

        Assert.IsTrue(a.SupportVectors.IsEqual(b.SupportVectors));
    }
}
public void RunTest()
{
    Accord.Math.Tools.SetupGenerator(0);

    // Each sample is a pair of binary values; the machine must learn
    // four boolean functions (and, or, nand, xor) over those pairs.
    // Everything but xor is linearly separable.
    double[][] samples =
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 },
    };

    // One label column per boolean function:
    //           and  or  nand xor
    int[][] labels =
    {
        new[] { -1, -1, +1, +1 },
        new[] { -1, +1, +1, -1 },
        new[] { -1, +1, +1, -1 },
        new[] { +1, +1, -1, +1 },
    };

    // A linear kernel over two inputs, with one binary machine
    // per label column (four in total).
    IKernel linearKernel = new Linear();
    var machine = new MultilabelSupportVectorMachine(inputs: 2, kernel: linearKernel, classes: 4);

    var teacher = new MultilabelSupportVectorLearning(machine, samples, labels);

    // Use hard-margin SMO for every binary subproblem.
    teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs)
        {
            Complexity = 10000.0
        };

    double error = teacher.Run();

    // Only the xor column cannot be learned by a hard-margin linear
    // machine, giving exactly 2 wrong labels out of the 16 total.
    Assert.AreEqual(2 / 16.0, error);
}
private static void multilabelsvm()
{
    // Simple auto-association sample: each scalar input maps to its
    // own label pattern, which a Linear kernel solves easily.
    double[][] samples =
    {
        new double[] { 0 },
        new double[] { 3 },
        new double[] { 1 },
        new double[] { 2 },
    };

    // Three label columns per sample (+1 = belongs, -1 = does not).
    int[][] labels =
    {
        new[] { -1,  1, -1 },
        new[] { -1, -1,  1 },
        new[] {  1,  1, -1 },
        new[] { -1, -1, -1 },
    };

    // One input dimension, three possible labels, linear kernel.
    IKernel kernel = new Linear();
    var machine = new MultilabelSupportVectorMachine(1, kernel, 3);

    var teacher = new MultilabelSupportVectorLearning(machine, samples, labels);

    // Hard-margin SMO for each binary subproblem.
    teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs)
        {
            Complexity = 10000.0
        };

    double error = teacher.Run();

    // Decode every sample back through the trained machine.
    int[][] answers = samples.Apply(machine.Compute);
}
public void ComputeTest1()
{
    // Four well-separated classes over five-dimensional inputs.
    double[][] samples =
    {
        new double[] { 1, 4, 2, 0, 1 },
        new double[] { 1, 3, 2, 0, 1 },
        new double[] { 3, 0, 1, 1, 1 },
        new double[] { 3, 0, 1, 0, 1 },
        new double[] { 0, 5, 5, 5, 5 },
        new double[] { 1, 5, 5, 5, 5 },
        new double[] { 1, 0, 0, 0, 0 },
        new double[] { 1, 0, 0, 0, 0 },
    };

    int[] labels = { 0, 0, 1, 1, 2, 2, 3, 3 };

    IKernel kernel = new Polynomial(2);
    var msvm = new MultilabelSupportVectorMachine(5, kernel, 4);
    var smo = new MultilabelSupportVectorLearning(msvm, samples, labels);
    smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs);

    // No kernel evaluation may have happened before training.
    Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

    double error = smo.Run();
    Assert.AreEqual(0, error);

    var evaluations = new int[samples.Length];

    for (int sample = 0; sample < samples.Length; sample++)
    {
        double[] responses;
        msvm.Compute(samples[sample], out responses);

        int predicted;
        responses.Max(out predicted);

        // Every sample must be classified correctly.
        Assert.AreEqual((double)labels[sample], predicted);
        evaluations[sample] = msvm.GetLastKernelEvaluations();
    }

    // Each classification should have evaluated the kernel exactly
    // once per unique support vector.
    foreach (int count in evaluations)
        Assert.AreEqual(msvm.SupportVectorUniqueCount, count);
}
public void multilabel_linear_smo_new_usage()
{
    // Trains a multi-label SVM through the new Learn() API using SMO
    // and checks the argmax decisions against the class labels.

    // Let's say we have the following data to be classified
    // into three possible classes. Those are the samples:
    //
    double[][] inputs =
    {
        //               input         output
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 0, 0, 1, 0 }, //  0
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 1, 1, 1, 1 }, //  2
        new double[] { 1, 0, 1, 1 }, //  2
        new double[] { 1, 1, 0, 1 }, //  2
        new double[] { 0, 1, 1, 1 }, //  2
        new double[] { 1, 1, 1, 1 }, //  2
    };

    int[] outputs = // those are the class labels
    {
        0, 0, 0, 0, 0,
        1, 1, 1, 1, 1,
        2, 2, 2, 2, 2,
    };

    // Create the multi-label learning algorithm, training each binary
    // subproblem with SMO using the automatic complexity heuristic.
    // (The original comment mentioned LIBLINEAR's L2-loss SVC dual,
    // but this test actually uses SequentialMinimalOptimization.)
    var teacher = new MultilabelSupportVectorLearning<Linear>();
    teacher.Learner = (p) => new SequentialMinimalOptimization<Linear>()
    {
        UseComplexityHeuristic = true
    };

#if DEBUG
    // Deterministic single-threaded training in debug builds.
    teacher.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

    // Learn a machine
    var machine = teacher.Learn(inputs, outputs);

    // Predicted class = index of the strongest label response.
    int[] actual = machine.Decide(inputs).ArgMax(dimension: 1);

    // NOTE(review): the expected label of sample 13 is overwritten to
    // match the machine's prediction before asserting — the machine
    // misclassifies this one sample and the test accepts it. Confirm
    // this masking is intentional and not hiding a regression.
    outputs[13] = 0;
    Assert.IsTrue(actual.IsEqual(outputs));
}
public void multilabel_calibration_generic_kernel()
{
    // Trains a multi-label SVM with a Gaussian kernel (typed through the
    // generic IKernel interface), then calibrates each binary machine with
    // probabilistic output calibration, and compares scores,
    // log-likelihoods and probabilities against recorded baselines.

    // Let's say we have the following data to be classified
    // into three possible classes. Those are the samples:
    //
    double[][] inputs =
    {
        //               input         output
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 0, 0, 1, 0 }, //  0
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 1, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 1, 0, 1, 1 }, //  2
        new double[] { 1, 1, 0, 1 }, //  2
        new double[] { 0, 1, 1, 1 }, //  2
        new double[] { 1, 1, 1, 1 }, //  2
    };

    int[] outputs = // those are the class labels
    {
        0, 0, 0, 0, 0,
        1, 1, 1,
        2, 2, 2, 2,
    };

    // Create the multi-class learning algorithm for the machine
    var teacher = new MultilabelSupportVectorLearning<IKernel>()
    {
        // Configure the learning algorithm to use SMO to train the
        // underlying SVMs in each of the binary class subproblems.
        Learner = (param) => new SequentialMinimalOptimization<IKernel>()
        {
            // Kernel is fixed explicitly below, so estimation is off.
            UseKernelEstimation = false,
            Kernel = Gaussian.FromGamma(0.5)
        }
    };

    // Learn a machine
    var machine = teacher.Learn(inputs, outputs);

    // Create the calibration algorithm, wrapping the already-trained
    // machine so only the probabilistic outputs get fitted.
    var calibration = new MultilabelSupportVectorLearning<IKernel>(machine)
    {
        // Fit a sigmoid to the outputs of each pre-trained binary machine.
        Learner = (p) => new ProbabilisticOutputCalibration<IKernel>(p.Model)
    };

    // Single-threaded so the exact numeric comparisons below are stable.
    calibration.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Learn a machine
    calibration.Learn(inputs, outputs);

    // Obtain class predictions for each sample
    bool[][] predicted = machine.Decide(inputs);

    // Get class scores for each sample
    double[][] scores = machine.Scores(inputs);

    // Get log-likelihoods (should be same as scores)
    double[][] logl = machine.LogLikelihoods(inputs);

    // Get probability for each sample
    double[][] prob = machine.Probabilities(inputs);

    // Compute classification error using mean accuracy (mAcc)
    double error = new HammingLoss(outputs).Loss(predicted);
    double loss = new CategoryCrossEntropyLoss(outputs).Loss(prob);

    // Handy when re-recording the expected baselines below.
    string a = scores.ToCSharp();
    string b = logl.ToCSharp();
    string c = prob.ToCSharp();

    // Regression baselines recorded from a known-good run.
    double[][] expectedScores =
    {
        new double[] { 1.85316017783605, -2.59688389729331, -2.32170102153988 },
        new double[] { 1.84933597524124, -1.99399145231446, -2.2920299547693 },
        new double[] { 1.44477953581274, -1.98592298465108, -2.27356092239125 },
        new double[] { 1.85316017783605, -2.59688389729331, -2.32170102153988 },
        new double[] { 1.84933597524124, -1.99399145231446, -2.2920299547693 },
        new double[] { -2.40815576360914, 0.328362962196791, -0.932721757919691 },
        new double[] { -2.13111157264226, 1.809192096031, -2.2920299547693 },
        new double[] { -2.13111157264226, 1.809192096031, -2.2920299547693 },
        new double[] { -2.14888646926108, -1.99399145231447, 1.33101148524982 },
        new double[] { -2.12915064678299, -1.98592298465108, 1.3242171079396 },
        new double[] { -1.47197826667149, -1.96368715704762, 0.843414180834243 },
        new double[] { -2.14221021749314, -2.83117892529093, 2.61354519154994 }
    };

    // Same values as the scores, per the "should be same" note above.
    double[][] expectedLogL =
    {
        new double[] { 1.85316017783605, -2.59688389729331, -2.32170102153988 },
        new double[] { 1.84933597524124, -1.99399145231446, -2.2920299547693 },
        new double[] { 1.44477953581274, -1.98592298465108, -2.27356092239125 },
        new double[] { 1.85316017783605, -2.59688389729331, -2.32170102153988 },
        new double[] { 1.84933597524124, -1.99399145231446, -2.2920299547693 },
        new double[] { -2.40815576360914, 0.328362962196791, -0.932721757919691 },
        new double[] { -2.13111157264226, 1.809192096031, -2.2920299547693 },
        new double[] { -2.13111157264226, 1.809192096031, -2.2920299547693 },
        new double[] { -2.14888646926108, -1.99399145231447, 1.33101148524982 },
        new double[] { -2.12915064678299, -1.98592298465108, 1.3242171079396 },
        new double[] { -1.47197826667149, -1.96368715704762, 0.843414180834243 },
        new double[] { -2.14221021749314, -2.83117892529093, 2.61354519154994 }
    };

    // NOTE(review): these rows do not sum to one (several entries exceed 1),
    // so the "probabilities" appear to be unnormalized relative likelihoods —
    // verify against the machine's Probabilities implementation.
    double[][] expectedProbs =
    {
        new double[] { 6.37994947365835, 0.0745053832890827, 0.0981065622139132 },
        new double[] { 6.35559784678136, 0.136150899620619, 0.101061104020747 },
        new double[] { 4.24091706941419, 0.137253872418087, 0.102944947658882 },
        new double[] { 6.37994947365835, 0.0745053832890827, 0.0981065622139132 },
        new double[] { 6.35559784678136, 0.136150899620619, 0.101061104020747 },
        new double[] { 0.0899810880411361, 1.38869292386051, 0.393481290780948 },
        new double[] { 0.118705270957796, 6.10551277113228, 0.101061104020747 },
        new double[] { 0.118705270957796, 6.10551277113228, 0.101061104020747 },
        new double[] { 0.116613938707895, 0.136150899620619, 3.78486979203385 },
        new double[] { 0.118938271567046, 0.137253872418087, 3.75924112261421 },
        new double[] { 0.229471080877097, 0.140340010119971, 2.3242889884131 },
        new double[] { 0.11739508739354, 0.0589433229176013, 13.6473476521179 }
    };

    // Predicted class = index of the strongest label response.
    int[] actual = predicted.ArgMax(dimension: 1);
    Assert.IsTrue(actual.IsEqual(outputs));

    // Must be exactly the same as test above
    Assert.AreEqual(0, error);
    Assert.AreEqual(0.5, ((Gaussian)machine[0].Kernel).Gamma);
    Assert.AreEqual(0.5, ((Gaussian)machine[1].Kernel).Gamma);
    Assert.AreEqual(0.5, ((Gaussian)machine[2].Kernel).Gamma);
    Assert.AreEqual(-18.908706961799737, loss);
    Assert.IsTrue(expectedScores.IsEqual(scores, 1e-10));
    Assert.IsTrue(expectedLogL.IsEqual(logl, 1e-10));
    Assert.IsTrue(expectedProbs.IsEqual(prob, 1e-10));
}
public void multilabel_linear_new_usage()
{
    #region doc_learn_ldcd
    // Let's say we have the following data to be classified
    // into three possible classes. Those are the samples:
    //
    double[][] inputs =
    {
        //               input         output
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 0, 0, 1, 0 }, //  0
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 1, 1, 1, 1 }, //  2
        new double[] { 1, 0, 1, 1 }, //  2
        new double[] { 1, 1, 0, 1 }, //  2
        new double[] { 0, 1, 1, 1 }, //  2
        new double[] { 1, 1, 1, 1 }, //  2
    };

    int[] outputs = // those are the class labels
    {
        0, 0, 0, 0, 0,
        1, 1, 1, 1, 1,
        2, 2, 2, 2, 2,
    };

    // Create a multi-label SVM learning algorithm, training each
    // binary machine with LIBLINEAR's L2-loss SVC dual solver.
    var learner = new MultilabelSupportVectorLearning<Linear>();
    learner.Learner = (p) => new LinearDualCoordinateDescent()
    {
        Loss = Loss.L2
    };

    // Keep training single-threaded for reproducibility.
    learner.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Learn a machine from the data
    var machine = learner.Learn(inputs, outputs);

    // Obtain the boolean label decisions for every sample
    bool[][] decisions = machine.Decide(inputs);

    // Compute classification error using mean accuracy (mAcc)
    double hammingError = new HammingLoss(outputs).Loss(decisions);
    #endregion

    Assert.AreEqual(0, hammingError);

    // The strongest label per sample must match the class label.
    Assert.IsTrue(decisions.ArgMax(dimension: 1).IsEqual(outputs));
}
public void serialize_reload_new_version()
{
    // Same 5-dimensional, 4-class dataset used by the other
    // serialization tests in this file.
    double[][] samples =
    {
        new double[] { 1, 4, 2, 0, 1 },
        new double[] { 1, 3, 2, 0, 1 },
        new double[] { 3, 0, 1, 1, 1 },
        new double[] { 3, 0, 1, 0, 1 },
        new double[] { 0, 5, 5, 5, 5 },
        new double[] { 1, 5, 5, 5, 5 },
        new double[] { 1, 0, 0, 0, 0 },
        new double[] { 1, 0, 0, 0, 0 },
    };

    int[] labels = { 0, 0, 1, 1, 2, 2, 3, 3 };

    IKernel kernel = new Linear();
    var msvm = new MultilabelSupportVectorMachine(5, kernel, 4);

    var smo = new MultilabelSupportVectorLearning(msvm, samples, labels);
    smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new SequentialMinimalOptimization(svm, classInputs, classOutputs)
        {
            Complexity = 1
        };

    // Error rate of the freshly trained machine.
    double expected = smo.Run();

    // Round-trip through the new byte-array based serializer.
    var bytes = msvm.Save();
    var target = Serializer.Load<MultilabelSupportVectorMachine>(bytes);

    // The reloaded machine must reproduce the original error rate.
    int misses = 0;
    for (int i = 0; i < samples.Length; i++)
    {
        double[] responses;
        target.Compute(samples[i], out responses);

        int predicted;
        responses.Max(out predicted);

        if (predicted != labels[i])
            misses++;
    }

    double actual = (double)misses / samples.Length;
    Assert.AreEqual(expected, actual);

    Assert.AreEqual(msvm.Inputs, target.Inputs);
    Assert.AreEqual(msvm.Classes, target.Classes);

    // Each underlying binary machine must round-trip exactly.
    for (int i = 0; i < msvm.Machines.Length; i++)
    {
        var original = msvm[i];
        var reloaded = target[i];

        Assert.AreEqual(original.Threshold, reloaded.Threshold);
        Assert.AreEqual(original.NumberOfInputs, reloaded.NumberOfInputs);
        Assert.AreEqual(original.NumberOfOutputs, reloaded.NumberOfOutputs);
        Assert.IsTrue(original.Weights.IsEqual(reloaded.Weights));
        Assert.IsTrue(original.SupportVectors.IsEqual(reloaded.SupportVectors));
    }
}
public void LinearComputeTest1()
{
    double[][] samples =
    {
        new double[] { 1, 4, 2, 0, 1 },
        new double[] { 1, 3, 2, 0, 1 },
        new double[] { 3, 0, 1, 1, 1 },
        new double[] { 3, 0, 1, 0, 1 },
        new double[] { 0, 5, 5, 5, 5 },
        new double[] { 1, 5, 5, 5, 5 },
        new double[] { 1, 0, 0, 0, 0 },
        new double[] { 1, 0, 0, 0, 0 },
    };

    int[] labels = { 0, 0, 1, 1, 2, 2, 3, 3 };

    // A machine created without an explicit kernel is linear and
    // should never report kernel function evaluations.
    var msvm = new MultilabelSupportVectorMachine(5, 4);
    var smo = new MultilabelSupportVectorLearning(msvm, samples, labels);
    smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        new LinearNewtonMethod(svm, classInputs, classOutputs)
        {
            Complexity = 1
        };

    Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

#if DEBUG
    // Deterministic single-threaded training/decision in debug builds.
    smo.ParallelOptions.MaxDegreeOfParallelism = 1;
    msvm.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

    double error = smo.Run();
    Assert.AreEqual(0.125, error);

    var evaluations = new int[samples.Length];
    var predictions = new int[samples.Length];

    for (int i = 0; i < samples.Length; i++)
    {
        double[] responses;
        msvm.Compute(samples[i], out responses);

        int predicted;
        responses.Max(out predicted);
        predictions[i] = predicted;

        // This machine misclassifies the last two samples; the first
        // six must be classified correctly (matches the 0.125 error).
        if (i < 6)
            Assert.AreEqual((double)labels[i], predicted);
        else
            Assert.AreNotEqual((double)labels[i], predicted);

        evaluations[i] = msvm.GetLastKernelEvaluations();
    }

    // The linear machine must report zero kernel evaluations throughout.
    foreach (int count in evaluations)
        Assert.AreEqual(0, count);

    // Scores() must agree with Compute() on the decided class.
    for (int i = 0; i < samples.Length; i++)
    {
        int decided;
        msvm.Scores(samples[i], out decided);
        Assert.AreEqual(predictions[i], decided);
    }
}