public void multilabel_calibration_generic_kernel()
{
    // Trains a multi-label SVM (one binary machine per class) with SMO and a
    // Gaussian kernel, then calibrates the machines' outputs with Platt's
    // probabilistic calibration, and checks the predictions, scores,
    // log-likelihoods and probabilities against previously recorded values.

    // Let's say we have the following data to be classified
    // into three possible classes. Those are the samples:
    //
    double[][] inputs =
    {
        //               input         output
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 0, 0, 1, 0 }, //  0
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 1, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 1, 0, 1, 1 }, //  2
        new double[] { 1, 1, 0, 1 }, //  2
        new double[] { 0, 1, 1, 1 }, //  2
        new double[] { 1, 1, 1, 1 }, //  2
    };

    int[] outputs = // those are the class labels
    {
        0, 0, 0, 0, 0,
        1, 1, 1,
        2, 2, 2, 2,
    };

    // Create the multi-label learning algorithm for the machine,
    // using SMO to train the underlying SVM of each of the
    // binary (one-vs-rest) class subproblems.
    var teacher = new MultilabelSupportVectorLearning<IKernel>()
    {
        Learner = (param) => new SequentialMinimalOptimization<IKernel>()
        {
            // Use a fixed gamma instead of estimating it from the data,
            // so the recorded expected values below stay reproducible.
            UseKernelEstimation = false,
            Kernel = Gaussian.FromGamma(0.5)
        }
    };

    // Learn a machine
    var machine = teacher.Learn(inputs, outputs);

    // Create a second learning pass over the already-trained machine
    // that replaces learning with Platt's sigmoid output calibration
    // of each underlying binary SVM.
    var calibration = new MultilabelSupportVectorLearning<IKernel>(machine)
    {
        Learner = (p) => new ProbabilisticOutputCalibration<IKernel>(p.Model)
    };

    // Single-threaded execution keeps the results deterministic.
    calibration.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Run the calibration (trains the sigmoids, not the SVMs)
    calibration.Learn(inputs, outputs);

    // Obtain the boolean per-class decisions for each sample
    bool[][] predicted = machine.Decide(inputs);

    // Get class scores for each sample
    double[][] scores = machine.Scores(inputs);

    // Get log-likelihoods (expected to equal the scores after calibration)
    double[][] logl = machine.LogLikelihoods(inputs);

    // Get the per-class "probability" outputs for each sample.
    // NOTE(review): these values are not normalized across classes (rows do
    // not sum to 1; some entries exceed 1) — they appear to be likelihood
    // ratios per independent binary machine. TODO confirm against the
    // Probabilities() documentation.
    double[][] prob = machine.Probabilities(inputs);

    // Compute the classification error (Hamming loss over the
    // boolean decisions) and the cross-entropy loss.
    double error = new HammingLoss(outputs).Loss(predicted);
    double loss = new CategoryCrossEntropyLoss(outputs).Loss(prob);

    // C# renderings of the actual outputs — presumably kept around so the
    // expected tables below can be regenerated from a debugger session.
    string a = scores.ToCSharp();
    string b = logl.ToCSharp();
    string c = prob.ToCSharp();

    // Previously recorded expected values (regression baselines):
    double[][] expectedScores =
    {
        new double[] { 1.85316017783605, -2.59688389729331, -2.32170102153988 },
        new double[] { 1.84933597524124, -1.99399145231446, -2.2920299547693 },
        new double[] { 1.44477953581274, -1.98592298465108, -2.27356092239125 },
        new double[] { 1.85316017783605, -2.59688389729331, -2.32170102153988 },
        new double[] { 1.84933597524124, -1.99399145231446, -2.2920299547693 },
        new double[] { -2.40815576360914, 0.328362962196791, -0.932721757919691 },
        new double[] { -2.13111157264226, 1.809192096031, -2.2920299547693 },
        new double[] { -2.13111157264226, 1.809192096031, -2.2920299547693 },
        new double[] { -2.14888646926108, -1.99399145231447, 1.33101148524982 },
        new double[] { -2.12915064678299, -1.98592298465108, 1.3242171079396 },
        new double[] { -1.47197826667149, -1.96368715704762, 0.843414180834243 },
        new double[] { -2.14221021749314, -2.83117892529093, 2.61354519154994 }
    };

    // Identical to expectedScores by design (see note on logl above).
    double[][] expectedLogL =
    {
        new double[] { 1.85316017783605, -2.59688389729331, -2.32170102153988 },
        new double[] { 1.84933597524124, -1.99399145231446, -2.2920299547693 },
        new double[] { 1.44477953581274, -1.98592298465108, -2.27356092239125 },
        new double[] { 1.85316017783605, -2.59688389729331, -2.32170102153988 },
        new double[] { 1.84933597524124, -1.99399145231446, -2.2920299547693 },
        new double[] { -2.40815576360914, 0.328362962196791, -0.932721757919691 },
        new double[] { -2.13111157264226, 1.809192096031, -2.2920299547693 },
        new double[] { -2.13111157264226, 1.809192096031, -2.2920299547693 },
        new double[] { -2.14888646926108, -1.99399145231447, 1.33101148524982 },
        new double[] { -2.12915064678299, -1.98592298465108, 1.3242171079396 },
        new double[] { -1.47197826667149, -1.96368715704762, 0.843414180834243 },
        new double[] { -2.14221021749314, -2.83117892529093, 2.61354519154994 }
    };

    double[][] expectedProbs =
    {
        new double[] { 6.37994947365835, 0.0745053832890827, 0.0981065622139132 },
        new double[] { 6.35559784678136, 0.136150899620619, 0.101061104020747 },
        new double[] { 4.24091706941419, 0.137253872418087, 0.102944947658882 },
        new double[] { 6.37994947365835, 0.0745053832890827, 0.0981065622139132 },
        new double[] { 6.35559784678136, 0.136150899620619, 0.101061104020747 },
        new double[] { 0.0899810880411361, 1.38869292386051, 0.393481290780948 },
        new double[] { 0.118705270957796, 6.10551277113228, 0.101061104020747 },
        new double[] { 0.118705270957796, 6.10551277113228, 0.101061104020747 },
        new double[] { 0.116613938707895, 0.136150899620619, 3.78486979203385 },
        new double[] { 0.118938271567046, 0.137253872418087, 3.75924112261421 },
        new double[] { 0.229471080877097, 0.140340010119971, 2.3242889884131 },
        new double[] { 0.11739508739354, 0.0589433229176013, 13.6473476521179 }
    };

    // The winning class per sample must match the expected labels exactly.
    int[] actual = predicted.ArgMax(dimension: 1);
    Assert.IsTrue(actual.IsEqual(outputs));

    // Zero Hamming loss: every per-class boolean decision is correct.
    Assert.AreEqual(0, error);

    // The fixed gamma must have survived training (kernel estimation off).
    Assert.AreEqual(0.5, ((Gaussian)machine[0].Kernel).Gamma);
    Assert.AreEqual(0.5, ((Gaussian)machine[1].Kernel).Gamma);
    Assert.AreEqual(0.5, ((Gaussian)machine[2].Kernel).Gamma);

    // Recorded cross-entropy baseline.
    Assert.AreEqual(-18.908706961799737, loss);

    // All numeric outputs must match the recorded baselines to 1e-10.
    Assert.IsTrue(expectedScores.IsEqual(scores, 1e-10));
    Assert.IsTrue(expectedLogL.IsEqual(logl, 1e-10));
    Assert.IsTrue(expectedProbs.IsEqual(prob, 1e-10));
}
public void multilabel_linear_smo_new_usage()
{
    // Trains a multi-label linear SVM with SMO (complexity estimated
    // automatically) and verifies the predicted class of every sample.

    // Training data: four binary attributes per sample, three classes.
    double[][] samples =
    {
        //               input         label
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 0, 0, 1, 0 }, //  0
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 1, 1, 1, 1 }, //  2
        new double[] { 1, 0, 1, 1 }, //  2
        new double[] { 1, 1, 0, 1 }, //  2
        new double[] { 0, 1, 1, 1 }, //  2
        new double[] { 1, 1, 1, 1 }, //  2
    };

    // Expected class labels, five samples per class.
    int[] labels =
    {
        0, 0, 0, 0, 0,
        1, 1, 1, 1, 1,
        2, 2, 2, 2, 2,
    };

    // Multi-label machine: one binary SVM per class, each trained by
    // SMO with the complexity parameter (C) estimated from the data.
    var learner = new MultilabelSupportVectorLearning<Linear>()
    {
        Learner = (p) => new SequentialMinimalOptimization<Linear>()
        {
            UseComplexityHeuristic = true
        }
    };

#if DEBUG
    // Deterministic single-threaded run under debug builds.
    learner.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

    // Train the machine on the data above.
    var svm = learner.Learn(samples, labels);

    // Reduce the per-class decisions to a single winning class per sample.
    int[] predictions = svm.Decide(samples).ArgMax(dimension: 1);

    // NOTE(review): sample 13 is apparently misclassified as class 0 by this
    // configuration, so the expected label is patched before the comparison —
    // confirm this is the intended tolerated error rather than a regression.
    labels[13] = 0;
    Assert.IsTrue(predictions.IsEqual(labels));
}
public void multilabel_linear_new_usage()
{
    // Trains a multi-label linear SVM with LIBLINEAR's L2-loss coordinate
    // descent solver and verifies a perfect fit on the training data.

    #region doc_learn_ldcd
    // Let's say we have the following data to be classified
    // into three possible classes. Those are the samples:
    //
    double[][] observations =
    {
        //               input         output
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 0, 0, 1, 0 }, //  0
        new double[] { 0, 1, 1, 0 }, //  0
        new double[] { 0, 1, 0, 0 }, //  0
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 0 }, //  1
        new double[] { 1, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 0, 0, 0, 1 }, //  1
        new double[] { 1, 1, 1, 1 }, //  2
        new double[] { 1, 0, 1, 1 }, //  2
        new double[] { 1, 1, 0, 1 }, //  2
        new double[] { 0, 1, 1, 1 }, //  2
        new double[] { 1, 1, 1, 1 }, //  2
    };

    // The expected class index of each sample above:
    int[] classLabels =
    {
        0, 0, 0, 0, 0,
        1, 1, 1, 1, 1,
        2, 2, 2, 2, 2,
    };

    // Create a one-vs-rest multi-label SVM learning algorithm,
    // training each underlying binary machine with LIBLINEAR's
    // L2-loss SVC dual (linear dual coordinate descent):
    var teacher = new MultilabelSupportVectorLearning<Linear>()
    {
        Learner = (p) => new LinearDualCoordinateDescent()
        {
            Loss = Loss.L2
        }
    };

    // Keep the run single-threaded so results are reproducible.
    teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Learn a machine from the data above
    var machine = teacher.Learn(observations, classLabels);

    // Obtain the boolean per-class decision for every sample
    bool[][] predicted = machine.Decide(observations);

    // Measure the classification error (Hamming loss over decisions)
    double error = new HammingLoss(classLabels).Loss(predicted);
    #endregion

    // The machine must fit the training data perfectly.
    Assert.AreEqual(0, error);
    Assert.IsTrue(predicted.ArgMax(dimension: 1).IsEqual(classLabels));
}