Example #1
        private static void TestSMO()
        {
            Console.WriteLine("Downloading dataset");
            var news20 = new Accord.DataSets.News20(@"C:\Temp\");

            Sparse <double>[] inputs  = news20.Training.Item1.Get(0, 2000);
            int[]             outputs = news20.Training.Item2.ToMulticlass().Get(0, 2000);

            var learn = new MultilabelSupportVectorLearning <Linear, Sparse <double> >()
            {
                // using SMO with second-order working-set selection for each SVM
                Learner = (p) => new SequentialMinimalOptimization <Linear, Sparse <double> >()
                {
                    Strategy   = SelectionStrategy.SecondOrder,
                    Complexity = 1.0,
                    Tolerance  = 1e-4,
                    CacheSize  = 1000
                },
            };

            Console.WriteLine("Learning");
            Stopwatch sw  = Stopwatch.StartNew();
            var       svm = learn.Learn(inputs, outputs);

            Console.WriteLine(sw.Elapsed);

            Console.WriteLine("Predicting");
            sw = Stopwatch.StartNew();
            int[] predicted = svm.ToMulticlass().Decide(inputs);
            Console.WriteLine(sw.Elapsed);

            var test = new GeneralConfusionMatrix(expected: outputs, predicted: predicted);

            Console.WriteLine("Training acc: " + test.Accuracy);
        }
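The example above scores the machine on the same 2,000 training samples it was fit on. As a minimal follow-up sketch (assuming the same news20 and svm variables), the held-out News20 test split can be evaluated with the same APIs used in Examples #3 and #15:

        // Sketch: evaluate on the held-out split (news20 and svm as above)
        Sparse <double>[] testInputs  = news20.Testing.Item1;
        int[]             testOutputs = news20.Testing.Item2.ToMulticlass();

        int[] testPredicted = svm.ToMulticlass().Decide(testInputs);
        var   testCm        = new GeneralConfusionMatrix(expected: testOutputs, predicted: testPredicted);
        Console.WriteLine("Held-out acc: " + testCm.Accuracy);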
Example #2
        public void no_samples_for_class()
        {
            double[][] inputs =
            {
                new double[] { 1, 1 }, // 0
                new double[] { 1, 1 }, // 0
                new double[] { 1, 1 }, // 2
            };

            int[] outputs =
            {
                0, 0, 2
            };

            var teacher = new MultilabelSupportVectorLearning <Gaussian>()
            {
                Learner = (param) => new SequentialMinimalOptimization <Gaussian>()
                {
                    UseKernelEstimation = true
                }
            };

            Assert.Throws <ArgumentException>(() => teacher.Learn(inputs, outputs),
                                              "There are no samples for class label {0}. Please make sure that class " +
                                              "labels are contiguous and there is at least one training sample for each label.", 1);
        }
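The assertion above fires because no sample carries label 1. A minimal sketch of one way to make labels contiguous before learning; the remapping below is plain LINQ (System.Linq), not an Accord API:

        // Sketch: remap the sparse labels { 0, 2 } to contiguous { 0, 1 }
        int[] distinct = outputs.Distinct().OrderBy(c => c).ToArray();  // { 0, 2 }
        var remap = distinct.Select((c, i) => new { c, i })
                            .ToDictionary(p => p.c, p => p.i);          // 0 -> 0, 2 -> 1
        int[] contiguous = outputs.Select(c => remap[c]).ToArray();     // { 0, 0, 1 }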
Example #3
        private static void TestPredictSparseSVM()
        {
            Console.WriteLine("Downloading dataset");
            var news20 = new Accord.DataSets.News20(@"C:\Temp\");

            Sparse <double>[] inputs  = news20.Training.Item1;
            int[]             outputs = news20.Training.Item2.ToMulticlass();

            var learn = new MultilabelSupportVectorLearning <Linear, Sparse <double> >()
            {
                // using LIBLINEAR's L2-loss SVC dual for each SVM
                Learner = (p) => new LinearDualCoordinateDescent <Linear, Sparse <double> >()
                {
                    Loss       = Loss.L2,
                    Complexity = 1.0,
                    Tolerance  = 1e-4
                }
            };

            Console.WriteLine("Learning");
            Stopwatch sw  = Stopwatch.StartNew();
            var       svm = learn.Learn(inputs.Get(0, 100), outputs.Get(0, 100));

            Console.WriteLine(sw.Elapsed);

            Console.WriteLine("Predicting");
            sw = Stopwatch.StartNew();
            int[] predicted = svm.ToMulticlass().Decide(inputs);
            Console.WriteLine(sw.Elapsed);
        }
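A minimal sketch to quantify the predictions above (same outputs and predicted variables; ZeroOneLoss is used the same way in Example #15). Note the machine was trained on the first 100 samples only, so this measures mostly out-of-sample error:

        // Sketch: fraction of misclassified samples
        double error = new ZeroOneLoss(outputs).Loss(predicted);
        Console.WriteLine("Zero-one loss: " + error);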
Example #4
        public void multilabel_linear_new_usage()
        {
            #region doc_learn_ldcd
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // Create a one-vs-rest multi-label SVM learning algorithm
            var teacher = new MultilabelSupportVectorLearning <Linear>()
            {
                // using LIBLINEAR's L2-loss SVC dual for each SVM
                Learner = (p) => new LinearDualCoordinateDescent()
                {
                    Loss = Loss.L2
                }
            };

            // The following line is only needed to ensure reproducible results;
            // remove, comment, or change it to enable full parallelization:
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Learn a machine
            var machine = teacher.Learn(inputs, outputs);

            // Obtain class predictions for each sample
            bool[][] predicted = machine.Decide(inputs);

            // Compute the classification error using the Hamming loss
            double error = new HammingLoss(outputs).Loss(predicted);
            #endregion

            Assert.AreEqual(0, error);
            Assert.IsTrue(predicted.ArgMax(dimension: 1).IsEqual(outputs));
        }
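When a single winning class per sample is needed instead of the bool[][] answers above, the learned machine can be viewed as a multi-class classifier; a minimal sketch reusing the same machine:

        // Sketch: collapse the one-vs-rest answers into one label per sample
        int[] classLabels = machine.ToMulticlass().Decide(inputs); // { 0, 0, ..., 2 }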
Example #5
        public void learn_linear_multilabel()
        {
            #region doc_learn_multilabel
            // In this example, we will learn a multi-class SVM using the one-vs-rest (OvR)
            // approach. The OvR approach decomposes a decision problem involving multiple
            // classes into a series of binary problems, which can then be solved using SVMs.

            // Ensure we have reproducible results
            Accord.Math.Random.Generator.Seed = 0;

            // We will try to learn a classifier
            // for the Fisher Iris Flower dataset
            var        iris    = new Iris();
            double[][] inputs  = iris.Instances;   // get the flower characteristics
            int[]      outputs = iris.ClassLabels; // get the expected flower classes

            // We will use mini-batches of size 32 to learn an SVM using SGD
            var batches = MiniBatches.Create(batchSize: 32, maxIterations: 1000,
                                             shuffle: ShuffleMethod.EveryEpoch, input: inputs, output: outputs);

            // Now, we can create a multi-label teaching algorithm for the SVMs
            var teacher = new MultilabelSupportVectorLearning <Linear, double[]>
            {
                // We will use SGD to learn each of the binary problems in the multi-class problem
                Learner = (p) => new StochasticGradientDescent <Linear, double[], LogisticLoss>()
                {
                    LearningRate  = 1e-3,
                    MaxIterations = 1 // so the gradient is only updated once after each mini-batch
                }
            };

            // The following line is only needed to ensure reproducible results;
            // remove, comment, or change it to enable full parallelization:
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Now, we can start training the model on mini-batches:
            foreach (var batch in batches)
            {
                teacher.Learn(batch.Inputs, batch.Outputs);
            }

            // Get the final model:
            var svm = teacher.Model;

            // Now, we should be able to use the model to predict
            // the classes of all flowers in Fisher's Iris dataset:
            int[] prediction = svm.ToMulticlass().Decide(inputs);

            // And from those predictions, we can compute the model accuracy:
            var    cm       = new GeneralConfusionMatrix(expected: outputs, predicted: prediction);
            double accuracy = cm.Accuracy; // should be approximately 0.913
            #endregion

            Assert.AreEqual(0.91333333333333333, cm.Accuracy);
            Assert.AreEqual(150, batches.NumberOfSamples);
            Assert.AreEqual(32, batches.MiniBatchSize);
            Assert.AreEqual(213, batches.CurrentEpoch);
            Assert.AreEqual(1001, batches.CurrentIteration);
            Assert.AreEqual(82, batches.CurrentSample);
        }
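A minimal persistence sketch for the model trained above; the file path is illustrative, and it is assumed here that Accord.IO's Serializer supports this machine type (a byte-based Save/Load round-trip appears in Example #13):

        // Hypothetical sketch: persist and reload the trained model
        Serializer.Save(svm, @"C:\Temp\iris-svm.bin"); // path is an assumption
        var reloaded = Serializer.Load <MultilabelSupportVectorMachine <Linear, double[]> >(@"C:\Temp\iris-svm.bin");
        int[] check = reloaded.ToMulticlass().Decide(inputs); // should match 'prediction'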
Example #6
        public void ComputeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };


            IKernel kernel = new Polynomial(2);
            var     msvm   = new MultilabelSupportVectorMachine(5, kernel, 4);
            var     smo    = new MultilabelSupportVectorLearning(msvm, inputs, outputs);

            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                            new SequentialMinimalOptimization(svm, classInputs, classOutputs)
            {
                Complexity = 1
            };

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            double error = smo.Run();

            Assert.AreEqual(0, error);


            int[] evals = new int[inputs.Length];
            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double[] responses;
                msvm.Compute(inputs[i], out responses);
                int actual;
                responses.Max(out actual);
                Assert.AreEqual(expected, actual);
                evals[i] = msvm.GetLastKernelEvaluations();
            }

            for (int i = 0; i < evals.Length; i++)
            {
                Assert.AreEqual(msvm.SupportVectorUniqueCount, evals[i]);
            }
        }
Example #7
        public void RunTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Sample data
            //   The following is a simple auto-association function
            //   in which each input corresponds to its own class. This
            //   problem should be easily solved using a Linear kernel.

            // Sample input data
            double[][] inputs =
            {
                new double[] { 0, 0 },
                new double[] { 0, 1 },
                new double[] { 1, 0 },
                new double[] { 1, 1 },
            };

            // Outputs for each of the inputs
            int[][] outputs =
            {
                //       and   or   nand   xor
                new[] { -1, -1, +1, +1 },
                new[] { -1, +1, +1, -1 },
                new[] { -1, +1, +1, -1 },
                new[] { +1, +1, -1, +1 },
            };

            // Create a new Linear kernel
            IKernel linear = new Linear();

            // Create a new Multi-label Support Vector Machine for two inputs,
            //  using the linear kernel and four disjoint classes.
            var machine = new MultilabelSupportVectorMachine(inputs: 2, kernel: linear, classes: 4);

            // Create the Multi-label learning algorithm for the machine
            var teacher = new MultilabelSupportVectorLearning(machine, inputs, outputs);

            // Configure the learning algorithm to use SMO to train the
            //  underlying SVMs in each of the binary class subproblems.
            teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                                new SequentialMinimalOptimization(svm, classInputs, classOutputs)
            {
                // Create a hard SVM
                Complexity = 10000.0
            };

            // Run the learning algorithm
            double error = teacher.Run();

            // With 4 samples and 4 binary subproblems there are 16 label
            // decisions; only xor is not learnable by a hard-margin linear
            // machine, and it misses 2 of them, giving an error of 2/16.
            Assert.AreEqual(2 / 16.0, error);
        }
Example #8
        public MultilabelSupportVectorMachine <Linear> Teach(double[][] inputs, int[] outputs)
        {
            var teacher = new MultilabelSupportVectorLearning <Linear>()
            {
                Learner = (p) => new LinearDualCoordinateDescent()
                {
                    Loss = Loss.L2
                }
            };

            return teacher.Learn(inputs, outputs);
        }
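A minimal usage sketch for the Teach helper above; the toy data is illustrative:

        // Hypothetical usage of Teach (illustrative data):
        double[][] xs = { new double[] { 0, 1 }, new double[] { 1, 0 } };
        int[]      ys = { 0, 1 };
        MultilabelSupportVectorMachine <Linear> model = Teach(xs, ys);
        bool[][] answers = model.Decide(xs); // one bool per class, per sample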
Example #9
        public void multilabel_linear_smo_new_usage()
        {
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // Create a one-vs-rest learning algorithm using SMO with the complexity heuristic
            var teacher = new MultilabelSupportVectorLearning <Linear>();

            teacher.Learner = (p) => new SequentialMinimalOptimization <Linear>()
            {
                UseComplexityHeuristic = true
            };

#if DEBUG
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

            // Learn a machine
            var machine = teacher.Learn(inputs, outputs);

            int[] actual = machine.Decide(inputs).ArgMax(dimension: 1);
            outputs[13] = 0; // this machine assigns sample 13 to class 0, so adjust the expectation before comparing
            Assert.IsTrue(actual.IsEqual(outputs));
        }
Example #10
        private static void multilabelsvm()
        {
            // Sample data
            // The following is a simple auto-association function
            // where each input corresponds to its own class. This
            // problem should be easily solved by a Linear kernel.

            // Sample input data
            double[][] inputs =
            {
                new double[] { 0 },
                new double[] { 3 },
                new double[] { 1 },
                new double[] { 2 },
            };

            // Outputs for each of the inputs
            int[][] outputs =
            {
                new[] { -1,  1, -1 },
                new[] { -1, -1,  1 },
                new[] {  1,  1, -1 },
                new[] { -1, -1, -1 },
            };


            // Create a new Linear kernel
            IKernel kernel = new Linear();

            // Create a new Multi-label Support Vector Machine with one input,
            //  using the linear kernel and three disjoint classes.
            var machine = new MultilabelSupportVectorMachine(1, kernel, 3);

            // Create the Multi-label learning algorithm for the machine
            var teacher = new MultilabelSupportVectorLearning(machine, inputs, outputs);

            // Configure the learning algorithm to use SMO to train the
            //  underlying SVMs in each of the binary class subproblems.
            teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                                new SequentialMinimalOptimization(svm, classInputs, classOutputs)
            {
                // Create a hard SVM
                Complexity = 10000.0
            };

            // Run the learning algorithm
            double error = teacher.Run();

            int[][] answers = inputs.Apply(machine.Compute);
        }
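A minimal sketch, assuming the same answers array from above, that prints the -1/+1 vote each of the three per-class machines gives to every sample:

        // Sketch: print the per-class -1/+1 answers
        for (int i = 0; i < answers.Length; i++)
            Console.WriteLine("input {0}: [{1}]", inputs[i][0], string.Join(", ", answers[i]));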
Example #11
        static void Main(string[] args)
        {
            double[][] inputs =
            {
                new double[] { 0 },
                new double[] { 3 },
                new double[] { 1 },
                new double[] { 2 },
            };

            int[][] outputs =
            {
                new[] { -1,  1, -1 },
                new[] { -1, -1,  1 },
                new[] {  1,  1, -1 },
                new[] { -1, -1, -1 }
            };

            var teacher = new MultilabelSupportVectorLearning <Linear>()
            {
                Learner = (p) => new SequentialMinimalOptimization <Linear>()
                {
                    Complexity = 10000.0
                }
            };

            var svm = teacher.Learn(inputs, outputs);

            double[][] results = svm.Probabilities(inputs);
            //double[][] results = new double[4][];
            //for (int i = 0; i < results.Length; i++)
            //    results[i] = new double[4];

            //double[][] results2 = svm.Decide(inputs, results);

            int[] maxAnswers = svm.ToMulticlass().Decide(inputs);



            Console.ReadKey();
        }
Example #12
        private static void multilabelSvm(double[][] inputs, int[] outputs)
        {
            // Create the multi-label learning algorithm as one-vs-rest
            var teacher = new MultilabelSupportVectorLearning <Linear>()
            {
                Learner = (p) => new SequentialMinimalOptimization <Linear>()
                {
                    Complexity = 10000.0 // Create a hard SVM
                }
            };

            // Learn a multi-label SVM using the teacher
            var svm = teacher.Learn(inputs, outputs);

            // Get the predictions for the inputs
            bool[][] predicted = svm.Decide(inputs);

            // Use the machine as if it were a multi-class machine
            // instead of a multi-label, identifying the strongest
            // class among the multi-label predictions:
            int[] classLabels = svm.ToMulticlass().Decide(inputs);
        }
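To measure how well the multi-label predictions above match the expected labels, the Hamming loss from Example #4 applies directly; a minimal sketch with the same variables:

        // Sketch: fraction of wrong label decisions across all classes
        double error = new HammingLoss(outputs).Loss(predicted);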
Example #13
        public void serialize_reload_new_version()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };

            IKernel kernel = new Linear();
            var     msvm   = new MultilabelSupportVectorMachine(5, kernel, 4);
            var     smo    = new MultilabelSupportVectorLearning(msvm, inputs, outputs);

            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                            new SequentialMinimalOptimization(svm, classInputs, classOutputs)
            {
                Complexity = 1
            };

            double expected = smo.Run();


            // Save the machines

            var bytes = msvm.Save();

            // Reload the machines
            var target = Serializer.Load <MultilabelSupportVectorMachine>(bytes);

            double actual;

            int count = 0; // Compute errors

            for (int i = 0; i < inputs.Length; i++)
            {
                double[] responses;
                target.Compute(inputs[i], out responses);
                int y; responses.Max(out y);
                if (y != outputs[i])
                {
                    count++;
                }
            }

            actual = (double)count / inputs.Length;


            Assert.AreEqual(expected, actual);

            Assert.AreEqual(msvm.Inputs, target.Inputs);
            Assert.AreEqual(msvm.Classes, target.Classes);
            for (int i = 0; i < msvm.Machines.Length; i++)
            {
                var a = msvm[i];
                var b = target[i];

                Assert.AreEqual(a.Threshold, b.Threshold);
                Assert.AreEqual(a.NumberOfInputs, b.NumberOfInputs);
                Assert.AreEqual(a.NumberOfOutputs, b.NumberOfOutputs);
                Assert.IsTrue(a.Weights.IsEqual(b.Weights));

                Assert.IsTrue(a.SupportVectors.IsEqual(b.SupportVectors));
            }
        }
Example #14
        public void multilabel_calibration_generic_kernel()
        {
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1,
                2, 2, 2, 2,
            };

            // Create the multi-label learning algorithm for the machine
            var teacher = new MultilabelSupportVectorLearning <IKernel>()
            {
                // Configure the learning algorithm to use SMO to train the
                //  underlying SVMs in each of the binary class subproblems.
                Learner = (param) => new SequentialMinimalOptimization <IKernel>()
                {
                    UseKernelEstimation = false,
                    Kernel = Gaussian.FromGamma(0.5)
                }
            };

            // Learn a machine
            var machine = teacher.Learn(inputs, outputs);


            // Create the calibration algorithm for the learned machine
            var calibration = new MultilabelSupportVectorLearning <IKernel>(machine)
            {
                // Configure the learning algorithm to calibrate the probabilistic
                //  outputs of the underlying SVMs in each binary subproblem.
                Learner = (p) => new ProbabilisticOutputCalibration <IKernel>(p.Model)
            };


            // Configure parallel execution options
            calibration.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Learn a machine
            calibration.Learn(inputs, outputs);

            // Obtain class predictions for each sample
            bool[][] predicted = machine.Decide(inputs);

            // Get class scores for each sample
            double[][] scores = machine.Scores(inputs);

            // Get log-likelihoods (should be same as scores)
            double[][] logl = machine.LogLikelihoods(inputs);

            // Get probability for each sample
            double[][] prob = machine.Probabilities(inputs);

            // Compute the classification error (Hamming loss) and the cross-entropy loss
            double error = new HammingLoss(outputs).Loss(predicted);
            double loss  = new CategoryCrossEntropyLoss(outputs).Loss(prob);

            string a = scores.ToCSharp();
            string b = logl.ToCSharp();
            string c = prob.ToCSharp();

            double[][] expectedScores =
            {
                new double[] {  1.85316017783605, -2.59688389729331,  -2.32170102153988 },
                new double[] {  1.84933597524124, -1.99399145231446,   -2.2920299547693 },
                new double[] {  1.44477953581274, -1.98592298465108,  -2.27356092239125 },
                new double[] {  1.85316017783605, -2.59688389729331,  -2.32170102153988 },
                new double[] {  1.84933597524124, -1.99399145231446,   -2.2920299547693 },
                new double[] { -2.40815576360914, 0.328362962196791, -0.932721757919691 },
                new double[] { -2.13111157264226,    1.809192096031,   -2.2920299547693 },
                new double[] { -2.13111157264226,    1.809192096031,   -2.2920299547693 },
                new double[] { -2.14888646926108, -1.99399145231447,   1.33101148524982 },
                new double[] { -2.12915064678299, -1.98592298465108,    1.3242171079396 },
                new double[] { -1.47197826667149, -1.96368715704762,  0.843414180834243 },
                new double[] { -2.14221021749314, -2.83117892529093,   2.61354519154994 }
            };

            double[][] expectedLogL =
            {
                new double[] {  1.85316017783605, -2.59688389729331,  -2.32170102153988 },
                new double[] {  1.84933597524124, -1.99399145231446,   -2.2920299547693 },
                new double[] {  1.44477953581274, -1.98592298465108,  -2.27356092239125 },
                new double[] {  1.85316017783605, -2.59688389729331,  -2.32170102153988 },
                new double[] {  1.84933597524124, -1.99399145231446,   -2.2920299547693 },
                new double[] { -2.40815576360914, 0.328362962196791, -0.932721757919691 },
                new double[] { -2.13111157264226,    1.809192096031,   -2.2920299547693 },
                new double[] { -2.13111157264226,    1.809192096031,   -2.2920299547693 },
                new double[] { -2.14888646926108, -1.99399145231447,   1.33101148524982 },
                new double[] { -2.12915064678299, -1.98592298465108,    1.3242171079396 },
                new double[] { -1.47197826667149, -1.96368715704762,  0.843414180834243 },
                new double[] { -2.14221021749314, -2.83117892529093,   2.61354519154994 }
            };

            double[][] expectedProbs =
            {
                new double[] {   6.37994947365835, 0.0745053832890827, 0.0981065622139132 },
                new double[] {   6.35559784678136,  0.136150899620619,  0.101061104020747 },
                new double[] {   4.24091706941419,  0.137253872418087,  0.102944947658882 },
                new double[] {   6.37994947365835, 0.0745053832890827, 0.0981065622139132 },
                new double[] {   6.35559784678136,  0.136150899620619,  0.101061104020747 },
                new double[] { 0.0899810880411361,   1.38869292386051,  0.393481290780948 },
                new double[] {  0.118705270957796,   6.10551277113228,  0.101061104020747 },
                new double[] {  0.118705270957796,   6.10551277113228,  0.101061104020747 },
                new double[] {  0.116613938707895,  0.136150899620619,   3.78486979203385 },
                new double[] {  0.118938271567046,  0.137253872418087,   3.75924112261421 },
                new double[] {  0.229471080877097,  0.140340010119971,    2.3242889884131 },
                new double[] {   0.11739508739354, 0.0589433229176013,   13.6473476521179 }
            };

            int[] actual = predicted.ArgMax(dimension: 1);
            Assert.IsTrue(actual.IsEqual(outputs));

            // Must be exactly the same as test above
            Assert.AreEqual(0, error);
            Assert.AreEqual(0.5, ((Gaussian)machine[0].Kernel).Gamma);
            Assert.AreEqual(0.5, ((Gaussian)machine[1].Kernel).Gamma);
            Assert.AreEqual(0.5, ((Gaussian)machine[2].Kernel).Gamma);
            Assert.AreEqual(-18.908706961799737, loss);
            Assert.IsTrue(expectedScores.IsEqual(scores, 1e-10));
            Assert.IsTrue(expectedLogL.IsEqual(logl, 1e-10));
            Assert.IsTrue(expectedProbs.IsEqual(prob, 1e-10));
        }
Example #15
        private static void TestSparseSVMComplete()
        {
            #region doc_learn_news20
            Console.WriteLine("Downloading dataset:");
            var news20       = new Accord.DataSets.News20(@"C:\Temp\");
            var trainInputs  = news20.Training.Item1;
            var trainOutputs = news20.Training.Item2.ToMulticlass();
            var testInputs   = news20.Testing.Item1;
            var testOutputs  = news20.Testing.Item2.ToMulticlass();

            Console.WriteLine(" - Training samples: {0}", trainInputs.Rows());
            Console.WriteLine(" - Testing samples: {0}", testInputs.Rows());
            Console.WriteLine(" - Dimensions: {0}", trainInputs.Columns());
            Console.WriteLine(" - Classes: {0}", trainOutputs.DistinctCount());
            Console.WriteLine();


            // Create and use the learning algorithm to train a sparse linear SVM
            var learn = new MultilabelSupportVectorLearning <Linear, Sparse <double> >()
            {
                // using LIBLINEAR's L2-loss SVC dual for each SVM
                Learner = (p) => new LinearDualCoordinateDescent <Linear, Sparse <double> >()
                {
                    Loss      = Loss.L2,
                    Tolerance = 1e-4
                },
            };

            // Display progress in the console
            learn.SubproblemFinished += (sender, e) =>
            {
                Console.WriteLine(" - {0} / {1} ({2:00.0%})", e.Progress, e.Maximum, e.Progress / (double)e.Maximum);
            };

            // Start the learning algorithm
            Console.WriteLine("Learning");
            Stopwatch sw  = Stopwatch.StartNew();
            var       svm = learn.Learn(trainInputs, trainOutputs);
            Console.WriteLine("Done in {0}", sw.Elapsed);
            Console.WriteLine();


            // Compute accuracy in the training set
            Console.WriteLine("Predicting training set");
            sw = Stopwatch.StartNew();
            int[] trainPredicted = svm.ToMulticlass().Decide(trainInputs);
            Console.WriteLine("Done in {0}", sw.Elapsed);

            double trainError = new ZeroOneLoss(trainOutputs).Loss(trainPredicted);
            Console.WriteLine("Training error: {0}", trainError);
            Console.WriteLine();


            // Compute accuracy in the testing set
            Console.WriteLine("Predicting testing set");
            sw = Stopwatch.StartNew();
            int[] testPredicted = svm.ToMulticlass().Decide(testInputs);
            Console.WriteLine("Done in {0}", sw.Elapsed);

            double testError = new ZeroOneLoss(testOutputs).Loss(testPredicted);
            Console.WriteLine("Testing error: {0}", testError);
            #endregion
        }
Example #16
        public void LinearComputeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };


            var msvm = new MultilabelSupportVectorMachine(5, 4);
            var smo  = new MultilabelSupportVectorLearning(msvm, inputs, outputs);

            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                            new LinearNewtonMethod(svm, classInputs, classOutputs)
            {
                Complexity = 1
            };

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

#if DEBUG
            smo.ParallelOptions.MaxDegreeOfParallelism  = 1;
            msvm.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

            double error = smo.Run();
            Assert.AreEqual(0.125, error);


            int[] evals = new int[inputs.Length];
            int[] y     = new int[inputs.Length];
            for (int i = 0; i < inputs.Length; i++)
            {
                double   expected = outputs[i];
                double[] responses;
                msvm.Compute(inputs[i], out responses);
                int actual;
                responses.Max(out actual);
                y[i] = actual;
                if (i < 6)
                {
                    Assert.AreEqual(expected, actual);
                    evals[i] = msvm.GetLastKernelEvaluations();
                }
                else
                {
                    Assert.AreNotEqual(expected, actual);
                    evals[i] = msvm.GetLastKernelEvaluations();
                }
            }

            for (int i = 0; i < evals.Length; i++)
            {
                Assert.AreEqual(0, evals[i]);
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                int actual;
                msvm.Scores(inputs[i], out actual);
                Assert.AreEqual(y[i], actual);
            }
        }
Example #17
        public void SerializeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };

            IKernel kernel = new Linear();
            var     msvm   = new MultilabelSupportVectorMachine(5, kernel, 4);
            var     smo    = new MultilabelSupportVectorLearning(msvm, inputs, outputs);

            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                            new SequentialMinimalOptimization(svm, classInputs, classOutputs)
            {
                Complexity = 1
            };

            double error = smo.Run();

            Assert.AreEqual(0, error);

            int count = 0; // Compute errors

            for (int i = 0; i < inputs.Length; i++)
            {
                double[] responses;
                msvm.Compute(inputs[i], out responses);
                int y; responses.Max(out y);
                if (y != outputs[i])
                {
                    count++;
                }
            }

            double expected = (double)count / inputs.Length;

            Assert.AreEqual(msvm.Inputs, 5);
            Assert.AreEqual(msvm.Classes, 4);
            Assert.AreEqual(4, msvm.Machines.Length);


            MemoryStream stream = new MemoryStream();

            // Save the machines
            msvm.Save(stream);

            // Rewind
            stream.Seek(0, SeekOrigin.Begin);

            // Reload the machines
            var target = MultilabelSupportVectorMachine.Load(stream);

            double actual;

            count = 0; // Compute errors
            for (int i = 0; i < inputs.Length; i++)
            {
                double[] responses;
                target.Compute(inputs[i], out responses);
                int y; responses.Max(out y);
                if (y != outputs[i])
                {
                    count++;
                }
            }

            actual = (double)count / inputs.Length;


            Assert.AreEqual(expected, actual);

            Assert.AreEqual(msvm.Inputs, target.Inputs);
            Assert.AreEqual(msvm.Classes, target.Classes);
            for (int i = 0; i < msvm.Machines.Length; i++)
            {
                var a = msvm[i];
                var b = target[i];

                Assert.IsTrue(a.SupportVectors.IsEqual(b.SupportVectors));
            }
        }
Example #18
        public void multilabel_calibration_generic_kernel()
        {
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1,
                2, 2, 2, 2,
            };

            // Create the multi-label learning algorithm for the machine
            var teacher = new MultilabelSupportVectorLearning <IKernel>()
            {
                // Configure the learning algorithm to use SMO to train the
                //  underlying SVMs in each of the binary class subproblems.
                Learner = (param) => new SequentialMinimalOptimization <IKernel>()
                {
                    UseKernelEstimation = false,
                    Kernel = Gaussian.FromGamma(0.5)
                }
            };

            // Learn a machine
            var machine = teacher.Learn(inputs, outputs);


            // Create the calibration algorithm for the learned machine
            var calibration = new MultilabelSupportVectorLearning <IKernel>(machine)
            {
                // Configure the learning algorithm to calibrate the probabilistic
                //  outputs of the underlying SVMs in each binary subproblem.
                Learner = (p) => new ProbabilisticOutputCalibration <IKernel>(p.Model)
            };


            // Configure parallel execution options
            calibration.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Learn a machine
            calibration.Learn(inputs, outputs);

            // Obtain class predictions for each sample
            bool[][] predicted = machine.Decide(inputs);

            // Get class scores for each sample
            double[][] scores = machine.Scores(inputs);

            // Get log-likelihoods (after calibration these differ from the raw scores)
            double[][] logl = machine.LogLikelihoods(inputs);

            // Get probability for each sample
            double[][] prob = machine.Probabilities(inputs);

            // Compute the classification error (Hamming loss) and the cross-entropy loss
            double error = new HammingLoss(outputs).Loss(predicted);
            double loss  = new CategoryCrossEntropyLoss(outputs).Loss(prob);

            string a = scores.ToCSharp();
            string b = logl.ToCSharp();
            string c = prob.ToCSharp();

            double[][] expectedScores =
            {
                new double[] {  1.85316017783605, -2.59688389729331,  -2.32170102153988 },
                new double[] {  1.84933597524124, -1.99399145231446,   -2.2920299547693 },
                new double[] {  1.44477953581274, -1.98592298465108,  -2.27356092239125 },
                new double[] {  1.85316017783605, -2.59688389729331,  -2.32170102153988 },
                new double[] {  1.84933597524124, -1.99399145231446,   -2.2920299547693 },
                new double[] { -2.40815576360914, 0.328362962196791, -0.932721757919691 },
                new double[] { -2.13111157264226,    1.809192096031,   -2.2920299547693 },
                new double[] { -2.13111157264226,    1.809192096031,   -2.2920299547693 },
                new double[] { -2.14888646926108, -1.99399145231447,   1.33101148524982 },
                new double[] { -2.12915064678299, -1.98592298465108,    1.3242171079396 },
                new double[] { -1.47197826667149, -1.96368715704762,  0.843414180834243 },
                new double[] { -2.14221021749314, -2.83117892529093,   2.61354519154994 }
            };

            double[][] expectedLogL =
            {
                new double[] { -0.145606614365135,  -2.66874434442222,   -2.41528841111469 },
                new double[] { -0.146125659911391,  -2.12163759796483,    -2.3883043096263 },
                new double[] { -0.211716960454159,  -2.11453945718522,   -2.37154474995633 },
                new double[] { -0.145606614365135,  -2.66874434442222,   -2.41528841111469 },
                new double[] { -0.146125659911391,  -2.12163759796483,    -2.3883043096263 },
                new double[] {   -2.4943161092787, -0.542383360363463,   -1.26452689970624 },
                new double[] {  -2.24328358118314, -0.151678833375872,    -2.3883043096263 },
                new double[] {  -2.24328358118314, -0.151678833375872,    -2.3883043096263 },
                new double[] {  -2.25918730624753,  -2.12163759796483,  -0.234447327588685 },
                new double[] {  -2.24153091066541,  -2.11453945718522,    -0.2358711195715 },
                new double[] {  -1.67856232802554,   -2.0950136294762,  -0.357841632335707 },
                new double[] {  -2.25321037906455,  -2.88845047104229, -0.0707140798850236 }
            };

            double[][] expectedProbs =
            {
                new double[] {  0.844913862516144, 0.0677684640174953, 0.0873176734663607 },
                new double[] {  0.803266328757473,  0.111405242674824, 0.0853284285677024 },
                new double[] {  0.790831391595502,  0.117950175028754, 0.0912184333757438 },
                new double[] {  0.844913862516144, 0.0677684640174953, 0.0873176734663607 },
                new double[] {  0.803266328757473,  0.111405242674824, 0.0853284285677024 },
                new double[] { 0.0872387667998771,  0.614360294206236,  0.298400938993887 },
                new double[] {  0.100372339295793,  0.812805149315815, 0.0868225113883914 },
                new double[] {  0.100372339295793,  0.812805149315815, 0.0868225113883914 },
                new double[] {  0.102863726210119,   0.11803188195247,  0.779104391837411 },
                new double[] {  0.104532503226998,  0.118686968710368,  0.776780528062634 },
                new double[] {  0.184996665350572,  0.121983586443407,  0.693019748206021 },
                new double[] { 0.0961702585148881, 0.0509517983210315,   0.85287794316408 }
            };

            int[] actual = predicted.ArgMax(dimension: 1);
            Assert.IsTrue(actual.IsEqual(outputs));

            // Must be exactly the same as test above
            Assert.AreEqual(0, error);
            Assert.AreEqual(0.5, ((Gaussian)machine[0].Kernel).Gamma);
            Assert.AreEqual(0.5, ((Gaussian)machine[1].Kernel).Gamma);
            Assert.AreEqual(0.5, ((Gaussian)machine[2].Kernel).Gamma);
            Assert.AreEqual(2.9395943260892361, loss);
            Assert.IsTrue(expectedScores.IsEqual(scores, 1e-10));
            Assert.IsTrue(expectedLogL.IsEqual(logl, 1e-10));
            Assert.IsTrue(expectedProbs.IsEqual(prob, 1e-10));

            double[] probabilities = CorrectProbabilities(machine, inputs[0]);
            double[] actualProb    = machine.Probabilities(inputs[0]);
            Assert.IsTrue(probabilities.IsEqual(actualProb, 1e-8));
        }
Example #19
        public void multilabel_calibration()
        {
            #region doc_learn_calibration
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1,
                2, 2, 2, 2,
            };

            // Create the multi-label learning algorithm for the machine
            var teacher = new MultilabelSupportVectorLearning <Gaussian>()
            {
                // Configure the learning algorithm to use SMO to train the
                //  underlying SVMs in each of the binary class subproblems.
                Learner = (param) => new SequentialMinimalOptimization <Gaussian>()
                {
                    // Estimate a suitable guess for the Gaussian kernel's parameters.
                    // This estimate can serve as a starting point for a grid search.
                    UseKernelEstimation = true
                }
            };

            // Learn a machine
            var machine = teacher.Learn(inputs, outputs);

            // Create the calibration algorithm for the machine
            var calibration = new MultilabelSupportVectorLearning <Gaussian>()
            {
                Model = machine, // We will start with an existing machine

                // Configure the learning algorithm to calibrate the probabilistic
                //  outputs of the underlying SVMs in each binary subproblem.
                Learner = (param) => new ProbabilisticOutputCalibration <Gaussian>()
                {
                    Model = param.Model // Start with an existing machine
                }
            };


            // Configure parallel execution options
            calibration.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Learn a machine
            calibration.Learn(inputs, outputs);

            // Obtain class predictions for each sample
            bool[][] predicted = machine.Decide(inputs);

            // Get class scores for each sample
            double[][] scores = machine.Scores(inputs);

            // Get log-likelihoods (after calibration these differ from the raw scores)
            double[][] logl = machine.LogLikelihoods(inputs);

            // Get probability for each sample
            double[][] prob = machine.Probabilities(inputs);

            // Compute the classification error (Hamming loss) and the cross-entropy loss
            double error = new HammingLoss(outputs).Loss(predicted);
            double loss  = new CategoryCrossEntropyLoss(outputs).Loss(prob);
            #endregion

            string a = scores.ToCSharp();
            string b = logl.ToCSharp();
            string c = prob.ToCSharp();

            double[][] expectedScores =
            {
                new double[] {  1.85316017783605, -2.59688389729331,  -2.32170102153988 },
                new double[] {  1.84933597524124, -1.99399145231446,   -2.2920299547693 },
                new double[] {  1.44477953581274, -1.98592298465108,  -2.27356092239125 },
                new double[] {  1.85316017783605, -2.59688389729331,  -2.32170102153988 },
                new double[] {  1.84933597524124, -1.99399145231446,   -2.2920299547693 },
                new double[] { -2.40815576360914, 0.328362962196791, -0.932721757919691 },
                new double[] { -2.13111157264226,    1.809192096031,   -2.2920299547693 },
                new double[] { -2.13111157264226,    1.809192096031,   -2.2920299547693 },
                new double[] { -2.14888646926108, -1.99399145231447,   1.33101148524982 },
                new double[] { -2.12915064678299, -1.98592298465108,    1.3242171079396 },
                new double[] { -1.47197826667149, -1.96368715704762,  0.843414180834243 },
                new double[] { -2.14221021749314, -2.83117892529093,   2.61354519154994 }
            };

            double[][] expectedLogL =
            {
                new double[] { -0.145606614365135,  -2.66874434442222,   -2.41528841111469 },
                new double[] { -0.146125659911391,  -2.12163759796483,    -2.3883043096263 },
                new double[] { -0.211716960454159,  -2.11453945718522,   -2.37154474995633 },
                new double[] { -0.145606614365135,  -2.66874434442222,   -2.41528841111469 },
                new double[] { -0.146125659911391,  -2.12163759796483,    -2.3883043096263 },
                new double[] {   -2.4943161092787, -0.542383360363463,   -1.26452689970624 },
                new double[] {  -2.24328358118314, -0.151678833375872,    -2.3883043096263 },
                new double[] {  -2.24328358118314, -0.151678833375872,    -2.3883043096263 },
                new double[] {  -2.25918730624753,  -2.12163759796483,  -0.234447327588685 },
                new double[] {  -2.24153091066541,  -2.11453945718522,    -0.2358711195715 },
                new double[] {  -1.67856232802554,   -2.0950136294762,  -0.357841632335707 },
                new double[] {  -2.25321037906455,  -2.88845047104229, -0.0707140798850236 }
            };

            double[][] expectedProbs =
            {
                new double[] {  0.844913862516144, 0.0677684640174953, 0.0873176734663607 },
                new double[] {  0.803266328757473,  0.111405242674824, 0.0853284285677024 },
                new double[] {  0.790831391595502,  0.117950175028754, 0.0912184333757438 },
                new double[] {  0.844913862516144, 0.0677684640174953, 0.0873176734663607 },
                new double[] {  0.803266328757473,  0.111405242674824, 0.0853284285677024 },
                new double[] { 0.0872387667998771,  0.614360294206236,  0.298400938993887 },
                new double[] {  0.100372339295793,  0.812805149315815, 0.0868225113883914 },
                new double[] {  0.100372339295793,  0.812805149315815, 0.0868225113883914 },
                new double[] {  0.102863726210119,   0.11803188195247,  0.779104391837411 },
                new double[] {  0.104532503226998,  0.118686968710368,  0.776780528062634 },
                new double[] {  0.184996665350572,  0.121983586443407,  0.693019748206021 },
                new double[] { 0.0961702585148881, 0.0509517983210315,   0.85287794316408 }
            };

            int[] actual = predicted.ArgMax(dimension: 1);
            Assert.IsTrue(actual.IsEqual(outputs));
            Assert.AreEqual(0, error);
            Assert.AreEqual(3, machine.Count);
            Assert.AreEqual(0.5, machine[0].Kernel.Gamma);
            Assert.AreEqual(0.5, machine[1].Kernel.Gamma);
            Assert.AreEqual(0.5, machine[2].Kernel.Gamma);
            Assert.AreEqual(2.9395943260892361, loss);
            Assert.IsTrue(expectedScores.IsEqual(scores, 1e-10));
            Assert.IsTrue(expectedLogL.IsEqual(logl, 1e-10));
            Assert.IsTrue(expectedProbs.IsEqual(prob, 1e-10));
            double[] rowSums = expectedProbs.Sum(1);
            Assert.IsTrue(rowSums.IsEqual(Vector.Ones(expectedProbs.Length), 1e-10));

            {
                bool[][]   predicted2 = null;
                double[][] scores2    = machine.Scores(inputs, ref predicted2);
                Assert.IsTrue(scores2.IsEqual(scores));
                Assert.IsTrue(predicted2.IsEqual(predicted));

                double[][] logl2 = machine.LogLikelihoods(inputs, ref predicted2);
                Assert.IsTrue(logl2.IsEqual(logl));
                Assert.IsTrue(predicted2.IsEqual(predicted));

                double[][] prob2 = machine.Probabilities(inputs, ref predicted2);
                Assert.IsTrue(prob2.IsEqual(prob));
                Assert.IsTrue(predicted2.IsEqual(predicted));

                bool[][]   predicted3 = new bool[predicted2.Length][];
                double[][] scores3    = inputs.ApplyWithIndex((x, i) => machine.Scores(x, ref predicted3[i]));
                Assert.IsTrue(scores3.IsEqual(scores));
                Assert.IsTrue(predicted3.IsEqual(predicted));

                double[][] logl3 = inputs.ApplyWithIndex((x, i) => machine.LogLikelihoods(x, ref predicted3[i]));
                Assert.IsTrue(logl3.IsEqual(logl));
                Assert.IsTrue(predicted3.IsEqual(predicted));

                double[][] prob3 = inputs.ApplyWithIndex((x, i) => machine.Probabilities(x, ref predicted3[i]));
                Assert.IsTrue(prob3.IsEqual(prob));
                Assert.IsTrue(predicted3.IsEqual(predicted));
            }

            {
                double[] ed = new double[scores.Length];
                double[] es = new double[scores.Length];
                double[] el = new double[scores.Length];
                double[] ep = new double[scores.Length];
                for (int i = 0; i < expectedScores.Length; i++)
                {
                    int j = scores[i].ArgMax();
                    ed[i] = j;
                    es[i] = scores[i][j];
                    el[i] = logl[i][j];
                    ep[i] = prob[i][j];
                }

                int[]    predicted2 = null;
                double[] scores2    = machine.ToMulticlass().Score(inputs, ref predicted2);
                Assert.IsTrue(scores2.IsEqual(es));
                Assert.IsTrue(predicted2.IsEqual(ed));

                double[] logl2 = machine.ToMulticlass().LogLikelihood(inputs, ref predicted2);
                Assert.IsTrue(logl2.IsEqual(el));
                Assert.IsTrue(predicted2.IsEqual(ed));

                double[] prob2 = machine.ToMulticlass().Probability(inputs, ref predicted2);
                Assert.IsTrue(prob2.IsEqual(ep));
                Assert.IsTrue(predicted2.IsEqual(ed));

                int[]    predicted3 = new int[predicted2.Length];
                double[] scores3    = inputs.ApplyWithIndex((x, i) => machine.ToMulticlass().Score(x, out predicted3[i]));
                Assert.IsTrue(scores3.IsEqual(es));
                Assert.IsTrue(predicted3.IsEqual(ed));

                double[] logl3 = inputs.ApplyWithIndex((x, i) => machine.ToMulticlass().LogLikelihood(x, out predicted3[i]));
                Assert.IsTrue(logl3.IsEqual(el));
                Assert.IsTrue(predicted3.IsEqual(ed));

                double[] prob3 = inputs.ApplyWithIndex((x, i) => machine.ToMulticlass().Probability(x, out predicted3[i]));
                Assert.IsTrue(prob3.IsEqual(ep));
                Assert.IsTrue(predicted3.IsEqual(ed));
            }
        }
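After calibration, the machine can also report the single most probable class together with its probability; a minimal sketch reusing the same machine (the ToMulticlass().Probability overload is exercised in the test above):

        // Sketch: most probable class and its probability for one sample
        int bestClass;
        double bestProb = machine.ToMulticlass().Probability(inputs[0], out bestClass);
        Console.WriteLine("class {0} with p = {1:0.000}", bestClass, bestProb);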