public MulticlassSupportVectorMachine<Polynomial> v3_1_0()
        {
            ksvm = new MulticlassSupportVectorMachine<Polynomial>(
                inputs: 2, kernel: new Polynomial(2), classes: 10);

            smo = new MulticlassSupportVectorLearning<Polynomial>()
            {
                Model = ksvm
            };

            smo.Learn(problem.Training.Inputs, problem.Training.Output);
            return ksvm;
        }
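        // The machine returned above can then be evaluated on the held-out
        // split. A minimal sketch, assuming the same `problem` field also
        // exposes Testing.Inputs (only Testing.Output appears in this
        // snippet, so that member is an assumption):
        public double v3_1_0_testError()
        {
            var machine = v3_1_0();

            // Predict the class of each test sample
            int[] predicted = machine.Decide(problem.Testing.Inputs);

            // Fraction of misclassified test samples
            return new ZeroOneLoss(problem.Testing.Output).Loss(predicted);
        }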
        public void SparseLinearTest()
        {
            MulticlassSupportVectorMachine<Linear> svm1;
            MulticlassSupportVectorMachine<Linear, Sparse<double>> svm2;

            {
                Accord.Math.Random.Generator.Seed = 0;
                MemoryStream file = new MemoryStream(
                    Encoding.Default.GetBytes(Resources.iris_scale));
                var reader = new SparseReader(file, Encoding.Default);

                var samples = reader.ReadDenseToEnd();
                double[][] x = samples.Item1;
                int[] y = samples.Item2.ToMulticlass();

                var learner = new MulticlassSupportVectorLearning<Linear>()
                {
                    Learner = (p) => new LinearDualCoordinateDescent<Linear>()
                };

                svm1 = learner.Learn(x, y);
            }

            {
                Accord.Math.Random.Generator.Seed = 0;
                MemoryStream file = new MemoryStream(
                    Encoding.Default.GetBytes(Resources.iris_scale));

                // Create a new sparse sample reader to parse the file,
                // which is stored in LIBSVM's sparse text format
                var reader = new SparseReader(file, Encoding.Default);

                var samples = reader.ReadSparseToEnd();
                Sparse<double>[] x = samples.Item1;
                int[] y = samples.Item2.ToMulticlass();

                var learner = new MulticlassSupportVectorLearning<Linear, Sparse<double>>()
                {
                    Learner = (p) => new LinearDualCoordinateDescent<Linear, Sparse<double>>()
                };

                svm2 = learner.Learn(x, y);
            }

            Assert.AreEqual(svm1.Models.Length, svm2.Models.Length);
            for (int i = 0; i < svm1.Models.Length; i++)
            {
                var ma = svm1[i].Value;
                var mb = svm2[i].Value;

                Assert.IsTrue(ma.Weights.IsEqual(mb.Weights));
                Assert.AreEqual(ma.SupportVectors.Length, mb.SupportVectors.Length);
                for (int j = 0; j < ma.SupportVectors.Length; j++)
                {
                    double[] expected = ma.SupportVectors[j];
                    double[] actual = mb.SupportVectors[j].ToDense(4);
                    Assert.IsTrue(expected.IsEqual(actual, 1e-5));
                }
            }
        }
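        // The test above relies on the dense and sparse representations being
        // interchangeable. A minimal sketch of that round-trip, assuming
        // Accord.Math's Sparse.FromDense helper (the inverse of the
        // ToDense(length) call used in the assertions above):
        public void sparse_round_trip_sketch()
        {
            double[] dense = { 0, 2.5, 0, 1.0 };

            // Keep only the non-zero entries together with their indices
            Sparse<double> sparse = Sparse.FromDense(dense);

            // Expanding back to length 4 recovers the original vector
            double[] recovered = sparse.ToDense(4);
            Assert.IsTrue(dense.IsEqual(recovered));
        }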
        public void multiclass_precomputed_matrix_smo()
        {
            #region doc_precomputed
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] trainInputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0 
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] trainOutputs = // those are the training set class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // Let's choose a kernel function
            Polynomial kernel = new Polynomial(2);

            // Get the kernel matrix for the training set
            double[][] K = kernel.ToJagged(trainInputs);

            // Create a pre-computed kernel
            var pre = new Precomputed(K);

            // Create a one-vs-one learning algorithm using SMO
            var teacher = new MulticlassSupportVectorLearning<Precomputed, int>()
            {
                Learner = (p) => new SequentialMinimalOptimization<Precomputed, int>()
                {
                    Kernel = pre
                }
            };

#if DEBUG
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

            // Learn a machine
            var machine = teacher.Learn(pre.Indices, trainOutputs);

            // Compute the machine's prediction for the training set
            int[] trainPrediction = machine.Decide(pre.Indices);

            // Evaluate the prediction error on the training set
            // (the fraction of misclassified training samples)
            double trainingError = new ZeroOneLoss(trainOutputs).Loss(trainPrediction);

            // Now let's compute the machine's prediction for a test set
            double[][] testInputs = // test-set inputs
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0 
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] testOutputs = // those are the test set class labels
            {
                0, 0,  1,  2, 
            };

            // Compute the kernel matrix between the training and test sets
            pre.Values = kernel.ToJagged2(trainInputs, testInputs);

            // Update the kernel
            machine.Kernel = pre;

            // Compute the machine's prediction for the test set
            int[] testPrediction = machine.Decide(pre.Indices);

            // Evaluate the prediction error on the test set
            // (the fraction of misclassified test samples)
            double testError = new ZeroOneLoss(testOutputs).Loss(testPrediction);
            #endregion


            Assert.AreEqual(0, trainingError);
            Assert.AreEqual(0, testError);

            // Create a one-vs-one learning algorithm using SMO
            var teacher2 = new MulticlassSupportVectorLearning<Polynomial>()
            {
                Learner = (p) => new SequentialMinimalOptimization<Polynomial>()
                {
                    Kernel = kernel
                }
            };

#if DEBUG
            teacher2.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

            // Learn a machine
            var expected = teacher2.Learn(trainInputs, trainOutputs);

            Assert.AreEqual(4, expected.NumberOfInputs);
            Assert.AreEqual(3, expected.NumberOfOutputs);
            Assert.AreEqual(0, machine.NumberOfInputs);
            Assert.AreEqual(3, machine.NumberOfOutputs);

            var machines = Enumerable.Zip(machine, expected, (a,b) => Tuple.Create(a.Value, b.Value));

            foreach (var pair in machines)
            {
                var a = pair.Item1;
                var e = pair.Item2;

                Assert.AreEqual(0, a.NumberOfInputs);
                Assert.AreEqual(2, a.NumberOfOutputs);

                Assert.AreEqual(4, e.NumberOfInputs);
                Assert.AreEqual(2, e.NumberOfOutputs);

                Assert.IsTrue(a.Weights.IsEqual(e.Weights));
            }
        }
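        // The precomputed-kernel test works because the Precomputed "kernel"
        // simply looks entries up in the stored Gram matrix, whose cells hold
        // ordinary kernel evaluations. A minimal sanity-check sketch of that
        // equivalence (not part of the original test):
        public void precomputed_matrix_sketch()
        {
            double[][] x =
            {
                new double[] { 0, 1 },
                new double[] { 1, 0 },
            };

            var kernel = new Polynomial(2);

            // The Gram matrix holds every pairwise kernel evaluation
            double[][] K = kernel.ToJagged(x);

            for (int i = 0; i < x.Length; i++)
                for (int j = 0; j < x.Length; j++)
                    Assert.AreEqual(kernel.Function(x[i], x[j]), K[i][j]);
        }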
        public void learn_test()
        {
            #region doc_learn
            // Always generate the same random numbers
            Accord.Math.Random.Generator.Seed = 0;

            // The following is a simple auto-association function in which
            // the last column of each input corresponds to its own class. This
            // problem should be easily solved using a Linear kernel.

            // Sample input data
            double[][] inputs =
            {
                new double[] { 1, 2, 0 },
                new double[] { 6, 2, 3 },
                new double[] { 1, 1, 1 },
                new double[] { 7, 6, 2 },
            };

            // Output for each of the inputs
            int[] outputs = { 0, 3, 1, 2 };


            // Create the multi-class learning algorithm for the machine
            var teacher = new MulticlassSupportVectorLearning<Linear>()
            {
                // Configure the learning algorithm to use SMO to train the
                //  underlying SVMs in each of the binary class subproblems.
                Learner = (param) => new SequentialMinimalOptimization<Linear>()
                {
                    // If you would like to use other kernels, simply replace
                    // the generic parameter to the desired kernel class, such
                    // as for example, Polynomial or Gaussian:

                    Kernel = new Linear() // use the Linear kernel
                }
            };

            // Estimate the multi-class support vector machine using one-vs-one method
            MulticlassSupportVectorMachine<Linear> ovo = teacher.Learn(inputs, outputs);

            // Obtain class predictions for each sample
            int[] predicted = ovo.Decide(inputs);

            // Compute classification error
            double error = new ZeroOneLoss(outputs).Loss(predicted);
            #endregion

            Assert.AreEqual(0, error);
            Assert.IsTrue(predicted.IsEqual(outputs));
            Assert.IsTrue(ovo.Scores(inputs[0]).IsEqual(new double[] { 0.62, -0.25, -0.59, -0.62 }, 1e-2));
            Assert.IsTrue(ovo.Scores(inputs[1]).IsEqual(new double[] { -0.62, -0.57, -0.13, 0.62 }, 1e-2));
            Assert.IsTrue(ovo.Scores(inputs[2]).IsEqual(new double[] { -0.25, 0.63, -0.63, -0.51 }, 1e-2));
        }
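        // Once learned, a machine like `ovo` above can be persisted and
        // reloaded. A minimal sketch, assuming Accord.IO's Serializer is
        // available; serialization is not part of the test above:
        public void save_and_reload_sketch(MulticlassSupportVectorMachine<Linear> ovo)
        {
            // Save the trained machine to disk...
            Accord.IO.Serializer.Save(ovo, "machine.bin");

            // ...and load it back later for prediction
            var loaded = Accord.IO.Serializer.Load<MulticlassSupportVectorMachine<Linear>>("machine.bin");
        }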
        public void multiclass_calibration_generic_kernel()
        {
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0 
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 
                2, 2, 2, 2, 
            };

            // Create the multi-class learning algorithm for the machine
            var teacher = new MulticlassSupportVectorLearning<IKernel>()
            {
                // Configure the learning algorithm to use SMO to train the
                //  underlying SVMs in each of the binary class subproblems.
                Learner = (param) => new SequentialMinimalOptimization<IKernel>()
                {
                    UseKernelEstimation = false,
                    Kernel = Gaussian.FromGamma(0.5)
                }
            };

            // Learn a machine
            var machine = teacher.Learn(inputs, outputs);


            // Create the multi-class learning algorithm for the machine
            var calibration = new MulticlassSupportVectorLearning<IKernel>(machine)
            {
                // Configure the learning algorithm to use SMO to train the
                //  underlying SVMs in each of the binary class subproblems.
                Learner = (param) => new ProbabilisticOutputCalibration<IKernel>(param.Model)
            };


            // Configure parallel execution options
            calibration.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Learn a machine
            calibration.Learn(inputs, outputs);

            // Obtain class predictions for each sample
            int[] predicted = machine.Decide(inputs);

            // Get class scores for each sample
            double[] scores = machine.Score(inputs);

            // Get per-class log-likelihoods (the winning class' entry should match its score)
            double[][] logl = machine.LogLikelihoods(inputs);

            // Get probability for each sample
            double[][] prob = machine.Probabilities(inputs);

            // Compute classification error
            double error = new ZeroOneLoss(outputs).Loss(predicted);
            double loss = new CategoryCrossEntropyLoss(outputs).Loss(prob);
            

            //string str = logl.ToCSharp();

            double[] expectedScores =
            {
                1.87436400885238, 1.81168086449304, 1.74038320983522, 
                1.87436400885238, 1.81168086449304, 1.55446926953952, 
                1.67016543853596, 1.67016543853596, 1.83135194001403, 
                1.83135194001403, 1.59836868669125, 2.0618816310294 
            };

            double[][] expectedLogL =
            {
                new double[] { 1.87436400885238, -1.87436400885238, -1.7463646841257 },
                new double[] { 1.81168086449304, -1.81168086449304, -1.73142460658826 },
                new double[] { 1.74038320983522, -1.58848669816072, -1.74038320983522 },
                new double[] { 1.87436400885238, -1.87436400885238, -1.7463646841257 },
                new double[] { 1.81168086449304, -1.81168086449304, -1.73142460658826 },
                new double[] { -1.55446926953952, 1.55446926953952, -0.573599079216229 },
                new double[] { -0.368823000428743, 1.67016543853596, -1.67016543853596 },
                new double[] { -0.368823000428743, 1.67016543853596, -1.67016543853596 },
                new double[] { -1.83135194001403, -1.20039293330558, 1.83135194001403 },
                new double[] { -1.83135194001403, -1.20039293330558, 1.83135194001403 },
                new double[] { -0.894598978116595, -1.59836868669125, 1.59836868669125 },
                new double[] { -1.87336852014759, -2.0618816310294, 2.0618816310294 } 
            };

            double[][] expectedProbs =
            {
                new double[] { 0.95209908906855, 0.0224197237689656, 0.0254811871624848 },
                new double[] { 0.947314032745205, 0.0252864560196241, 0.0273995112351714 },
                new double[] { 0.937543314993345, 0.0335955309754816, 0.028861154031173 },
                new double[] { 0.95209908906855, 0.0224197237689656, 0.0254811871624848 },
                new double[] { 0.947314032745205, 0.0252864560196241, 0.0273995112351714 },
                new double[] { 0.0383670466237636, 0.859316640577158, 0.102316312799079 },
                new double[] { 0.111669460983068, 0.857937888238824, 0.0303926507781076 },
                new double[] { 0.111669460983068, 0.857937888238824, 0.0303926507781076 },
                new double[] { 0.0238971617859334, 0.0449126146360623, 0.931190223578004 },
                new double[] { 0.0238971617859334, 0.0449126146360623, 0.931190223578004 },
                new double[] { 0.0735735561383806, 0.0363980776342206, 0.890028366227399 },
                new double[] { 0.0188668069460003, 0.0156252941482294, 0.96550789890577 } 
            };

            // Must be exactly the same as test above
            Assert.AreEqual(0, error);
            Assert.AreEqual(0.5, ((Gaussian)machine[0].Value.Kernel).Gamma);
            Assert.AreEqual(0.5, ((Gaussian)machine[1].Value.Kernel).Gamma);
            Assert.AreEqual(0.5, ((Gaussian)machine[2].Value.Kernel).Gamma);
            Assert.AreEqual(1.0231652126930515, loss);
            Assert.IsTrue(predicted.IsEqual(outputs));
            Assert.IsTrue(expectedScores.IsEqual(scores, 1e-10));
            Assert.IsTrue(expectedLogL.IsEqual(logl, 1e-10));
            Assert.IsTrue(expectedProbs.IsEqual(prob, 1e-10));
        }
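        // The expected values above are internally consistent: after
        // calibration, each row of probabilities is the softmax of the same
        // row of log-likelihoods (e.g. exp(1.8743...) normalized over the
        // first row gives 0.9520...). A minimal sketch verifying that
        // relation in plain C# (requires System.Linq):
        public void probabilities_are_softmax_of_logl(double[][] logl, double[][] prob)
        {
            for (int i = 0; i < logl.Length; i++)
            {
                // Exponentiate each class' log-likelihood...
                double[] e = logl[i].Select(Math.Exp).ToArray();
                double sum = e.Sum();

                // ...and normalize: the result is the probability row
                for (int j = 0; j < e.Length; j++)
                    Assert.AreEqual(prob[i][j], e[j] / sum, 1e-10);
            }
        }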
        public void multiclass_linear_smo_new_usage()
        {

            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0 
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // Create a one-vs-one learning algorithm using SMO, letting
            // the complexity (C) parameter be estimated heuristically
            var teacher = new MulticlassSupportVectorLearning<Linear>();
            teacher.Learner = (p) => new SequentialMinimalOptimization<Linear>()
            {
                UseComplexityHeuristic = true
            };

#if DEBUG
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

            // Learn a machine
            var machine = teacher.Learn(inputs, outputs);

            for (int i = 0; i < inputs.Length; i++)
            {
                double actual = machine.Decide(inputs[i]);
                double expected = outputs[i];
                Assert.AreEqual(expected, actual);
            }
        }
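        // The UseComplexityHeuristic flag above lets SMO pick the complexity
        // (C) parameter from the data. A minimal sketch of the explicit
        // alternative, fixing C by hand (the value 1.0 is illustrative only):
        public MulticlassSupportVectorLearning<Linear> explicit_complexity_sketch()
        {
            return new MulticlassSupportVectorLearning<Linear>()
            {
                Learner = (p) => new SequentialMinimalOptimization<Linear>()
                {
                    Complexity = 1.0 // instead of UseComplexityHeuristic = true
                }
            };
        }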
        public void multiclass_linear_new_usage()
        {
            #region doc_learn_ldcd
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0 
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // Create a one-vs-one multi-class SVM learning algorithm 
            var teacher = new MulticlassSupportVectorLearning<Linear>()
            {
                // using LIBLINEAR's L2-loss SVC dual for each SVM
                Learner = (p) => new LinearDualCoordinateDescent()
                {
                    Loss = Loss.L2
                }
            };

            // Configure parallel execution options
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Learn a machine
            var machine = teacher.Learn(inputs, outputs);

            // Obtain class predictions for each sample
            int[] predicted = machine.Decide(inputs);

            // Compute classification error
            double error = new ZeroOneLoss(outputs).Loss(predicted);
            #endregion

            Assert.AreEqual(0, error);
            Assert.IsTrue(predicted.IsEqual(outputs));
        }
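        // The machine learned above is not limited to its training samples.
        // A minimal sketch of classifying a previously unseen input (the
        // sample below is illustrative, not from the original test):
        public int classify_unseen_sketch(MulticlassSupportVectorMachine<Linear> machine)
        {
            double[] unseen = { 1, 0, 1, 0 };

            // Decide returns the index of the most likely class
            return machine.Decide(unseen);
        }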
        public void multiclass_gaussian_new_usage()
        {
            #region doc_learn_gaussian
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0 
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // Create the multi-class learning algorithm for the machine
            var teacher = new MulticlassSupportVectorLearning<Gaussian>()
            {
                // Configure the learning algorithm to use SMO to train the
                //  underlying SVMs in each of the binary class subproblems.
                Learner = (param) => new SequentialMinimalOptimization<Gaussian>()
                {
                    // Estimate a suitable guess for the Gaussian kernel's parameters.
                    // This estimate can serve as a starting point for a grid search.
                    UseKernelEstimation = true
                }
            };

            // Configure parallel execution options
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Learn a machine
            var machine = teacher.Learn(inputs, outputs);

            // Obtain class predictions for each sample
            int[] predicted = machine.Decide(inputs);

            // Get class scores for each sample
            double[] scores = machine.Score(inputs);

            // Compute classification error
            double error = new ZeroOneLoss(outputs).Loss(predicted);
            #endregion

            // Get per-class log-likelihoods (the winning class' entry should match its score)
            double[][] logl = machine.LogLikelihoods(inputs);

            // Get probability for each sample
            double[][] prob = machine.Probabilities(inputs);

            // Compute the cross-entropy loss for the predicted probabilities
            double loss = new CategoryCrossEntropyLoss(outputs).Loss(prob);

            //string str = scores.ToCSharp();
 
            double[] expectedScores =
            { 
                1.00888999727541, 1.00303259868784, 1.00068403386636, 1.00888999727541,
                1.00303259868784, 1.00831890183328, 1.00831890183328, 0.843757409449037, 
                0.996768862332386, 0.996768862332386, 1.02627325826713, 1.00303259868784,
                0.996967401312164, 0.961947708617365, 1.02627325826713
            };

            double[][] expectedLogL =
            {
                new double[] { 1.00888999727541, -1.00888999727541, -1.00135670089335 },
                new double[] { 1.00303259868784, -0.991681098166717, -1.00303259868784 },
                new double[] { 1.00068403386636, -0.54983354268499, -1.00068403386636 },
                new double[] { 1.00888999727541, -1.00888999727541, -1.00135670089335 },
                new double[] { 1.00303259868784, -0.991681098166717, -1.00303259868784 },
                new double[] { -1.00831890183328, 1.00831890183328, -0.0542719287771535 },
                new double[] { -1.00831890183328, 1.00831890183328, -0.0542719287771535 },
                new double[] { -0.843757409449037, 0.843757409449037, -0.787899083913034 },
                new double[] { -0.178272229157676, 0.996768862332386, -0.996768862332386 },
                new double[] { -0.178272229157676, 0.996768862332386, -0.996768862332386 },
                new double[] { -1.02627325826713, -1.00323113766761, 1.02627325826713 },
                new double[] { -1.00303259868784, -0.38657999872922, 1.00303259868784 },
                new double[] { -0.996967401312164, -0.38657999872922, 0.996967401312164 },
                new double[] { -0.479189991343958, -0.961947708617365, 0.961947708617365 },
                new double[] { -1.02627325826713, -1.00323113766761, 1.02627325826713 } 
            };

            double[][] expectedProbs =
            {
                new double[] { 0.789324598208647, 0.104940932711551, 0.105734469079803 },
                new double[] { 0.78704862182644, 0.107080012017624, 0.105871366155937 },
                new double[] { 0.74223157627093, 0.157455631737191, 0.100312791991879 },
                new double[] { 0.789324598208647, 0.104940932711551, 0.105734469079803 },
                new double[] { 0.78704862182644, 0.107080012017624, 0.105871366155937 },
                new double[] { 0.0900153422818135, 0.676287261796794, 0.233697395921392 },
                new double[] { 0.0900153422818135, 0.676287261796794, 0.233697395921392 },
                new double[] { 0.133985810363445, 0.72433118122885, 0.141683008407705 },
                new double[] { 0.213703968297751, 0.692032433073136, 0.0942635986291124 },
                new double[] { 0.213703968297751, 0.692032433073136, 0.0942635986291124 },
                new double[] { 0.10192623206507, 0.104302095948601, 0.79377167198633 },
                new double[] { 0.0972161784678357, 0.180077937396817, 0.722705884135347 },
                new double[] { 0.0981785890979593, 0.180760971768703, 0.721060439133338 },
                new double[] { 0.171157270099157, 0.105617610634377, 0.723225119266465 },
                new double[] { 0.10192623206507, 0.104302095948601, 0.79377167198633 } 
            };

            Assert.AreEqual(0, error);
            Assert.AreEqual(4.5289447815997672, loss, 1e-10);
            Assert.IsTrue(predicted.IsEqual(outputs));
            Assert.IsTrue(expectedScores.IsEqual(scores, 1e-10));
            Assert.IsTrue(expectedLogL.IsEqual(logl, 1e-10));
            Assert.IsTrue(expectedProbs.IsEqual(prob, 1e-10));
        }
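        // UseKernelEstimation above asks the teacher to guess the Gaussian
        // parameters from the data. A minimal sketch of doing that step by
        // hand, assuming the kernel's static Estimate helper; the flag-based
        // route above is the one this test actually exercises:
        public SequentialMinimalOptimization<Gaussian> manual_estimation_sketch(double[][] inputs)
        {
            // Estimate sigma/gamma from the distribution of the input data
            Gaussian kernel = Gaussian.Estimate(inputs);

            return new SequentialMinimalOptimization<Gaussian>()
            {
                Kernel = kernel,
                UseKernelEstimation = false // already estimated above
            };
        }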
        public void kaggle_digits_with_compress()
        {
            string root = Environment.CurrentDirectory;
            var training = Properties.Resources.trainingsample;
            var validation = Properties.Resources.validationsample;

            var tset = readData(training);
            var observations = tset.Item1;
            var labels = tset.Item2;

            var teacher = new MulticlassSupportVectorLearning<Linear>();

            var svm = teacher.Learn(observations, labels);

            Assert.AreEqual(50, svm.Models[0][0].SupportVectors.Length);
            Assert.AreEqual(127, svm.Models[1][0].SupportVectors.Length);
            svm.Compress();
            Assert.AreEqual(1, svm.Models[0][0].SupportVectors.Length);
            Assert.AreEqual(1, svm.Models[1][0].SupportVectors.Length);

            {
                var trainingLoss = new ZeroOneLoss(labels)
                {
                    Mean = true
                };

                double error = trainingLoss.Loss(svm.Decide(observations));
                Assert.AreEqual(0.054, error);
            }

            {
                var vset = readData(validation);
                var validationData = vset.Item1;
                var validationLabels = vset.Item2;

                var validationLoss = new ZeroOneLoss(validationLabels)
                {
                    Mean = true
                };

                double val = validationLoss.Loss(svm.Decide(validationData));
                Assert.AreEqual(0.082, val);
            }
        }
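        // The readData helper used above is not shown in this snippet. A
        // hypothetical sketch of what it might look like, assuming the usual
        // Kaggle digits CSV layout (a header row, then a label followed by
        // pixel values on each line) — an assumption, not the original helper:
        private static Tuple<double[][], int[]> readData(string csv)
        {
            string[] lines = csv.Split(new[] { '\r', '\n' },
                StringSplitOptions.RemoveEmptyEntries);

            var inputs = new double[lines.Length - 1][];
            var labels = new int[lines.Length - 1];

            for (int i = 1; i < lines.Length; i++) // skip the header row
            {
                string[] fields = lines[i].Split(',');
                labels[i - 1] = int.Parse(fields[0]);
                inputs[i - 1] = fields.Skip(1).Select(double.Parse).ToArray();
            }

            return Tuple.Create(inputs, labels);
        }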
        public void multiclass_new_usage_method_polynomial()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };

            var learner = new MulticlassSupportVectorLearning<Polynomial>()
            {
                Learner = (p) => new SequentialMinimalOptimization<Polynomial>()
                {
                    Model = p.Model,
                    Complexity = 1,
                    Kernel = new Polynomial(2)
                }
            };

            learner.ParallelOptions.MaxDegreeOfParallelism = 1;

            MulticlassSupportVectorMachine<Polynomial> msvm = learner.Learn(inputs, outputs);

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            int[] evals = { 8, 8, 7, 7, 7, 7, 6, 6 };

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                msvm.Method = MulticlassComputeMethod.Elimination;
                double actual = msvm.Decide(inputs[i]);
                Assert.AreEqual(expected, actual);
                Assert.AreEqual(evals[i], msvm.GetLastKernelEvaluations());
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                msvm.Method = MulticlassComputeMethod.Voting;
                double actual = msvm.Decide(inputs[i]);
                Assert.AreEqual(expected, actual);
                Assert.AreEqual(8, msvm.GetLastKernelEvaluations());
            }
        }
        public void multiclass_new_usage_method_linear()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };

            var learner = new MulticlassSupportVectorLearning<Linear>();

            learner.Learner = (_) => new LinearCoordinateDescent()
            {
                Complexity = 1
            };

            learner.ParallelOptions.MaxDegreeOfParallelism = 1;

            MulticlassSupportVectorMachine<Linear> msvm = learner.Learn(inputs, outputs);

            // Linear machines in compact form do not require kernel evaluations
            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                msvm.Method = MulticlassComputeMethod.Elimination;
                double actual = msvm.Decide(inputs[i]);
                Assert.AreEqual(expected, actual);
                Assert.AreEqual(0, msvm.GetLastKernelEvaluations());
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                msvm.Method = MulticlassComputeMethod.Voting;
                double actual = msvm.Decide(inputs[i]);
                Assert.AreEqual(expected, actual);
                Assert.AreEqual(0, msvm.GetLastKernelEvaluations());
            }
        }
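        // For K classes, one-vs-one training builds K * (K - 1) / 2 binary
        // machines; Elimination (a decision DAG) consults only K - 1 of them
        // per prediction, while Voting consults all of them. A minimal
        // arithmetic sketch for the 4-class problems above:
        public void pairwise_machine_count_sketch()
        {
            int classes = 4;

            int totalMachines = classes * (classes - 1) / 2;  // 6 binary SVMs
            int eliminationSteps = classes - 1;               // 3 decisions per sample

            Assert.AreEqual(6, totalMachines);
            Assert.AreEqual(3, eliminationSteps);
        }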
Example n. 12
        /// <summary>
        ///   Creates a Support Vector Machine and estimate 
        ///   its parameters using a learning algorithm.
        /// </summary>
        /// 
        private void btnRunTraining_Click(object sender, EventArgs e)
        {
            if (dgvTrainingSource.Rows.Count == 0)
            {
                MessageBox.Show("Please load the training data before clicking this button");
                return;
            }

            lbStatus.Text = "Gathering data. This may take a while...";
            Application.DoEvents();



            // Extract inputs and outputs
            int rows = dgvTrainingSource.Rows.Count;
            double[][] input = new double[rows][];
            int[] output = new int[rows];
            for (int i = 0; i < rows; i++)
            {
                input[i] = (double[])dgvTrainingSource.Rows[i].Cells["colTrainingFeatures"].Value;
                output[i] = (int)dgvTrainingSource.Rows[i].Cells["colTrainingLabel"].Value;
            }

            // Create the chosen kernel function 
            // using the user interface parameters
            //
            IKernel kernel = createKernel();

            // Extract training parameters from the interface
            double complexity = (double)numComplexity.Value;
            double tolerance = (double)numTolerance.Value;
            int cacheSize = (int)numCache.Value;
            SelectionStrategy strategy = (SelectionStrategy)cbStrategy.SelectedItem;

            // Create the learning algorithm using the machine and the training data
            var ml = new MulticlassSupportVectorLearning<IKernel>()
            {
                // Configure the learning algorithm
                Learner = (param) => new SequentialMinimalOptimization<IKernel>()
                {
                    Complexity = complexity,
                    Tolerance = tolerance,
                    CacheSize = cacheSize,
                    Strategy = strategy,
                    Kernel = kernel
                }
            };


            lbStatus.Text = "Training the classifiers. This may take a (very) significant amount of time...";
            Application.DoEvents();

            Stopwatch sw = Stopwatch.StartNew();

            // Train the machines. It should take a while.
            ksvm = ml.Learn(input, output);

            // If we created a linear machine, compress the support vectors 
            // into one single parameter vector for increased performance:
            if (ksvm.Kernel is Linear)
            {
                ksvm.Compress();
            }

            sw.Stop();

            double error = new ZeroOneLoss(output)
            {
                Mean = true
            }.Loss(ksvm.Decide(input));


            lbStatus.Text = String.Format(
                "Training complete ({0} ms, error: {1}). Click Classify to test the classifiers.",
                sw.ElapsedMilliseconds, error);

            // Update the interface status
            btnClassifyVoting.Enabled = true;
            btnClassifyElimination.Enabled = true;
            btnCalibration.Enabled = true;


            // Populate the information tab with the machines
            dgvMachines.Rows.Clear();
            int k = 1;
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < i; j++, k++)
                {
                    var machine = ksvm[i, j];

                    int sv = machine.SupportVectors == null ? 0 : machine.SupportVectors.Length;

                    int c = dgvMachines.Rows.Add(k, i + "-vs-" + j, sv, machine.Threshold);
                    dgvMachines.Rows[c].Tag = machine;
                }
            }

            // approximate size in bytes =
            //   number of unique support vectors * 1024 doubles per vector * size of a double
            int bytes = ksvm.SupportVectorUniqueCount * 1024 * sizeof(double);
            float megabytes = bytes / (1024f * 1024f);
            lbSize.Text = String.Format("{0} ({1} MB)", ksvm.SupportVectorUniqueCount, megabytes);
        }
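        // The createKernel() call above reads kernel settings from the UI.
        // A hypothetical sketch of such a factory, assuming controls named
        // cbKernel, numDegree and numSigma (assumptions; the real control
        // names are not shown in this snippet):
        private IKernel createKernelSketch()
        {
            switch ((string)cbKernel.SelectedItem)
            {
                case "Polynomial":
                    return new Polynomial((int)numDegree.Value);
                case "Gaussian":
                    return new Gaussian((double)numSigma.Value);
                default:
                    return new Linear();
            }
        }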
Example n. 13
        /// <summary>
        ///   Calibrates the current Support Vector Machine to produce
        ///   probabilistic outputs using ProbabilisticOutputLearning.
        /// </summary>
        /// 
        private void btnRunCalibration_Click(object sender, EventArgs e)
        {
            if (ksvm == null)
            {
                MessageBox.Show("Please train the machines first.");
                return;
            }

            // Extract inputs and outputs
            int rows = dgvTrainingSource.Rows.Count;
            double[][] input = new double[rows][];
            int[] output = new int[rows];
            for (int i = 0; i < rows; i++)
            {
                input[i] = (double[])dgvTrainingSource.Rows[i].Cells["colTrainingFeatures"].Value;
                output[i] = (int)dgvTrainingSource.Rows[i].Cells["colTrainingLabel"].Value;
            }



            // Create the calibration algorithm using the training data
            var ml = new MulticlassSupportVectorLearning<IKernel>()
            {
                Model = ksvm,

                // Configure the calibration algorithm
                Learner = (p) => new ProbabilisticOutputCalibration<IKernel>()
                {
                    Model = p.Model
                }
            };


            lbStatus.Text = "Calibrating the classifiers. This may take a (very) significant amount of time...";
            Application.DoEvents();

            Stopwatch sw = Stopwatch.StartNew();

            // Train the machines. It should take a while.
            ml.Learn(input, output);

            sw.Stop();

            double error = new ZeroOneLoss(output).Loss(ksvm.Decide(input));

            lbStatus.Text = String.Format(
                "Calibration complete ({0} ms, error: {1}). Click Classify to test the classifiers.",
                sw.ElapsedMilliseconds, error);

            btnClassifyVoting.Enabled = true;
        }
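        // After calibration, the same machine can report probabilities in
        // addition to crisp decisions. A minimal sketch using the calls
        // already exercised in the tests above (Decide and Probabilities):
        private void reportProbabilitiesSketch(double[] input)
        {
            // Most likely class for this sample
            int decision = ksvm.Decide(input);

            // Calibrated probability of each class (the row sums to 1)
            double[] probabilities = ksvm.Probabilities(input);
        }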