Example #1
        public void multilabelSVM()
        {
            var teacher = new MulticlassSupportVectorLearning<Gaussian>()
            {
                // Configure the learning algorithm to use SMO to train the
                //  underlying SVMs in each of the binary class subproblems.
                Learner = (param) => new SequentialMinimalOptimization<Gaussian>()
                {
                    // Estimate a suitable guess for the Gaussian kernel's parameters.
                    // This estimate can serve as a starting point for a grid search.
                    UseKernelEstimation = true
                }
            };

            // Learn a machine
            var machine = teacher.Learn(inputs, outputs);


            // Create the calibration algorithm for the trained machine
            var calibration = new MulticlassSupportVectorLearning<Gaussian>()
            {
                Model = machine, // We will start with an existing machine

                // Configure the learning algorithm to use Platt's calibration
                Learner = (param) => new ProbabilisticOutputCalibration<Gaussian>()
                {
                    Model = param.Model // Start with an existing machine
                }
            };


            // Configure parallel execution options
            calibration.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Calibrate the machine's probability outputs
            calibration.Learn(inputs, outputs);

            // Obtain class predictions for each sample
            int[] predicted = machine.Decide(inputs);

            // Get class scores for each sample
            double[] scores = machine.Score(inputs);

            // Get log-likelihoods (should be same as scores)
            double[][] logl = machine.LogLikelihoods(inputs);

            // Get probability for each sample
            double[][] prob = machine.Probabilities(inputs);

            // Compute classification error
            double error = new ZeroOneLoss(outputs).Loss(predicted);
            double loss  = new CategoryCrossEntropyLoss(outputs).Loss(prob);

            message += "SVM Validacja\n";
            message += "error " + error.ToString() + "\n";
            message += "loss " + loss.ToString() + "\n\n";
        }
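
This snippet references class-level members (inputs, outputs, message) and using directives defined elsewhere in the project. A minimal sketch of what those surroundings might look like, with placeholder sample data rather than the project's actual data, is shown below:

        // Assumed usings: Accord.MachineLearning.VectorMachines.Learning,
        //                 Accord.Statistics.Kernels, Accord.Math.Optimization.Losses

        // Hypothetical fields referenced by the method above (placeholder values only)
        double[][] inputs =
        {
            new double[] { 0.0, 0.1 },  // class 0
            new double[] { 0.2, 0.0 },  // class 0
            new double[] { 4.0, 4.2 },  // class 1
            new double[] { 4.1, 3.9 },  // class 1
            new double[] { 8.0, 8.1 },  // class 2
            new double[] { 7.9, 8.2 }   // class 2
        };
        int[] outputs = { 0, 0, 1, 1, 2, 2 };
        string message = "";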
Example #2
        public void Learn_Clicked(object sender, EventArgs args)
        {
            Task.Factory.StartNew(() =>
            {
                var bow = CreateBow();
                foreach (var image in Images)
                {
                    TrainingData.Add(GetData(image, bow));
                }

                var kernel  = new Polynomial();
                var teacher = new MulticlassSupportVectorLearning<IKernel>()
                {
                    Kernel  = kernel,
                    Learner = (param) => new SequentialMinimalOptimization<IKernel>()
                    {
                        Kernel = kernel
                    }
                };

                var svm   = teacher.Learn(TrainingData.ToArray(), Tags.ToArray());
                var error = new ZeroOneLoss(Tags.ToArray()).Loss(svm.Decide(TrainingData.ToArray()));
                Error.Dispatcher.Invoke(() => Error.Text = error.ToString());

                //var kernel = new Polynomial(16, 5);
                //var complexity = CalculateComplexity(kernel);
                //var ml = new MulticlassSupportVectorLearning<IKernel>()
                //{
                //    Learner = (param) => new SequentialMinimalOptimization<IKernel>()
                //    {
                //        Complexity = complexity,
                //        Kernel = kernel
                //    }
                //};

                //machine = ml.Learn(TrainingData.ToArray(), Tags.ToArray());
                //var result = machine.Decide(TrainingData.ToArray());
                //var error = new ZeroOneLoss(Tags.ToArray())
                //{
                //    Mean = true
                //}.Loss(result);

                //Error.Dispatcher.Invoke(() => Error.Text = error.ToString());
            });
        }
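
In this handler the same Polynomial kernel is assigned both on the teacher and on each SMO learner. A shorter sketch of the training step, written with a concrete kernel type parameter in the style of the Accord.NET documentation examples, might look like the following; the method name, the degree of 2, and the trainingData/tags parameters are placeholders, not part of the original project:

        // Assumed usings: Accord.MachineLearning.VectorMachines.Learning,
        //                 Accord.Statistics.Kernels, Accord.Math.Optimization.Losses
        public void LearnWithPolynomialKernel(double[][] trainingData, int[] tags)
        {
            var teacher = new MulticlassSupportVectorLearning<Polynomial>()
            {
                // Each binary sub-problem is trained with SMO using the same polynomial kernel
                Learner = (param) => new SequentialMinimalOptimization<Polynomial>()
                {
                    Kernel = new Polynomial(2)  // degree chosen arbitrarily for this sketch
                }
            };

            var machine = teacher.Learn(trainingData, tags);
            int[] predicted = machine.Decide(trainingData);

            // With Mean = true, the loss is the fraction of misclassified samples
            double error = new ZeroOneLoss(tags) { Mean = true }.Loss(predicted);
        }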