Example no. 1
        public void RunTest3()
        {
            // Example XOR problem (the +1 class marks the points where XOR is 0)
            double[][] inputs =
            {
                new double[] { 0, 0 }, // 0 xor 0: 0 (label +1)
                new double[] { 0, 1 }, // 0 xor 1: 1 (label -1)
                new double[] { 1, 0 }, // 1 xor 0: 1 (label -1)
                new double[] { 1, 1 }  // 1 xor 1: 0 (label +1)
            };
            };

            // Labels for a binary (dichotomy) SVM must be given as -1 or +1
            int[] labels =
            {
                1, -1, -1, 1
            };

            // Create a Kernel Support Vector Machine for the given inputs
            KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length);

            // Instantiate a new learning algorithm for SVMs
            SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, labels);

            // Set up the learning algorithm
            smo.Complexity = 1.0;

            // Run the learning algorithm
            double error = smo.Run();

            // Instantiate the probabilistic learning calibration
            ProbabilisticOutputLearning calibration = new ProbabilisticOutputLearning(svm, inputs, labels);

            // Run the calibration algorithm
            double loglikelihood = calibration.Run();


            // Compute the decision output for one of the input vectors,
            // while also retrieving the probability of the answer

            double probability;
            int    decision = svm.Compute(inputs[0], out probability);

            // At this point, decision is +1 with a probability of 75%

            Assert.AreEqual(1, decision);
            Assert.AreEqual(0.74999975815069375, probability);
        }
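
The test above only queries the first training point. A minimal sketch, reusing the svm, inputs and labels from RunTest3 and only the calls already shown above, that checks the decision and the calibrated probability for all four points:

        // Sketch: after calibration, query every training point.
        // Reuses svm, inputs and labels defined in RunTest3 above.
        for (int i = 0; i < inputs.Length; i++)
        {
            double p;
            int decision = svm.Compute(inputs[i], out p);

            // The decision should reproduce the training label, and the
            // calibrated probability should be a valid number in [0, 1].
            Assert.AreEqual(labels[i], decision);
            Assert.IsFalse(Double.IsNaN(p));
        }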
Example no. 2
        public void RunTest1()
        {
            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] outputs =
            {
                -1,
                1,
                1,
                -1
            };

            KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Gaussian(3.6), 2);

            SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, outputs);

            double error1 = smo.Run();

            Assert.AreEqual(0, error1);

            double[] distances = new double[outputs.Length];
            for (int i = 0; i < outputs.Length; i++)
            {
                int y = svm.Compute(inputs[i], out distances[i]);
                Assert.AreEqual(outputs[i], y);
            }


            ProbabilisticOutputLearning target = new ProbabilisticOutputLearning(svm, inputs, outputs);

            double ll0 = target.LogLikelihood(inputs, outputs);

            double ll1 = target.Run();

            double ll2 = target.LogLikelihood(inputs, outputs);

            Assert.AreEqual(3.4256203116918824, ll1);
            Assert.AreEqual(ll1, ll2);
            Assert.IsTrue(ll1 > ll0);

            double[] probs = new double[outputs.Length];
            for (int i = 0; i < outputs.Length; i++)
            {
                int y = svm.Compute(inputs[i], out probs[i]);
                Assert.AreEqual(outputs[i], y);
            }

            Assert.AreEqual(0.25, probs[0], 1e-5);
            Assert.AreEqual(0.75, probs[1], 1e-5);
            Assert.AreEqual(0.75, probs[2], 1e-5);
            Assert.AreEqual(0.25, probs[3], 1e-5);

            foreach (var p in probs)
            {
                Assert.IsFalse(Double.IsNaN(p));
            }
        }
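
ProbabilisticOutputLearning performs a Platt-style calibration: a sigmoid is fitted to the machine's raw decision values by maximizing the log-likelihood that Run and LogLikelihood report above, after which Compute's out parameter carries a probability instead of a raw distance. A minimal sketch of that mapping (PlattProbability, A and B are illustrative names, not part of the library's API):

        // Illustrative only: Platt-style mapping from a raw decision value f
        // to an estimated probability of the positive class, using fitted
        // sigmoid parameters A and B.
        static double PlattProbability(double f, double A, double B)
        {
            return 1.0 / (1.0 + Math.Exp(A * f + B));
        }

This is consistent with the assertions above: after calibration the two -1 points map near 0.25 and the two +1 points near 0.75, while the decisions themselves are unchanged.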
Example no. 3
        // IKernel kernel;



        public double BuildTheModel(double[][] inputs, int[] outputs, int ClassNum, ConfigurationFieldClassifier config)
        {
            Reset();

            IKernel kernel = null;

            switch (config.AccordConfiguration.Kernel)
            {
            case KernelTypes.Gaussian:
                kernel = new Gaussian(config.AccordConfiguration.GaussianKernel.Sigma);
                break;

            case KernelTypes.Polynomial:
                kernel = new Polynomial(config.AccordConfiguration.PolynominalKernel.Degree, config.AccordConfiguration.PolynominalKernel.Constant);
                break;

            case KernelTypes.ChiSquare:
                kernel = new ChiSquare();
                break;

            case KernelTypes.HistogramIntersction:
                kernel = new HistogramIntersection();
                break;

            default:
                // No matching kernel type was configured: kernel stays null and the code below will throw.
                break;
            }


            Tuple<double, double> estimatedComplexity = SequentialMinimalOptimization.EstimateComplexity(kernel, inputs, outputs);

            machine = new MulticlassSupportVectorMachine(inputs[0].Length, kernel, ClassNum);


            teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs);

            // Configure the learning algorithm
            teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs);
                smo.Complexity = config.AccordConfiguration.Complexity;
                smo.Tolerance  = config.AccordConfiguration.Tolerance;
                smo.CacheSize  = config.AccordConfiguration.CacheSize;
                smo.Strategy   = (Accord.MachineLearning.VectorMachines.Learning.SelectionStrategy)((int)(config.AccordConfiguration.SelectionStrategy));
                //  smo.UseComplexityHeuristic = true;
                //  smo.PositiveWeight = 1;
                // smo.NegativeWeight = 1;
                smo.Run();
                var probabilisticOutputLearning = new ProbabilisticOutputLearning(svm, classInputs, classOutputs);
                return(probabilisticOutputLearning);

                // return smo;
            };

            // Train the machines. This may take a while.
            double error = teacher.Run();

            return error;
        }
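
Once BuildTheModel returns, the multiclass machine stored in the machine field has been trained, with each pairwise sub-machine calibrated by ProbabilisticOutputLearning. A minimal usage sketch (sample is a hypothetical feature vector of the same length as the training inputs, and the Compute overload with an out parameter is assumed to behave like the binary svm.Compute used in the tests above):

        // Sketch only: classify one feature vector with the machine trained above.
        double[] sample = new double[inputs[0].Length];   // hypothetical feature vector
        double output;
        int predictedClass = machine.Compute(sample, out output);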
Example no. 4
        // IKernel kernel;



        public double BuildTheModel(double[][] inputs, int[] outputs, int ClassNum, ConfigurationFieldClassifier config)
        {
            cpuCounter.CategoryName = "Processor";
            cpuCounter.CounterName  = "% Processor Time";
            cpuCounter.InstanceName = "_Total";


            Reset();
            _usenongoldenclass = config.FeatureExtraction.UseNonGoldenClass;
            //  scalers = scalresin;

            IKernel kernel = null;

            switch (config.AccordConfiguration.Kernel)
            {
            case KernelTypes.Gaussian:
                kernel = new Gaussian(config.AccordConfiguration.GaussianKernel.Sigma);
                break;

            case KernelTypes.Polynomial:
                kernel = new Polynomial(config.AccordConfiguration.PolynominalKernel.Degree, config.AccordConfiguration.PolynominalKernel.Constant);
                break;

            case KernelTypes.ChiSquare:
                kernel = new ChiSquare();
                break;

            case KernelTypes.HistogramIntersction:
                kernel = new HistogramIntersection();
                break;

            default:
                // No matching kernel type was configured: kernel stays null and the multiclass branch will throw.
                break;
            }



            if (ClassNum > 2)
            {
                m_machine = new MulticlassSupportVectorMachine(inputs[0].Length, kernel, ClassNum);

                m_teacher = (new MulticlassSupportVectorLearning((MulticlassSupportVectorMachine)m_machine, inputs, outputs));

                (m_teacher as MulticlassSupportVectorLearning).Algorithm = (svm, classInputs, classOutputs, i, j) =>
                {
                    var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs);
                    smo.Complexity = config.AccordConfiguration.Complexity;
                    smo.Tolerance  = config.AccordConfiguration.Tolerance;
                    smo.CacheSize  = config.AccordConfiguration.CacheSize;
                    smo.Strategy   = (Accord.MachineLearning.VectorMachines.Learning.SelectionStrategy)((int)(config.AccordConfiguration.SelectionStrategy));
                    //  smo.UseComplexityHeuristic = true;
                    //  smo.PositiveWeight = 1;

                    // Back off while overall CPU usage is above 50%, giving up after ~25 minutes (30000 * 50 ms).
                    int k = 0;
                    while (cpuCounter.NextValue() > 50)
                    {
                        Thread.Sleep(50);
                        k++;
                        if (k > 30000)
                        {
                            break;
                        }
                    }
                    // smo.NegativeWeight = 1;


                    smo.Run();
                    var probabilisticOutputLearning = new ProbabilisticOutputLearning(svm, classInputs, classOutputs);
                    return(probabilisticOutputLearning);

                    // return smo;
                };
            }
            else
            {
                // TODO: fix to base types, then run, then make the other two changes from LATEST - line ... - and check CLUSTER and RUNTEST again.

                m_machine = new SupportVectorMachine(inputs[0].Length);
                m_teacher = new SequentialMinimalOptimization((SupportVectorMachine)m_machine, inputs, outputs);
                (m_teacher as SequentialMinimalOptimization).Complexity = config.AccordConfiguration.Complexity;
                (m_teacher as SequentialMinimalOptimization).Tolerance  = config.AccordConfiguration.Tolerance;
                (m_teacher as SequentialMinimalOptimization).CacheSize  = config.AccordConfiguration.CacheSize;
                (m_teacher as SequentialMinimalOptimization).Strategy   = (Accord.MachineLearning.VectorMachines.Learning.SelectionStrategy)((int)(config.AccordConfiguration.SelectionStrategy));
            }



            // Train the machines. This may take a while.
            double error = m_teacher.Run();

            return error;
        }
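
Note that only the multiclass branch (ClassNum > 2) applies ProbabilisticOutputLearning; the binary branch trains a plain SupportVectorMachine with SMO, so its outputs remain raw decision values rather than probabilities. If calibrated outputs were wanted there as well, a sketch mirroring RunTest3/RunTest1 could run after m_teacher.Run() - a hypothetical addition, assuming ProbabilisticOutputLearning accepts the plain SupportVectorMachine trained here just as it accepts the kernel machines in the tests above:

        // Hypothetical follow-up for the binary branch: calibrate the trained
        // machine so that its output becomes a probability, mirroring the
        // pattern used in RunTest3 and RunTest1 above.
        var calibration = new ProbabilisticOutputLearning((SupportVectorMachine)m_machine, inputs, outputs);
        double logLikelihood = calibration.Run();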