Example #1
        public void gh_758()
        {
            // Let's say we have the following data to be classified into three
            // non-mutually-exclusive classes. These are the samples:
            //
            int[][] inputs =
            {
                //               input         output
                new int[] { 0, 1, 1, 0 }, //  0
                new int[] { 0, 1, 0, 0 }, //  0
                new int[] { 0, 0, 1, 0 }, //  0
                new int[] { 0, 1, 1, 0 }, //  0, 1
                new int[] { 0, 1, 0, 0 }, //  0, 1
                new int[] { 1, 0, 0, 0 }, //     1
                new int[] { 1, 0, 0, 0 }, //     1
                new int[] { 1, 0, 0, 1 }, //     1, 2
                new int[] { 0, 0, 0, 1 }, //     1, 2
                new int[] { 0, 0, 0, 1 }, //     1, 2
                new int[] { 1, 1, 1, 1 }, //        2
                new int[] { 1, 0, 1, 1 }, //        2
                new int[] { 1, 1, 0, 1 }, //        2
                new int[] { 0, 1, 1, 1 }, //        2
                new int[] { 1, 1, 1, 1 }, //        2
            };

            int[][] outputs = // the class labels, one 0/1 indicator per class
            {
                new[] { 1, 0, 0 },
                new[] { 1, 0, 0 },
                new[] { 1, 0, 0 },
                new[] { 1, 1, 0 },
                new[] { 1, 1, 0 },
                new[] { 0, 1, 0 },
                new[] { 0, 1, 0 },
                new[] { 0, 1, 1 },
                new[] { 0, 1, 1 },
                new[] { 0, 1, 1 },
                new[] { 0, 0, 1 },
                new[] { 0, 0, 1 },
                new[] { 0, 0, 1 },
                new[] { 0, 0, 1 },
                new[] { 0, 0, 1 },
            };

            // Create a new Naive Bayes teacher
            var teacher = new NaiveBayesLearning();

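            // Run the learning algorithm on a single thread (keeps the results reproducible)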
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

            var bayes = teacher.Learn(inputs, outputs);

            double[][] prediction = bayes.Probabilities(inputs);

            // Evaluate the predicted probabilities using the binary cross-entropy loss:
            double error = new BinaryCrossEntropyLoss(outputs).Loss(prediction);

            Assert.AreEqual(11.566909963298386, error, 1e-8);

            Assert.IsTrue(teacher.optimized);
        }
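The probabilities returned by the model above are per-class estimates for a multi-label problem, so turning them into label sets needs an explicit cutoff. A minimal follow-on sketch, reusing the bayes model and inputs from the example (the 0.5 threshold is an illustrative assumption, not something the test prescribes):

            // Mark class j as present whenever its estimated probability exceeds 0.5
            double[][] probs = bayes.Probabilities(inputs);
            bool[][] decisions = new bool[probs.Length][];
            for (int i = 0; i < probs.Length; i++)
            {
                decisions[i] = new bool[probs[i].Length];
                for (int j = 0; j < probs[i].Length; j++)
                    decisions[i][j] = probs[i][j] > 0.5; // assumed cutoff
            }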
Example #2
        public void learn_test()
        {
            #region doc_learn
            double[][] inputs =        // Example XOR problem (note: the labels
            {                          // below are the complement of xor)
                new double[] { 0, 0 }, // 0 xor 0 = 0, mapped to label 1 (+1)
                new double[] { 0, 1 }, // 0 xor 1 = 1, mapped to label 0 (-1)
                new double[] { 1, 0 }, // 1 xor 0 = 1, mapped to label 0 (-1)
                new double[] { 1, 1 }  // 1 xor 1 = 0, mapped to label 1 (+1)
            };

            int[] outputs = // XOR outputs
            {
                1, 0, 0, 1
            };

            // Instantiate a new SMO learning algorithm for SVMs
            var smo = new SequentialMinimalOptimization<Gaussian>()
            {
                Kernel     = new Gaussian(0.1),
                Complexity = 1.0
            };

            // Learn an SVM using the algorithm
            var svm = smo.Learn(inputs, outputs);

            // Predict labels for each input sample
            bool[] predicted = svm.Decide(inputs);

            // Compute classification error
            double error = new ZeroOneLoss(outputs).Loss(predicted);

            // Instantiate the probabilistic calibration (using Platt's scaling)
            var calibration = new ProbabilisticOutputCalibration<Gaussian>(svm);

            // Run the calibration algorithm
            calibration.Learn(inputs, outputs); // returns the same machine

            // Predict probabilities of each input sample
            double[] probabilities = svm.Probability(inputs);

            // Compute the log-loss between the expected outputs and the predicted probabilities
            double loss = new BinaryCrossEntropyLoss(outputs).Loss(probabilities);

            // Compute the decision output for one of the input vectors,
            // while also retrieving the probability of the answer

            bool   decision;
            double probability = svm.Probability(inputs[0], out decision);
            #endregion

            // At this point, decision is true (the positive class) with a probability of 75%

            Assert.AreEqual(true, decision);
            Assert.AreEqual(0, error);
            Assert.AreEqual(5.5451735748925355, loss);
            Assert.AreEqual(0.74999975815069375, probability, 1e-10);
            Assert.IsTrue(svm.IsProbabilistic);
            Assert.AreEqual(-1.0986109988055595, svm.Weights[0]);
            Assert.AreEqual(1.0986109988055595, svm.Weights[1]);
            Assert.AreEqual(-1.0986109988055595, svm.Weights[2]);
            Assert.AreEqual(1.0986109988055595, svm.Weights[3]);
        }
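Neither example above includes its using directives. For reference, the types used on this page live in the following Accord.NET namespaces (my recollection of the library's layout; verify against the version in use):

using Accord.MachineLearning.Bayes;                   // NaiveBayesLearning
using Accord.MachineLearning.VectorMachines.Learning; // SequentialMinimalOptimization, ProbabilisticOutputCalibration
using Accord.Math.Optimization.Losses;                // ZeroOneLoss, BinaryCrossEntropyLoss
using Accord.Statistics.Distributions.Fitting;        // NormalOptions, IndependentOptions
using Accord.Statistics.Distributions.Univariate;     // NormalDistribution
using Accord.Statistics.Kernels;                      // Gaussian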
Example #3
        public void gh_758()
        {
            // Let's say we have the following data to be classified into three
            // non-mutually-exclusive classes. These are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0, 1
                new double[] { 0, 1, 0, 0 }, //  0, 1
                new double[] { 1, 0, 0, 0 }, //     1
                new double[] { 1, 0, 0, 0 }, //     1
                new double[] { 1, 0, 0, 1 }, //     1, 2
                new double[] { 0, 0, 0, 1 }, //     1, 2
                new double[] { 0, 0, 0, 1 }, //     1, 2
                new double[] { 1, 1, 1, 1 }, //        2
                new double[] { 1, 0, 1, 1 }, //        2
                new double[] { 1, 1, 0, 1 }, //        2
                new double[] { 0, 1, 1, 1 }, //        2
                new double[] { 1, 1, 1, 1 }, //        2
            };

            int[][] outputs = // the class labels, one 0/1 indicator per class
            {
                new[] { 1, 0, 0 },
                new[] { 1, 0, 0 },
                new[] { 1, 0, 0 },
                new[] { 1, 1, 0 },
                new[] { 1, 1, 0 },
                new[] { 0, 1, 0 },
                new[] { 0, 1, 0 },
                new[] { 0, 1, 1 },
                new[] { 0, 1, 1 },
                new[] { 0, 1, 1 },
                new[] { 0, 0, 1 },
                new[] { 0, 0, 1 },
                new[] { 0, 0, 1 },
                new[] { 0, 0, 1 },
                new[] { 0, 0, 1 },
            };

            // Create a new Naive Bayes teacher for 4-dimensional Gaussian inputs
            var teacher = new NaiveBayesLearning<NormalDistribution, NormalOptions, double>()
            {
                Options = new IndependentOptions<NormalOptions>()
                {
                    InnerOption = new NormalOptions()
                    {
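                        // Small regularization keeps per-class variances from
                        // collapsing to zero when a feature is constant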
                        Regularization = 1e-10
                    }
                }
            };

            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

            var bayes = teacher.Learn(inputs, outputs);

            double[][] prediction = bayes.Probabilities(inputs);

            // Evaluate the predicted probabilities using the binary cross-entropy loss:
            double error = new BinaryCrossEntropyLoss(outputs).Loss(prediction);

            Assert.AreEqual(78.465768833015233, error, 1e-8);

            Assert.IsTrue(teacher.optimized);
        }
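Both gh_758 examples score the learned model with BinaryCrossEntropyLoss. As I understand it, this is the negative log-likelihood summed over every sample and class; a hand-rolled sketch of that computation (an assumption about the exact reduction Accord applies, not a verified reimplementation; requires using System;):

        // Summed binary cross-entropy over all samples and classes
        static double BinaryCrossEntropy(int[][] expected, double[][] predicted)
        {
            double sum = 0;
            for (int i = 0; i < expected.Length; i++)
            {
                for (int j = 0; j < expected[i].Length; j++)
                {
                    double y = expected[i][j];  // true indicator, 0 or 1
                    double p = predicted[i][j]; // predicted probability of class j
                    sum -= y * Math.Log(p) + (1 - y) * Math.Log(1 - p);
                }
            }
            return sum;
        }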