Example 1
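This test trains an AdaBoost ensemble of Accord.NET DecisionStump weak learners on seven 2-D samples labeled -1 or +1. It then verifies that the training error is zero, that five stumps were fitted, that their weights (which sum to approximately 1.0) match the expected values, and that the ensemble reproduces every training label.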
        public void ConstructorTest()
        {
            double[][] inputs =
            {
                new double[] {  10, 42 },
                new double[] { 162, 96 },
                new double[] { 125, 20 },
                new double[] {  96,  6 },
                new double[] {   2, 73 },
                new double[] {  52, 51 },
                new double[] {  71, 49 },
            };

            int[] outputs =
            {
                -1, -1, +1, +1, -1, -1, +1
            };


            var classifier = new Boost<DecisionStump>();

            var teacher = new AdaBoost<DecisionStump>(classifier)
            {
                Creation = (weights) =>
                {
                    var stump = new DecisionStump(2);
                    stump.Learn(inputs, outputs, weights);
                    return stump;
                },

                Iterations = 5,
                Tolerance  = 1e-3
            };


            double error = teacher.Run(inputs, outputs);

            Assert.AreEqual(0, error);

            Assert.AreEqual(5, classifier.Models.Count);
            Assert.AreEqual(0.16684734250395147, classifier.Models[0].Weight);
            Assert.AreEqual(0.22329026900109736, classifier.Models[1].Weight);
            Assert.AreEqual(0.28350372170582383, classifier.Models[2].Weight);
            Assert.AreEqual(0.16684734250395139, classifier.Models[3].Weight);
            Assert.AreEqual(0.15951132428517592, classifier.Models[4].Weight);

            int[] actual = new int[outputs.Length];
            for (int i = 0; i < actual.Length; i++)
            {
                actual[i] = classifier.Compute(inputs[i]);
            }

            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(outputs[i], actual[i]);
            }
        }
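For reference, the trained ensemble can also be queried on points outside the training set through the same Compute call; a minimal sketch of what could be appended at the end of the test above, assuming the Accord.NET API used in these snippets (the query point { 71, 48 } is borrowed from Example 6, where it is expected to be classified as +1):

            // Query the trained boosted ensemble on an unseen 2-D sample.
            // Compute returns the ensemble's weighted-vote decision, which for
            // the {-1, +1} labels used above is the predicted class label.
            double[] unseen = new double[] { 71, 48 };
            int predicted = classifier.Compute(unseen); // expected: +1 (see Example 6)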
Example 2
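This test boosts weak LogisticRegression models on the two-class YinYang dataset. Each weak learner is fitted with IterativeReweightedLeastSquares and wrapped in Weak<LogisticRegression> with a decision function that thresholds the predicted probability at 0.5 via Math.Sign. Boosting stops after two models with a training error of 0.11, and the two model weights sum to 1.0; the per-sample assertions are left commented out because the ensemble does not classify every training point correctly.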
        public void ConstructorTest2()
        {
            var dataset = new YinYang();

            double[][] inputs   = dataset.Instances;
            bool[]     outputs2 = dataset.ClassLabels;

            int[] outputs = outputs2.Apply(x => x ? 1 : 0);

            var classifier = new Boost<Weak<LogisticRegression>>();

            var teacher = new AdaBoost<Weak<LogisticRegression>>(classifier)
            {
                Creation = (weights) =>
                {
                    LogisticRegression reg = new LogisticRegression(2, intercept: 1);

                    IterativeReweightedLeastSquares irls = new IterativeReweightedLeastSquares(reg)
                    {
                        ComputeStandardErrors = false
                    };

                    for (int i = 0; i < 50; i++)
                    {
                        irls.Run(inputs, outputs, weights);
                    }

                    return new Weak<LogisticRegression>(reg, (s, x) => Math.Sign(s.Compute(x) - 0.5));
                },

                Iterations = 50,
                Tolerance  = 1e-5,
            };



            double error = teacher.Run(inputs, outputs);


            Assert.AreEqual(0.11, error);

            Assert.AreEqual(2, classifier.Models.Count);
            Assert.AreEqual(0.63576818449825168, classifier.Models[0].Weight);
            Assert.AreEqual(0.36423181550174832, classifier.Models[1].Weight);

            int[] actual = new int[outputs.Length];
            for (int i = 0; i < actual.Length; i++)
            {
                actual[i] = classifier.Compute(inputs[i]);
            }

            //for (int i = 0; i < actual.Length; i++)
            //    Assert.AreEqual(outputs[i], actual[i]);
        }
Example 3
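This variant of the previous test reads the yin-yang data from LeastSquaresLearningTest: the first two columns are the inputs and the third column holds the class labels, which are mapped to {0.0, 1.0} targets for fitting the logistic weak learners. The boosting setup and the expected results (two models, training error 0.11) are the same as in Example 2.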
        public void ConstructorTest2()
        {
            double[][] inputs  = LeastSquaresLearningTest.yinyang.GetColumns(0, 1).ToArray();
            int[]      outputs = LeastSquaresLearningTest.yinyang.GetColumn(2).ToInt32();

            var outputs2 = outputs.Apply(x => x > 0 ? 1.0 : 0.0);

            var classifier = new Boost<Weak<LogisticRegression>>();

            var teacher = new AdaBoost<Weak<LogisticRegression>>(classifier)
            {
                Creation = (weights) =>
                {
                    LogisticRegression reg = new LogisticRegression(2, intercept: 1);

                    IterativeReweightedLeastSquares irls = new IterativeReweightedLeastSquares(reg)
                    {
                        ComputeStandardErrors = false
                    };

                    for (int i = 0; i < 50; i++)
                    {
                        irls.Run(inputs, outputs2, weights);
                    }

                    return new Weak<LogisticRegression>(reg, (s, x) => Math.Sign(s.Compute(x) - 0.5));
                },

                Iterations = 50,
                Tolerance  = 1e-5,
            };



            double error = teacher.Run(inputs, outputs);


            Assert.AreEqual(0.11, error);

            Assert.AreEqual(2, classifier.Models.Count);
            Assert.AreEqual(0.63576818449825168, classifier.Models[0].Weight);
            Assert.AreEqual(0.36423181550174832, classifier.Models[1].Weight);

            int[] actual = new int[outputs.Length];
            for (int i = 0; i < actual.Length; i++)
            {
                actual[i] = classifier.Compute(inputs[i]);
            }

            //for (int i = 0; i < actual.Length; i++)
            //    Assert.AreEqual(outputs[i], actual[i]);
        }
Example 4
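This is the same decision-stump test as Example 1, taken from another copy of the test suite; only formatting details differ, and the expected model count, weights, and zero training error are identical.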
        public void ConstructorTest()
        {
            double[][] inputs =
            {
                new double[] { 10, 42 },
                new double[] { 162, 96 },
                new double[] { 125, 20 },
                new double[] { 96, 6 },
                new double[] { 2, 73 },
                new double[] { 52, 51 },
                new double[] { 71, 49 },
            };

            int[] outputs = 
            {
                -1, -1, +1, +1, -1, -1, +1
            };


            var classifier = new Boost<DecisionStump>();

            var teacher = new AdaBoost<DecisionStump>(classifier)
            {
                Creation = (weights) =>
                {
                    var stump = new DecisionStump(2);
                    stump.Learn(inputs, outputs, weights);
                    return stump;
                },

                Iterations = 5,
                Tolerance = 1e-3
            };


            double error = teacher.Run(inputs, outputs);

            Assert.AreEqual(0, error);

            Assert.AreEqual(5, classifier.Models.Count);
            Assert.AreEqual(0.16684734250395147, classifier.Models[0].Weight);
            Assert.AreEqual(0.22329026900109736, classifier.Models[1].Weight);
            Assert.AreEqual(0.28350372170582383, classifier.Models[2].Weight);
            Assert.AreEqual(0.16684734250395139, classifier.Models[3].Weight);
            Assert.AreEqual(0.15951132428517592, classifier.Models[4].Weight);

            int[] actual = new int[outputs.Length];
            for (int i = 0; i < actual.Length; i++)
                actual[i] = classifier.Compute(inputs[i]);

            for (int i = 0; i < actual.Length; i++)
                Assert.AreEqual(outputs[i], actual[i]);
        }
Example 5
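This is the same logistic-regression boosting test as Example 3, again from another copy of the suite, with identical expected results.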
        public void ConstructorTest2()
        {
            double[][] inputs = LeastSquaresLearningTest.yinyang.GetColumns(0, 1).ToArray();
            int[] outputs = LeastSquaresLearningTest.yinyang.GetColumn(2).ToInt32();

            var outputs2 = outputs.Apply(x => x > 0 ? 1.0 : 0.0);

            var classifier = new Boost<Weak<LogisticRegression>>();

            var teacher = new AdaBoost<Weak<LogisticRegression>>(classifier)
            {
                Creation = (weights) =>
                {
                    LogisticRegression reg = new LogisticRegression(2, intercept: 1);

                    IterativeReweightedLeastSquares irls = new IterativeReweightedLeastSquares(reg)
                    {
                        ComputeStandardErrors = false
                    };

                    for (int i = 0; i < 50; i++)
                        irls.Run(inputs, outputs2, weights);

                    return new Weak<LogisticRegression>(reg, (s, x) => Math.Sign(s.Compute(x) - 0.5));
                },

                Iterations = 50,
                Tolerance = 1e-5,
            };



            double error = teacher.Run(inputs, outputs);


            Assert.AreEqual(0.11, error);

            Assert.AreEqual(2, classifier.Models.Count);
            Assert.AreEqual(0.63576818449825168, classifier.Models[0].Weight);
            Assert.AreEqual(0.36423181550174832, classifier.Models[1].Weight);

            int[] actual = new int[outputs.Length];
            for (int i = 0; i < actual.Length; i++)
                actual[i] = classifier.Compute(inputs[i]);

            //for (int i = 0; i < actual.Length; i++)
            //    Assert.AreEqual(outputs[i], actual[i]);
        }
Example 6
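This commented version of the decision-stump test walks through the workflow step by step: building the Boost<DecisionStump> ensemble, configuring the AdaBoost teacher (here through MaxIterations rather than Iterations, presumably targeting a different Accord.NET version), running the learning procedure, and querying the trained ensemble on a new sample { 71, 48 }, which is expected to be classified as +1.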
        public void ConstructorTest()
        {
            // Let's say we want to classify the following 2-dimensional
            // data samples into 2 possible classes, either true or false:
            double[][] inputs =
            {
                new double[] {  10, 42 },
                new double[] { 162, 96 },
                new double[] { 125, 20 },
                new double[] {  96,  6 },
                new double[] {   2, 73 },
                new double[] {  52, 51 },
                new double[] {  71, 49 },
            };

            // And those are their associated class labels
            int[] outputs =
            {
                -1, -1, +1, +1, -1, -1, +1
            };


            // First, we create a classifier using:
            var classifier = new Boost<DecisionStump>();

            // Now, we can create an AdaBoost learning algorithm as:
            var teacher = new AdaBoost<DecisionStump>(classifier)
            {
                Creation = (weights) =>
                {
                    var stump = new DecisionStump(2);
                    stump.Learn(inputs, outputs, weights);
                    return stump;
                },

                // Train until:
                MaxIterations = 5,
                Tolerance     = 1e-3
            };

            // Now, we can use the Run method to learn:
            double error = teacher.Run(inputs, outputs); // error should be zero.

            // Now, we can compute the model outputs for new samples using
            int y = classifier.Compute(new double[] { 71, 48 }); // should be 1

            Assert.AreEqual(1, y);

            Assert.AreEqual(0, error);

            Assert.AreEqual(5, classifier.Models.Count);
            Assert.AreEqual(0.16684734250395147, classifier.Models[0].Weight);
            Assert.AreEqual(0.22329026900109736, classifier.Models[1].Weight);
            Assert.AreEqual(0.28350372170582383, classifier.Models[2].Weight);
            Assert.AreEqual(0.16684734250395139, classifier.Models[3].Weight);
            Assert.AreEqual(0.15951132428517592, classifier.Models[4].Weight);

            int[] actual = new int[outputs.Length];
            for (int i = 0; i < actual.Length; i++)
            {
                actual[i] = classifier.Compute(inputs[i]);
            }

            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(outputs[i], actual[i]);
            }
        }