Code Example #1
        public void ComputeTest2()
        {
            // XOR
            double[][] inputs =
            {
                new double[] { 0, 0 },
                new double[] { 0, 1 },
                new double[] { 1, 0 },
                new double[] { 1, 1 }
            };

            int[] labels =
            {
                -1,
                1,
                1,
                -1
            };

            KernelSupportVectorMachine    machine = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length);
            SequentialMinimalOptimization smo     = new SequentialMinimalOptimization(machine, inputs, labels);

            smo.Complexity = 1;
            double error = smo.Run();

            Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0])));
            Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[1])));
            Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[2])));
            Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[3])));

            Assert.AreEqual(error, 0);
        }
Code Example #2
        public void DynamicalTimeWarpingConstructorTest2()
        {
            // Declare some testing data
            double[][] inputs =
            {
                // Class -1
                new double[] { 0, 1, 1, 0 },
                new double[] { 0, 0, 1, 0 },
                new double[] { 0, 1, 1, 1, 0 },
                new double[] { 0, 1, 0 },

                // Class +1
                new double[] { 1, 0, 0, 1 },
                new double[] { 1, 1, 0, 1 },
                new double[] { 1, 0, 0, 0, 1 },
                new double[] { 1, 0, 1 },
                new double[] { 1, 0, 0, 0, 1, 1 }
            };

            int[] outputs =
            {
                -1, -1, -1, -1, // First four sequences are of class -1
                 1,  1,  1,  1, 1 // Last five sequences are of class +1
            };


            // Set the parameters of the kernel
            double alpha             = 1.0;
            int    degree            = 1;
            int    innerVectorLength = 1;

            // Create the kernel. Note that the length of the input sequences does not
            // need to be specified; it is handled automatically by the kernel.
            DynamicTimeWarping target = new DynamicTimeWarping(innerVectorLength, alpha, degree);


            // When using variable-length kernels, specify 0 as the input length.
            KernelSupportVectorMachine svm = new KernelSupportVectorMachine(target, 0);

            // Create the Sequential Minimal Optimization as usual
            SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, outputs);

            smo.Complexity = 1.5;
            double error = smo.Run();


            // Check if the model has learnt the sequences correctly.
            for (int i = 0; i < inputs.Length; i++)
            {
                int expected = outputs[i];
                int actual   = System.Math.Sign(svm.Compute(inputs[i]));
                Assert.AreEqual(expected, actual);
            }

            // Testing new sequences
            Assert.AreEqual(-1, System.Math.Sign(svm.Compute(new double[] { 0, 1, 1, 0, 0 })));
            Assert.AreEqual(+1, System.Math.Sign(svm.Compute(new double[] { 1, 1, 0, 0, 1, 1 })));
        }
Code Example #3
        public void TrainTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Example regression problem. Suppose we are trying
            // to model the following equation: f(x, y) = 2x + y

            double[][] inputs =         // (x, y)
            {
                new double[] { 0,  1 }, // 2*0 + 1 =  1
                new double[] { 4,  3 }, // 2*4 + 3 = 11
                new double[] { 8, -8 }, // 2*8 - 8 =  8
                new double[] { 2,  2 }, // 2*2 + 2 =  6
                new double[] { 6,  1 }, // 2*6 + 1 = 13
                new double[] { 5,  4 }, // 2*5 + 4 = 14
                new double[] { 9,  1 }, // 2*9 + 1 = 19
                new double[] { 1,  6 }, // 2*1 + 6 =  8
            };

            double[] outputs = // f(x, y)
            {
                1, 11, 8, 6, 13, 14, 19, 8
            };

            // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
            var machine = new KernelSupportVectorMachine(new Polynomial(2), inputs: 2);

            // Create the sequential minimal optimization teacher
            var learn = new SequentialMinimalOptimizationRegression(machine, inputs, outputs)
            {
                Complexity = 100
            };

            // Run the learning algorithm
            double error = learn.Run();

            // Compute the answer for one particular example
            double fxy = machine.Compute(inputs[0]); // 1.0003849827673186

            // Check for correct answers
            double[] answers = new double[inputs.Length];
            for (int i = 0; i < answers.Length; i++)
            {
                answers[i] = machine.Compute(inputs[i]);
            }

            Assert.AreEqual(1.0, fxy, 1e-2);
            for (int i = 0; i < outputs.Length; i++)
            {
                Assert.AreEqual(outputs[i], answers[i], 1e-2);
            }
        }
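
A note on Code Example #3: once the regression machine has been trained, it can also be queried at points outside the training set. The sketch below is a hypothetical continuation that reuses the machine variable from the example above; the exact output depends on the trained model, but for inputs near the training range it should land close to f(x, y) = 2x + y.

            // Hypothetical follow-up: query the trained machine at an unseen point.
            // The target function gives 2*3 + 2 = 8, so the SVM output should be
            // near 8, although the exact value depends on the fit.
            double[] unseen = new double[] { 3, 2 };
            double prediction = machine.Compute(unseen);
            Console.WriteLine(prediction); // expected to be roughly 8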
Code Example #4
        private void timer3_Tick(object sender, EventArgs e)
        {
            // Grab the current feature vector
            double[] dt  = M.Current();
            double   sum = 0;

            // Accumulate the leading features (note: sum is not used below)
            for (int j = 0; j < VectorSize - 2; j++)
            {
                sum += dt[j];
            }

            // Scale the middle features down
            for (int i = 2; i < VectorSize - 2; i++)
            {
                dt[i] = dt[i] / 1000;
            }

            // Clamp the first two features and scale them to [0, 1]
            if (dt[0] > 2000)
            {
                dt[0] = 2000;
            }
            if (dt[1] > 2000)
            {
                dt[1] = 2000;
            }
            dt[0] = dt[0] / 2000;
            dt[1] = dt[1] / 2000;

            // Scale the last two features
            dt[VectorSize - 2] = dt[VectorSize - 2] / 100;
            dt[VectorSize - 1] = dt[VectorSize - 1] / 100;

            // Evaluate the trained SVM on the normalized vector
            double d = svm.Compute(dt);

            textBox12.Text = d.ToString();
            if (Math.Abs(d * 15) < 60)
            {
                if (Points < pictureBox1.Width - 1)
                {
                    points.Add(new PointF(Points, 60 + (int)(15 * d)));
                    Points++;
                }
                else
                {
                    points.RemoveAt(0);
                    for (int i = 0; i < points.Count; i++)
                    {
                        points[i] = new PointF(points[i].X - 1, points[i].Y);
                    }
                    points.Add(new PointF(Points, 60 + (int)(15 * d)));
                }
            }
            // File.AppendAllText("Res.txt", d.ToString()+"\r\n");
            pictureBox1.Refresh();
        }
Code Example #5
        public void SVMTestData()
        {
            List <DataSet> dataset = new List <DataSet>();

            FillOnes(dataset);
            FillZeros(dataset);

            KernelSupportVectorMachine machine = new KernelSupportVectorMachine(
                new Polynomial(2), 25);

            var learn = new SequentialMinimalOptimization(machine, dataset.ToArray());

            double[] error = learn.Run();

            double[] output = machine.Compute(dataset.ToArray());


            double[] expected = new double[dataset.Count];
            for (int i = 0; i < dataset.Count; i++)
            {
                output[i]   = Math.Round(output[i]);
                expected[i] = dataset[i].Expected;
            }

            CollectionAssert.AreEqual(expected, output);
        }
Code Example #6
        public void testSVM()
        {
            if (_svm == null)
            {
                return;
            }

            int[] output = new int[_outputs.Length];

            // Compute the machine outputs
            for (int i = 0; i < _outputs.Length; i++)
            {
                double actual    = _outputs[i];
                double predicted = _svm.Compute(_inputs[i]);
                // System.Console.WriteLine(Math.Sign(actual) + "   " + _names[i] + "   =>   " + predicted + "   =>   " + Math.Sign(predicted));
                output[i] = System.Math.Sign(predicted);
            }

            // Use confusion matrix to compute some performance metrics
            ConfusionMatrix       confusionMatrix = new ConfusionMatrix(output, _outputs, 1, -1);
            FormDataView <double> f = new FormDataView <double>(new[] { confusionMatrix });

            f.Show();
            //dgvPerformance.DataSource = new[] { confusionMatrix };
        }
Code Example #7
        public void LearnTest()
        {
            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] xor =
            {
                -1,
                1,
                1,
                -1
            };

            // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
            KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(2), inputs[0].Length);

            // Create the sequential minimal optimization teacher
            SequentialMinimalOptimization learn = new SequentialMinimalOptimization(machine, inputs, xor);

            // Run the learning algorithm
            learn.Run();


            int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));

            for (int i = 0; i < output.Length; i++)
            {
                Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i]));
            }
        }
Code Example #8
        public void LearnTest2()
        {
            var dataset = new YinYang();

            double[][] inputs  = dataset.Instances;
            int[]      outputs = dataset.ClassLabels.Apply(x => x ? 1 : -1);

            // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
            KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(3), inputs[0].Length);

            // Create the Least Squares Support Vector Machine teacher
            LeastSquaresLearning learn = new LeastSquaresLearning(machine, inputs, outputs);

            learn.Complexity = 1 / 0.1;

            // Run the learning algorithm
            learn.Run();


            int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));

            for (int i = 0; i < output.Length; i++)
            {
                Assert.AreEqual(System.Math.Sign(outputs[i]), System.Math.Sign(output[i]));
            }
        }
Code Example #9
        private void btnTestingRun_Click(object sender, EventArgs e)
        {
            if (svm == null || dgvTestingSource.DataSource == null)
            {
                MessageBox.Show("Please create a machine first.");
                return;
            }


            // Creates a matrix from the source data table
            double[,] sourceMatrix = (dgvTestingSource.DataSource as DataTable).ToMatrix();


            // Extract inputs
            double[][] inputs = new double[sourceMatrix.GetLength(0)][];
            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i] = new double[] { sourceMatrix[i, 0], sourceMatrix[i, 1] };
            }

            // Get only the label outputs
            int[] expected = new int[sourceMatrix.GetLength(0)];
            for (int i = 0; i < expected.Length; i++)
            {
                expected[i] = (int)sourceMatrix[i, 2];
            }

            // Compute the machine outputs
            int[] output = new int[expected.Length];
            for (int i = 0; i < expected.Length; i++)
            {
                output[i] = System.Math.Sign(svm.Compute(inputs[i]));
            }

            double[] expectedd = new double[expected.Length];
            double[] outputd   = new double[expected.Length];
            for (int i = 0; i < expected.Length; i++)
            {
                expectedd[i] = expected[i];
                outputd[i]   = output[i];
            }

            // Use confusion matrix to compute some statistics.
            ConfusionMatrix confusionMatrix = new ConfusionMatrix(output, expected, 1, -1);

            dgvPerformance.DataSource = new List <ConfusionMatrix> {
                confusionMatrix
            };

            foreach (DataGridViewColumn col in dgvPerformance.Columns)
            {
                col.Visible = true;
            }
            Column1.Visible = Column2.Visible = false;

            // Create performance scatterplot
            CreateResultScatterplot(zedGraphControl1, inputs, expectedd, outputd);
        }
Code Example #10
        public void ComputeTest5()
        {
            var dataset = SequentialMinimalOptimizationTest.GetYingYang();
            var inputs  = dataset.Submatrix(null, 0, 1).ToJagged();
            var labels  = dataset.GetColumn(2).ToInt32();

            var kernel = new Polynomial(2, 0);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo     = new SequentialMinimalOptimization(machine, inputs, labels);
                smo.UseComplexityHeuristic = true;

                double error = smo.Run();
                Assert.AreEqual(0.2, error);

                Assert.AreEqual(0.11714451552090824, smo.Complexity);

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
                Assert.AreEqual(20, matrix.FalseNegatives);
                Assert.AreEqual(0, matrix.FalsePositives);
                Assert.AreEqual(30, matrix.TruePositives);
                Assert.AreEqual(50, matrix.TrueNegatives);
            }

            {
                Accord.Math.Tools.SetupGenerator(0);

                var projection = inputs.Apply(kernel.Transform);
                var machine    = new SupportVectorMachine(projection[0].Length);
                var smo        = new LinearNewtonMethod(machine, projection, labels);
                smo.UseComplexityHeuristic = true;

                double error = smo.Run();
                Assert.AreEqual(0.18, error);

                Assert.AreEqual(0.11714451552090821, smo.Complexity, 1e-15);

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(projection[i]));
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
                Assert.AreEqual(17, matrix.FalseNegatives);
                Assert.AreEqual(1, matrix.FalsePositives);
                Assert.AreEqual(33, matrix.TruePositives);
                Assert.AreEqual(49, matrix.TrueNegatives);
            }
        }
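
Code Example #10 relies on the fact that a polynomial kernel can be replaced by an explicit feature-space projection (kernel.Transform) followed by a linear machine. The self-contained sketch below shows why this works for a homogeneous second-degree polynomial kernel using the textbook feature map; the map used here is an assumption for illustration and may differ in ordering or scaling from the projection Accord.NET computes internally.

        // Sketch: for k(x, y) = (x . y)^2 in two dimensions, the explicit feature
        // map phi(x) = (x1^2, sqrt(2)*x1*x2, x2^2) satisfies k(x, y) = phi(x) . phi(y).
        static void KernelTrickSketch()
        {
            double[] x = { 2.0, 3.0 };
            double[] y = { -1.0, 4.0 };

            // Kernel evaluation: (x . y)^2
            double dot = x[0] * y[0] + x[1] * y[1];
            double k = dot * dot; // 10^2 = 100

            // Explicit projection of both points
            double[] px = { x[0] * x[0], Math.Sqrt(2) * x[0] * x[1], x[1] * x[1] };
            double[] py = { y[0] * y[0], Math.Sqrt(2) * y[0] * y[1], y[1] * y[1] };

            // Inner product in feature space equals the kernel value
            double kProjected = px[0] * py[0] + px[1] * py[1] + px[2] * py[2]; // also 100

            Console.WriteLine(k + " " + kProjected);
        }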
Code Example #11
        public void ComputeTest()
        {
            // Example AND problem
            double[][] inputs =
            {
                new double[] { 0, 0 }, // 0 and 0: 0 (label -1)
                new double[] { 0, 1 }, // 0 and 1: 0 (label -1)
                new double[] { 1, 0 }, // 1 and 0: 0 (label -1)
                new double[] { 1, 1 }  // 1 and 1: 1 (label +1)
            };

            // Dichotomy SVM outputs should be given as [-1;+1]
            int[] labels =
            {
                // 0,  0,  0, 1
                -1, -1, -1, 1
            };

            // Create a Support Vector Machine for the given inputs
            KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length);

            // Instantiate a new learning algorithm for SVMs
            SequentialMinimalOptimization smo = new SequentialMinimalOptimization(machine, inputs, labels);

            // Set up the learning algorithm
            smo.Complexity = 1.0;

            // Run
            double error = smo.Run();

            Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0])));
            Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[1])));
            Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[2])));
            Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[3])));

            Assert.AreEqual(error, 0);

            Assert.AreEqual(-0.6640625, machine.Threshold);
            Assert.AreEqual(1, machine.Weights[0]);
            Assert.AreEqual(-0.34375, machine.Weights[1]);
            Assert.AreEqual(-0.328125, machine.Weights[2]);
            Assert.AreEqual(-0.328125, machine.Weights[3]);
        }
Code Example #12
        public void FixedWeightsTest()
        {
            var dataset = KernelSupportVectorMachineTest.training;
            var inputs  = dataset.Submatrix(null, 0, 3);
            var labels  = Accord.Math.Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();

            var machine = new KernelSupportVectorMachine(
                Gaussian.Estimate(inputs), inputs[0].Length);

            var smo = new SequentialMinimalOptimization(machine, inputs, labels);

            smo.Complexity = 10;

            double error = smo.Run();

            Assert.AreEqual(0.19047619047619047, error);
            Assert.AreEqual(265.78327637381551, ((Gaussian)machine.Kernel).Sigma);
            Assert.AreEqual(29, machine.SupportVectors.Length);

            double[] expectedWeights =
            {
                1.65717694716503, 1.20005456611466, -5.70824245415995, 10, 10,
                -2.38755497916487, 10, -8.15723436363058, 10, -10,
                10, 10, -0.188634936781317, -5.4354281009458, -8.48341139483265,
                -5.91105702760141, -5.71489190049223, 10, -2.37289205235858, -3.33031262413522,
                -1.97545116517677, 10, -10, -9.563186799279, -3.917941544845,
                -0.532584110773336, 4.81951847548326, 0.343668292727091, -4.34159482731336
            };

            //Assert.IsTrue(expectedWeights.IsEqual(machine.Weights, 1e-5));

            int[] actual = new int[labels.Length];
            for (int i = 0; i < actual.Length; i++)
            {
                actual[i] = Math.Sign(machine.Compute(inputs[i]));
            }

            ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

            Assert.AreEqual(8, matrix.FalseNegatives);
            Assert.AreEqual(0, matrix.FalsePositives);
            Assert.AreEqual(4, matrix.TruePositives);
            Assert.AreEqual(30, matrix.TrueNegatives);

            Assert.AreEqual(1 / 3.0, matrix.Sensitivity);
            Assert.AreEqual(1, matrix.Specificity);

            Assert.AreEqual(0.5, matrix.FScore);
            Assert.AreEqual(0.5129891760425771, matrix.MatthewsCorrelationCoefficient);
        }
Code Example #13
        public void RunTest3()
        {
            // Example XOR problem
            double[][] inputs =
            {
                new double[] { 0, 0 }, // 0 xor 0: 1 (label +1)
                new double[] { 0, 1 }, // 0 xor 1: 0 (label -1)
                new double[] { 1, 0 }, // 1 xor 0: 0 (label -1)
                new double[] { 1, 1 }  // 1 xor 1: 1 (label +1)
            };

            // Dichotomy SVM outputs should be given as [-1;+1]
            int[] labels =
            {
                1, -1, -1, 1
            };

            // Create a Kernel Support Vector Machine for the given inputs
            KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length);

            // Instantiate a new learning algorithm for SVMs
            SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, labels);

            // Set up the learning algorithm
            smo.Complexity = 1.0;

            // Run the learning algorithm
            double error = smo.Run();

            Assert.IsFalse(svm.IsProbabilistic);

            // Instantiate the probabilistic learning calibration
            var calibration = new ProbabilisticOutputCalibration(svm, inputs, labels);

            // Run the calibration algorithm
            double loglikelihood = calibration.Run();

            Assert.IsTrue(svm.IsProbabilistic);

            // Compute the decision output for one of the input vectors,
            // while also retrieving the probability of the answer

            double probability;
            int    decision = svm.Compute(inputs[0], out probability);

            // At this point, decision is +1 with a probability of 75%

            Assert.AreEqual(1, decision);
            Assert.AreEqual(0.74999975815069375, probability, 1e-10);
        }
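
After the calibration step in Code Example #13, the same Compute overload can be used to inspect the probability attached to every training input, not just the first one. The loop below is a minimal sketch reusing the svm and inputs variables from the example; the exact probabilities depend on the sigmoid fitted by the calibration.

            // Hypothetical continuation: print decision and probability for each input.
            for (int i = 0; i < inputs.Length; i++)
            {
                double p;
                int d = svm.Compute(inputs[i], out p);
                Console.WriteLine("decision = " + d + ", probability = " + p);
            }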
Code Example #14
        public void TransformTest()
        {
            var inputs = yinyang.Submatrix(null, 0, 1).ToJagged();
            var labels = yinyang.GetColumn(2).ToInt32();

            ConfusionMatrix actual, expected;
            SequentialMinimalOptimization a, b;

            var kernel = new Polynomial(2, 0);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                a = new SequentialMinimalOptimization(machine, inputs, labels);
                a.UseComplexityHeuristic = true;
                a.Run();

                int[] values = new int[labels.Length];
                for (int i = 0; i < values.Length; i++)
                {
                    values[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                expected = new ConfusionMatrix(values, labels);
            }

            {
                var projection = inputs.Apply(kernel.Transform);
                var machine    = new SupportVectorMachine(projection[0].Length);
                b = new SequentialMinimalOptimization(machine, projection, labels);
                b.UseComplexityHeuristic = true;
                b.Run();

                int[] values = new int[labels.Length];
                for (int i = 0; i < values.Length; i++)
                {
                    values[i] = Math.Sign(machine.Compute(projection[i]));
                }

                actual = new ConfusionMatrix(values, labels);
            }

            Assert.AreEqual(a.Complexity, b.Complexity, 1e-15);
            Assert.AreEqual(expected.TrueNegatives, actual.TrueNegatives);
            Assert.AreEqual(expected.TruePositives, actual.TruePositives);
            Assert.AreEqual(expected.FalseNegatives, actual.FalseNegatives);
            Assert.AreEqual(expected.FalsePositives, actual.FalsePositives);
        }
Code Example #15
        private static void testWeights(double[][] inputs, int[] labels, IKernel kernel)
        {
            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.PositiveWeight = 100;
                smo.NegativeWeight = 1;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(50, matrix.TruePositives); // has more importance
                Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
            }

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.PositiveWeight = 1;
                smo.NegativeWeight = 100;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                var matrix = new ConfusionMatrix(actual, labels);
                Assert.AreEqual(50, matrix.TrueNegatives); // has more importance
                Assert.AreEqual(0, matrix.FalsePositives); // has more importance
            }
        }
Code Example #16
        public void ComputeTest5()
        {
            double[][] inputs = training.Submatrix(null, 0, 3);

            int[] labels = Tools.Scale(0, 1, -1, 1, training.GetColumn(4)).ToInt32();

            Gaussian kernel = Gaussian.Estimate(inputs);

            var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);

            var smo = new SequentialMinimalOptimization(machine, inputs, labels);


            smo.Complexity          = 1.0;
            smo.UseClassProportions = true;

            double error = smo.Run();

            Assert.AreEqual(1, smo.Complexity);
            Assert.AreEqual(0.4, smo.PositiveWeight);
            Assert.AreEqual(1.0, smo.NegativeWeight);
            Assert.AreEqual(0.4, smo.WeightRatio, 1e-10);
            Assert.AreEqual(0.38095238095238093, error);
            Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
            Assert.AreEqual(32, machine.SupportVectors.Length);


            int[] actual = new int[labels.Length];
            for (int i = 0; i < actual.Length; i++)
            {
                actual[i] = Math.Sign(machine.Compute(inputs[i]));
            }

            ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

            Assert.AreEqual(7, matrix.FalseNegatives);
            Assert.AreEqual(9, matrix.FalsePositives);
            Assert.AreEqual(5, matrix.TruePositives);
            Assert.AreEqual(21, matrix.TrueNegatives);

            Assert.AreEqual(0.41666666666666669, matrix.Sensitivity);
            Assert.AreEqual(0.7, matrix.Specificity);
        }
Code Example #17
        private void btnTestingRun_Click(object sender, EventArgs e)
        {
            if (svm == null || dgvTestingSource.DataSource == null)
            {
                MessageBox.Show("Please create a machine first.");
                return;
            }


            // Creates a matrix from the source data table
            double[,] table = (dgvTestingSource.DataSource as DataTable).ToMatrix();


            // Extract the first columns (X)
            double[][] inputs = table.GetColumns(0).ToArray();

            // Extract the expected output values
            double[] expected = table.GetColumn(1);

            // Compute the actual machine outputs
            var output = new double[expected.Length];

            for (int i = 0; i < expected.Length; i++)
            {
                output[i] = svm.Compute(inputs[i]);
            }


            // Compute R² and Sum-of-squares error
            double rSquared = Accord.Statistics.Tools.Determination(output, expected);
            double error    = Elementwise.Pow(expected.Subtract(output), 2).Sum() / output.Length;


            // Anonymous magic! :D
            var r = new { RSquared = rSquared, Error = error };

            dgvPerformance.DataSource = (new[] { r }).ToList();


            // Create performance scatter plot
            CreateResultScatterplot(zedGraphControl1, inputs, expected, output);
        }
Code Example #18
        public void SVMTestXOR()
        {
            List <DataSet> dataset = new List <DataSet>();

            dataset.Add(new DataSet(-1, new double[] { -1, -1 }));
            dataset.Add(new DataSet(1, new double[] { -1, 1 }));
            dataset.Add(new DataSet(1, new double[] { 1, -1 }));
            dataset.Add(new DataSet(-1, new double[] { 1, 1 }));

            KernelSupportVectorMachine machine = new KernelSupportVectorMachine(
                new Polynomial(2), 2);

            var learn = new SequentialMinimalOptimization(machine, dataset.ToArray());

            double[] error = learn.Run();

            double[] output   = machine.Compute(dataset.ToArray());
            double[] expected = { -1, 1, 1, -1 };

            CollectionAssert.AreEqual(expected, output);
        }
Code Example #19
File: MainForm.cs Project: haf/Accord.Net
        /// <summary>
        ///   Tests the previously created machine into a new set of data.
        /// </summary>
        ///
        private void btnTestingRun_Click(object sender, EventArgs e)
        {
            if (svm == null || dgvTestingSource.DataSource == null)
            {
                MessageBox.Show("Please create a machine first.");
                return;
            }


            // Creates a matrix from the source data table
            double[,] table = (dgvTestingSource.DataSource as DataTable).ToMatrix();


            // Extract the first and second columns (X and Y)
            double[][] inputs = table.GetColumns(0, 1).ToArray();

            // Extract the expected output labels
            int[] expected = table.GetColumn(2).ToInt32();


            int[] output = new int[expected.Length];

            // Compute the actual machine outputs
            for (int i = 0; i < expected.Length; i++)
            {
                output[i] = System.Math.Sign(svm.Compute(inputs[i]));
            }



            // Use confusion matrix to compute some performance metrics
            ConfusionMatrix confusionMatrix = new ConfusionMatrix(output, expected, 1, -1);

            dgvPerformance.DataSource = new [] { confusionMatrix };


            // Create performance scatter plot
            CreateResultScatterplot(zedGraphControl1, inputs, expected.ToDouble(), output.ToDouble());
        }
Code Example #20
        public void LearnTest()
        {
            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] xor =
            {
                -1,
                1,
                1,
                -1
            };

            // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
            KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(2), inputs[0].Length);

            // Create the Least Squares Support Vector Machine teacher
            LeastSquaresLearning learn = new LeastSquaresLearning(machine, inputs, xor);

            learn.Complexity = 10;

            // Run the learning algorithm
            learn.Run();


            int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));

            for (int i = 0; i < output.Length; i++)
            {
                Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i]));
            }
        }
Code Example #21
        public void UseClassProportionsTest()
        {
            var dataset = KernelSupportVectorMachineTest.training;
            var inputs  = dataset.Submatrix(null, 0, 3);
            var labels  = Accord.Math.Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();

            Gaussian kernel  = Gaussian.Estimate(inputs);
            var      machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
            var      smo     = new SequentialMinimalOptimization(machine, inputs, labels);

            smo.Complexity          = 1.0;
            smo.UseClassProportions = true;

            double error = smo.Run();

            Assert.AreEqual(1, smo.Complexity);
            Assert.AreEqual(0.4, smo.PositiveWeight);
            Assert.AreEqual(1.0, smo.NegativeWeight);
            Assert.AreEqual(0.4, smo.WeightRatio, 1e-10);
            Assert.AreEqual(0.2857142857142857, error);
            Assert.AreEqual(265.78327637381551, ((Gaussian)machine.Kernel).Sigma);
            Assert.AreEqual(26, machine.SupportVectors.Length);


            int[] actual = new int[labels.Length];
            for (int i = 0; i < actual.Length; i++)
            {
                actual[i] = Math.Sign(machine.Compute(inputs[i]));
            }

            ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

            Assert.AreEqual(12, matrix.FalseNegatives);
            Assert.AreEqual(0, matrix.FalsePositives);
            Assert.AreEqual(0, matrix.TruePositives);
            Assert.AreEqual(30, matrix.TrueNegatives);
        }
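
The 0.4 positive weight asserted in Code Example #21 is consistent with the class balance of that training set: the confusion matrix accounts for 12 actual positives (0 true positives plus 12 false negatives) and 30 actual negatives (30 true negatives plus 0 false positives), and 12 / 30 = 0.4. The helper below just spells out that arithmetic; it is an inference from the numbers in this test, not a statement about Accord.NET internals.

        // Worked check: with UseClassProportions enabled, the asserted weight ratio
        // matches the ratio of positive to negative examples, 12 / 30 = 0.4.
        static double ClassProportionRatio(int positives, int negatives)
        {
            return (double)positives / negatives;
        }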
Code Example #22
        public void LearnTest2()
        {
            double[][] inputs  = yinyang.Submatrix(null, 0, 1).ToJagged();
            int[]      outputs = yinyang.GetColumn(2).ToInt32();

            // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
            KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(3), inputs[0].Length);

            // Create the Least Squares Support Vector Machine teacher
            LeastSquaresLearning learn = new LeastSquaresLearning(machine, inputs, outputs);

            learn.Complexity = 1 / 0.1;

            // Run the learning algorithm
            learn.Run();


            int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));

            for (int i = 0; i < output.Length; i++)
            {
                Assert.AreEqual(System.Math.Sign(outputs[i]), System.Math.Sign(output[i]));
            }
        }
Code Example #23
        public void ComputeTest5()
        {
            var dataset = yinyang;

            double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
            int[]      labels = dataset.GetColumn(2).ToInt32();

            {
                Linear kernel  = new Linear();
                var    machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var    smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity = 1.0;

                double error = smo.Run();

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(1.0, smo.WeightRatio);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.14, error);
                Assert.AreEqual(30, machine.SupportVectors.Length);

                double[] actualWeights   = machine.Weights;
                double[] expectedWeights = { -1, -1, 1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 0.337065120144639, -1, 1, -0.337065120144639, -1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1 };
                Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(7, matrix.FalseNegatives);
                Assert.AreEqual(7, matrix.FalsePositives);
                Assert.AreEqual(43, matrix.TruePositives);
                Assert.AreEqual(43, matrix.TrueNegatives);
            }

            {
                Linear kernel  = new Linear();
                var    machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var    smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity     = 1.0;
                smo.PositiveWeight = 0.3;
                smo.NegativeWeight = 1.0;

                double error = smo.Run();

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(0.3 / 1.0, smo.WeightRatio);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(0.3, smo.PositiveWeight);
                Assert.AreEqual(0.21, error);
                Assert.AreEqual(24, machine.SupportVectors.Length);

                double[] actualWeights = machine.Weights;
                //string str = actualWeights.ToString(Accord.Math.Formats.CSharpArrayFormatProvider.InvariantCulture);
                double[] expectedWeights = { -0.771026323762095, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -0.928973676237905, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 };
                Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = (int)machine.Compute(inputs[i]);
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(50, matrix.FalseNegatives);
                Assert.AreEqual(0, matrix.FalsePositives);
                Assert.AreEqual(0, matrix.TruePositives);
                Assert.AreEqual(50, matrix.TrueNegatives);
            }

            {
                Linear kernel  = new Linear();
                var    machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var    smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity     = 1.0;
                smo.PositiveWeight = 1.0;
                smo.NegativeWeight = 0.3;

                double error = smo.Run();

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(1.0 / 0.3, smo.WeightRatio);
                Assert.AreEqual(0.3, smo.NegativeWeight);
                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.15, error);
                Assert.AreEqual(19, machine.SupportVectors.Length);

                double[] actualWeights   = machine.Weights;
                double[] expectedWeights = new double[] { 1, 1, -0.3, 1, -0.3, 1, 1, -0.3, 1, 1, 1, 1, 1, 1, 1, 1, 0.129080057278249, 1, 0.737797469918795 };
                Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(0, matrix.FalseNegatives);
                Assert.AreEqual(50, matrix.FalsePositives);
                Assert.AreEqual(50, matrix.TruePositives);
                Assert.AreEqual(0, matrix.TrueNegatives);
            }
        }
Code Example #24
        public void ComputeTest()
        {
            // Example AND problem
            double[][] inputs =
            {
                new double[] { 0, 0 }, // 0 and 0: 0 (label -1)
                new double[] { 0, 1 }, // 0 and 1: 0 (label -1)
                new double[] { 1, 0 }, // 1 and 0: 0 (label -1)
                new double[] { 1, 1 }  // 1 and 1: 1 (label +1)
            };

            // Dichotomy SVM outputs should be given as [-1;+1]
            int[] labels =
            {
                // 0,  0,  0, 1
                -1, -1, -1, 1
            };

            // Create a Support Vector Machine for the given inputs
            KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Linear(0), inputs[0].Length);

            // Instantiate a new learning algorithm for SVMs
            SequentialMinimalOptimization smo = new SequentialMinimalOptimization(machine, inputs, labels);

            // Set up the learning algorithm
            smo.Complexity = 100.0;

            // Run
            double error = smo.Run();

            Assert.AreEqual(0, error);
            Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0])));
            Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[1])));
            Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[2])));
            Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[3])));

            // At this point we have the weighted support vectors
            //     w        sv        b
            //   (+4)  *  (1,1)      -3
            //   (-2)  *  (1,0)
            //   (-2)  *  (0,1)
            //
            // However, it can be seen that the last SV can be written
            // as a linear combination of the two first vectors:
            //
            //   (0,1) = (1,1) - (1,0)
            //
            // Since we have a linear space (we are using a linear kernel)
            // this vector could be removed from the support vector set.
            //
            // f(x) = sum(alpha_i * x * x_i) + b
            //      = 4*(1,1)*x - 2*(1,0)*x - 2*(0,1)*x             - 3
            //      = 4*(1,1)*x - 2*(1,0)*x - 2*((1,1) - (1,0))*x   - 3
            //      = 4*(1,1)*x - 2*(1,0)*x - 2*(1,1)*x + 2*(1,0)*x - 3
            //      = 4*(1,1)*x - 2*(1,0)*x - 2*(1,1)*x + 2*(1,0)*x - 3
            //      = 2*(1,1)*x - 3
            //      = 2*x1 + 2*x2 - 3
            //

            SupportVectorReduction svr = new SupportVectorReduction(machine);

            double error2 = svr.Run();


            Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0])));
            Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[1])));
            Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[2])));
            Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[3])));
        }
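
The long comment in Code Example #24 derives the reduced decision function f(x) = 2*x1 + 2*x2 - 3. The sketch below simply evaluates that closed form on the four AND inputs to make the sign pattern explicit; it does not call into Accord.NET at all.

        // Hand evaluation of f(x) = 2*x1 + 2*x2 - 3 from the derivation above:
        //   f(0, 0) = 0 + 0 - 3 = -3  ->  sign -1
        //   f(0, 1) = 0 + 2 - 3 = -1  ->  sign -1
        //   f(1, 0) = 2 + 0 - 3 = -1  ->  sign -1
        //   f(1, 1) = 2 + 2 - 3 = +1  ->  sign +1
        // which matches the AND labels used to train the machine.
        static int AndDecision(double x1, double x2)
        {
            return Math.Sign(2 * x1 + 2 * x2 - 3);
        }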
Code Example #25
        public void DynamicalTimeWarpingConstructorTest()
        {
            double[][] sequences =
            {
                new double[] // -1
                {
                    0, 0, 0,
                    1, 1, 1,
                    2, 2, 2,
                },

                new double[] // -1
                {
                    0, 1, 0,
                    0, 2, 0,
                    0, 3, 0
                },

                new double[] // +1
                {
                    1, 1, 0,
                    1, 2, 0,
                    2, 1, 0,
                },

                new double[] // +1
                {
                    0, 0, 1,
                    0, 0, 2,
                    0, 1, 3,
                },
            };

            int[] outputs = { -1, -1, +1, +1 };


            // Set the parameters of the kernel
            double alpha             = 0.85;
            int    innerVectorLength = 3;


            // Create the kernel. Note that the length of the input sequences does not
            // need to be specified; it is handled automatically by the kernel.
            DynamicTimeWarping target = new DynamicTimeWarping(innerVectorLength, alpha);



            // When using variable-length kernels, specify 0 as the input length.
            KernelSupportVectorMachine svm = new KernelSupportVectorMachine(target, 0);

            // Create the Sequential Minimal Optimization as usual
            SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, sequences, outputs);

            smo.Complexity = 1.5;
            double error = smo.Run();


            // Computing the training values
            var a0 = svm.Compute(sequences[0]);
            var a1 = svm.Compute(sequences[1]);
            var a2 = svm.Compute(sequences[2]);
            var a3 = svm.Compute(sequences[3]);

            Assert.AreEqual(-1, System.Math.Sign(a0));
            Assert.AreEqual(-1, System.Math.Sign(a1));
            Assert.AreEqual(+1, System.Math.Sign(a2));
            Assert.AreEqual(+1, System.Math.Sign(a3));



            // Computing a new testing value
            double[] test =
            {
                1, 0, 1,
                0, 0, 2,
                0, 1, 3,
            };

            var a4 = svm.Compute(test);
        }
Code Example #26
        public void RunTest1()
        {
            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] outputs =
            {
                -1,
                1,
                1,
                -1
            };

            KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Gaussian(3.6), 2);

            var smo = new SequentialMinimalOptimization(svm, inputs, outputs);

            double error1 = smo.Run();

            Assert.AreEqual(0, error1);

            double[] distances = new double[outputs.Length];
            for (int i = 0; i < outputs.Length; i++)
            {
                int y = svm.Compute(inputs[i], out distances[i]);
                Assert.AreEqual(outputs[i], y);
            }


            var target = new ProbabilisticOutputCalibration(svm, inputs, outputs);

            double ll0 = target.LogLikelihood(inputs, outputs);

            double ll1 = target.Run();

            double ll2 = target.LogLikelihood(inputs, outputs);

            Assert.AreEqual(5.5451735748694571, ll1);
            Assert.AreEqual(ll1, ll2);
            Assert.IsTrue(ll1 > ll0);

            double[] newdistances = new double[outputs.Length];
            for (int i = 0; i < outputs.Length; i++)
            {
                int y = svm.Compute(inputs[i], out newdistances[i]);
                Assert.AreEqual(outputs[i], y);
            }

            double[] probs = new double[outputs.Length];
            for (int i = 0; i < outputs.Length; i++)
            {
                int y;
                probs[i] = svm.ToMulticlass().Probability(inputs[i], out y);
                Assert.AreEqual(outputs[i], y == 1 ? 1 : -1);
            }

            Assert.AreEqual(0.25, probs[0], 1e-5);
            Assert.AreEqual(0.75, probs[1], 1e-5);
            Assert.AreEqual(0.75, probs[2], 1e-5);
            Assert.AreEqual(0.25, probs[3], 1e-5);

            foreach (var p in probs)
            {
                Assert.IsFalse(Double.IsNaN(p));
            }
        }
Code Example #27
        public void weight_test_inhomogeneous_linear_kernel()
        {
            var dataset = yinyang;

            double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
            int[]      labels = dataset.GetColumn(2).ToInt32();

            Accord.Math.Tools.SetupGenerator(0);

            var kernel = new Linear(1);

            Assert.AreEqual(kernel.Constant, 1);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity     = 1.0;
                smo.PositiveWeight = 1;
                smo.NegativeWeight = 1;
                smo.Tolerance      = 0.001;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(43, matrix.TruePositives); // both classes are
                Assert.AreEqual(43, matrix.TrueNegatives); // well equilibrated
                Assert.AreEqual(7, matrix.FalseNegatives);
                Assert.AreEqual(7, matrix.FalsePositives);

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(1.0, smo.WeightRatio);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.14, error);
                Assert.AreEqual(0.001, smo.Tolerance);
                Assert.AreEqual(31, machine.SupportVectors.Length);

                machine.Compress();
                Assert.AreEqual(1, machine.Weights[0]);
                Assert.AreEqual(1, machine.SupportVectors.Length);
                Assert.AreEqual(-1.3107402300323954, machine.SupportVectors[0][0], 1e-3);
                Assert.AreEqual(-0.5779471529948812, machine.SupportVectors[0][1], 1e-3);
                Assert.AreEqual(-1.5338510320418068, machine.Threshold, 1e-3);
                for (int i = 0; i < actual.Length; i++)
                {
                    int expected = actual[i];
                    int y        = Math.Sign(machine.Compute(inputs[i]));
                    Assert.AreEqual(expected, y);
                }
            }

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity     = 1;
                smo.PositiveWeight = 100;
                smo.NegativeWeight = 1;
                smo.Tolerance      = 0.001;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(50, matrix.TruePositives); // has more importance
                Assert.AreEqual(23, matrix.TrueNegatives);
                Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
                Assert.AreEqual(27, matrix.FalsePositives);

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(100, smo.WeightRatio);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(100, smo.PositiveWeight);
                Assert.AreEqual(0.001, smo.Tolerance);
                Assert.AreEqual(0.27, error);
                Assert.AreEqual(41, machine.SupportVectors.Length);
            }

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity     = 1;
                smo.PositiveWeight = 1;
                smo.NegativeWeight = 100;
                smo.Tolerance      = 0.001;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                var matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(25, matrix.TruePositives);
                Assert.AreEqual(50, matrix.TrueNegatives); // has more importance
                Assert.AreEqual(25, matrix.FalseNegatives);
                Assert.AreEqual(0, matrix.FalsePositives); // has more importance

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(0.01, smo.WeightRatio);
                Assert.AreEqual(100, smo.NegativeWeight);
                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.25, error);
                Assert.AreEqual(0.001, smo.Tolerance);
                Assert.AreEqual(40, machine.SupportVectors.Length);
            }
        }
Code Example #28
        public void WeightRatioTest()
        {
            var dataset = KernelSupportVectorMachineTest.training;
            var inputs  = dataset.Submatrix(null, 0, 3);
            var labels  = Accord.Math.Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();

            Gaussian kernel = Gaussian.Estimate(inputs);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity  = 1.0;
                smo.WeightRatio = 10;

                double error = smo.Run();

                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.1, smo.NegativeWeight);
                Assert.AreEqual(0.7142857142857143, error);
                Assert.AreEqual(265.78327637381551, ((Gaussian)machine.Kernel).Sigma);
                Assert.AreEqual(39, machine.SupportVectors.Length);


                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(12, matrix.TruePositives); // has more importance
                Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
                Assert.AreEqual(30, matrix.FalsePositives);
                Assert.AreEqual(0, matrix.TrueNegatives);

                Assert.AreEqual(1.0, matrix.Sensitivity);
                Assert.AreEqual(0.0, matrix.Specificity);

                Assert.AreEqual(0.44444444444444448, matrix.FScore);
                Assert.AreEqual(0.0, matrix.MatthewsCorrelationCoefficient);
            }

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity  = 1.0;
                smo.WeightRatio = 0.1;

                double error = smo.Run();

                Assert.AreEqual(0.1, smo.PositiveWeight);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(0.21428571428571427, error);
                Assert.AreEqual(265.78327637381551, ((Gaussian)machine.Kernel).Sigma);
                Assert.AreEqual(18, machine.SupportVectors.Length);


                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(8, matrix.FalseNegatives);
                Assert.AreEqual(1, matrix.FalsePositives); // has more importance
                Assert.AreEqual(4, matrix.TruePositives);
                Assert.AreEqual(29, matrix.TrueNegatives); // has more importance

                Assert.AreEqual(0.33333333333333331, matrix.Sensitivity);
                Assert.AreEqual(0.96666666666666667, matrix.Specificity);

                Assert.AreEqual(0.47058823529411764, matrix.FScore);
                Assert.AreEqual(0.41849149947774944, matrix.MatthewsCorrelationCoefficient);
            }
        }
Code Example #29
        public void DynamicalTimeWarpingConstructorTest3()
        {
            // Suppose you have sequences of multivariate observations, and that
            // those sequences can be of arbitrary length, while each observation
            // has a fixed number of dimensions.

            // In this example, we have sequences of 3-dimensional observations.
            // Each sequence can have an arbitrary length, but each observation
            // will always have length 3:

            double[][][] sequences =
            {
                new double[][]                // first sequence
                {
                    new double[] { 1, 1, 1 }, // first observation of the first sequence
                    new double[] { 1, 2, 1 }, // second observation of the first sequence
                    new double[] { 1, 4, 2 }, // third observation of the first sequence
                    new double[] { 2, 2, 2 }, // fourth observation of the first sequence
                },

                new double[][]                // second sequence (note that this sequence has a different length)
                {
                    new double[] { 1, 1, 1 }, // first observation of the second sequence
                    new double[] { 1, 5, 6 }, // second observation of the second sequence
                    new double[] { 2, 7, 1 }, // third observation of the second sequence
                },

                new double[][]                // third sequence
                {
                    new double[] { 8, 2, 1 }, // first observation of the third sequence
                },

                new double[][]                // fourth sequence
                {
                    new double[] { 8, 2, 5 }, // first observation of the fourth sequence
                    new double[] { 1, 5, 4 }, // second observation of the fourth sequence
                }
            };

            // Now, we will also have a class label associated with each sequence.
            // We will assign -1 to sequences whose observations start with { 1, 1, 1 }
            // and +1 to those that do not:

            int[] outputs =
            {
                -1, -1,  // First two sequences are of class -1 (those start with {1,1,1})
                1, 1,    // Last two sequences are of class +1  (don't start with {1,1,1})
            };

            // At this point, we have to flatten the input sequences from double[][][]
            // to double[][] so they can be properly understood by the SVMs. The issue is
            // that SVMs normally expect the data to consist of fixed-length input
            // vectors with one class label per vector. Here, however, we will be feeding
            // them arbitrary-length sequences of input vectors, with a class label
            // associated with each sequence instead of each vector.

            double[][] inputs = new double[sequences.Length][];
            for (int i = 0; i < sequences.Length; i++)
            {
                inputs[i] = Matrix.Concatenate(sequences[i]);
            }


            // Now we have to set up the Dynamic Time Warping kernel. We need to
            // specify the length of the fixed-size observations contained in each
            // arbitrary-length sequence:
            //
            DynamicTimeWarping kernel = new DynamicTimeWarping(length: 3);

            // Now we can create the machine. When using variable-length
            // kernels, we will need to pass zero as the input length:
            var svm = new KernelSupportVectorMachine(kernel, inputs: 0);

            // Create the Sequential Minimal Optimization learning algorithm
            var smo = new SequentialMinimalOptimization(svm, inputs, outputs)
            {
                Complexity = 1.5
            };


            // And start learning it!
            double error = smo.Run(); // error will be 0.0


            // At this point, we should have obtained a useful machine. Let's
            // see if it can classify a few examples it hasn't seen before:

            double[][] a =
            {
                new double[] { 1, 1, 1 },
                new double[] { 7, 2, 5 },
                new double[] { 2, 5, 1 },
            };

            double[][] b =
            {
                new double[] { 7, 5, 2 },
                new double[] { 4, 2, 5 },
                new double[] { 1, 1, 1 },
            };

            // Following the aforementioned logic, sequence (a) should be
            // classified as -1, and sequence (b) should be classified as +1.

            int resultA = System.Math.Sign(svm.Compute(Matrix.Concatenate(a))); // -1
            int resultB = System.Math.Sign(svm.Compute(Matrix.Concatenate(b))); // +1


            Assert.AreEqual(0, error);
            Assert.AreEqual(-1, resultA);
            Assert.AreEqual(+1, resultB);
        }
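
        // A minimal sketch (not part of the original example), assuming the Accord.NET
        // IKernel interface exposes double Function(double[] x, double[] z). It shows how
        // the Dynamic Time Warping kernel can be evaluated directly on two flattened
        // sequences to inspect their similarity before any machine is trained.
        public void DynamicTimeWarpingKernelFunctionSketch()
        {
            // Observations are 3-dimensional, as in the test above.
            DynamicTimeWarping kernel = new DynamicTimeWarping(length: 3);

            // Two flattened sequences with different numbers of observations (2 and 3):
            double[] x = { 1, 1, 1,   1, 2, 1 };
            double[] z = { 1, 1, 1,   1, 5, 6,   2, 7, 1 };

            // Larger values indicate sequences that align more closely under DTW.
            double similarity = kernel.Function(x, z);

            // For reference, the similarity of a sequence with itself:
            double self = kernel.Function(x, x);
        }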
コード例 #30
0
        public void WeightsTest1()
        {
            var dataset = yinyang;

            double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
            int[]      labels = dataset.GetColumn(2).ToInt32();

            Accord.Math.Tools.SetupGenerator(0);

            var kernel = new Linear(1);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity     = 1.0;
                smo.PositiveWeight = 1;
                smo.NegativeWeight = 1;
                smo.Tolerance      = 0.001;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(43, matrix.TruePositives); // both classes are
                Assert.AreEqual(43, matrix.TrueNegatives); // equally well balanced
                Assert.AreEqual(7, matrix.FalseNegatives);
                Assert.AreEqual(7, matrix.FalsePositives);

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(1.0, smo.WeightRatio);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.14, error);
                Assert.AreEqual(0.001, smo.Tolerance);
                Assert.AreEqual(31, machine.SupportVectors.Length);
            }

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity     = 1;
                smo.PositiveWeight = 100;
                smo.NegativeWeight = 1;
                smo.Tolerance      = 0.001;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(50, matrix.TruePositives); // has more importance
                Assert.AreEqual(23, matrix.TrueNegatives);
                Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
                Assert.AreEqual(27, matrix.FalsePositives);

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(100, smo.WeightRatio);
                Assert.AreEqual(1.0, smo.NegativeWeight);
                Assert.AreEqual(100, smo.PositiveWeight);
                Assert.AreEqual(0.001, smo.Tolerance);
                Assert.AreEqual(0.27, error);
                Assert.AreEqual(41, machine.SupportVectors.Length);
            }

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo     = new SequentialMinimalOptimization(machine, inputs, labels);

                smo.Complexity     = 1;
                smo.PositiveWeight = 1;
                smo.NegativeWeight = 100;
                smo.Tolerance      = 0.001;

                double error = smo.Run();

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                {
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));
                }

                var matrix = new ConfusionMatrix(actual, labels);

                Assert.AreEqual(25, matrix.TruePositives);
                Assert.AreEqual(50, matrix.TrueNegatives); // has more importance
                Assert.AreEqual(25, matrix.FalseNegatives);
                Assert.AreEqual(0, matrix.FalsePositives); // has more importance

                Assert.AreEqual(1.0, smo.Complexity);
                Assert.AreEqual(0.01, smo.WeightRatio);
                Assert.AreEqual(100, smo.NegativeWeight);
                Assert.AreEqual(1.0, smo.PositiveWeight);
                Assert.AreEqual(0.25, error);
                Assert.AreEqual(0.001, smo.Tolerance);
                Assert.AreEqual(40, machine.SupportVectors.Length);
            }
        }
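
        // A minimal sketch (not part of the original test), expressing the class-weighting
        // idea from WeightsTest1 through the WeightRatio property asserted above. That
        // WeightRatio can be assigned directly (instead of setting PositiveWeight and
        // NegativeWeight individually) is an assumption here.
        public void WeightRatioSketch(double[][] inputs, int[] labels)
        {
            var machine = new KernelSupportVectorMachine(new Linear(1), inputs[0].Length);
            var smo     = new SequentialMinimalOptimization(machine, inputs, labels);

            smo.Complexity  = 1.0;
            smo.Tolerance   = 0.001;
            smo.WeightRatio = 100; // errors on the positive class cost 100x more than
                                   // errors on the negative class, matching the second
                                   // block above (PositiveWeight = 100, NegativeWeight = 1)

            double error = smo.Run();

            // With such a strong positive weight, we expect few (ideally zero)
            // false negatives, at the price of more false positives.
        }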