Polynomial Kernel
Inheritance: IKernel
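
The Polynomial kernel computes k(x, y) = (x · y + constant)^degree, where the degree and the constant term are the two constructor arguments (the constant defaults to 1). A minimal standalone sketch of evaluating the kernel directly; the variable names here are illustrative only:

        var kernel = new Polynomial(degree: 2, constant: 1);

        double[] x = { 1.0, 2.0 };
        double[] y = { 3.0, 4.0 };

        // (1*3 + 2*4 + 1)^2 = 12^2 = 144
        double k = kernel.Function(x, y);
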
        public void DistanceTest()
        {
            Polynomial target = new Polynomial(1);

            double[] x = new double[] { 1, 1 };
            double[] y = new double[] { 1, 1 };

            double expected = 0;
            double actual = target.Distance(x, y);
            Assert.AreEqual(expected, actual);


            x = new double[] { 0.5, 2.0 };
            y = new double[] { 1.3, -0.2 };

            expected = 5.48;
            actual = target.Distance(x, y);

            Assert.AreEqual(expected, actual);


            target = new Polynomial(3);

            x = new double[] { 9.4, 22.1 };
            y = new double[] { -6.21, 4 };

            expected = 192981940.60611719;
            actual = target.Distance(x, y);
            Assert.AreEqual(expected, actual);
        }
        public void ComputeTest5()
        {
            var dataset = SequentialMinimalOptimizationTest.yinyang;

            double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
            int[] labels = dataset.GetColumn(2).ToInt32();

            var kernel = new Polynomial(2, 1);

            Accord.Math.Tools.SetupGenerator(0);
            var projection = inputs.Apply(kernel.Transform);
            var machine = new SupportVectorMachine(projection[0].Length);
            var smo = new LinearCoordinateDescent(machine, projection, labels)
            {
                Complexity = 1000000,
                Tolerance = 1e-15
            };

            double error = smo.Run();

            Assert.AreEqual(1000000.0, smo.Complexity, 1e-15);

            int[] actual = new int[labels.Length];
            for (int i = 0; i < actual.Length; i++)
                actual[i] = Math.Sign(machine.Compute(projection[i]));

            ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
            Assert.AreEqual(6, matrix.FalseNegatives);
            Assert.AreEqual(7, matrix.FalsePositives);
            Assert.AreEqual(44, matrix.TruePositives);
            Assert.AreEqual(43, matrix.TrueNegatives);
        }
        public void DistanceTest()
        {
            Polynomial dense = new Polynomial(3);
            SparsePolynomial target = new SparsePolynomial(3);

            double[] sx = { 1, -0.555556, 2, +0.250000, 3, -0.864407, 4, -0.916667 };
            double[] sy = { 1, -0.666667, 2, -0.166667, 3, -0.864407, 4, -0.916667 };
            double[] sz = { 1, -0.944444, 3, -0.898305, 4, -0.916667 };

            double[] dx = { -0.555556, +0.250000, -0.864407, -0.916667 };
            double[] dy = { -0.666667, -0.166667, -0.864407, -0.916667 };
            double[] dz = { -0.944444, +0.000000, -0.898305, -0.916667 };

            double expected, actual;

            expected = dense.Distance(dx, dy);
            actual = target.Distance(sx, sy);
            Assert.AreEqual(expected, actual, 1e-10);

            expected = dense.Distance(dx, dz);
            actual = target.Distance(sx, sz);
            Assert.AreEqual(expected, actual, 1e-10);

            expected = dense.Distance(dy, dz);
            actual = target.Distance(sy, sz);
            Assert.AreEqual(expected, actual, 1e-10);
        }
        public void FunctionTest()
        {
            Polynomial target = new Polynomial(1, 0);

            double[] x = new double[] { 1, 1 };
            double[] y = new double[] { 1, 1 };

            double expected = 2;
            double actual = target.Function(x, y);
            Assert.AreEqual(expected, actual);


            x = new double[] { 0.5, 2.0 };
            y = new double[] { 1.3, -0.2 };

            expected = 0.25;
            actual = target.Function(x, y);

            Assert.AreEqual(expected, actual);


            target = new Polynomial(3, 0);

            x = new double[] { 9.4, 22.1 };
            y = new double[] { -6.21, 4 };

            expected = 27070.26085757601;
            actual = target.Function(x, y);
            Assert.AreEqual(expected, actual, 0.0001);
        }
        public void FunctionTest()
        {
            Polynomial target = new Polynomial(1, 0);

            double[] x = new double[] { 1, 1 };
            double[] y = new double[] { 1, 1 };

            double expected = 2;
            double actual = target.Function(x, y);
            Assert.AreEqual(expected, actual);


            x = new double[] { 0.5, 2.0 };
            y = new double[] { 1.3, -0.2 };

            expected = 0.25;
            actual = target.Function(x, y);

            Assert.AreEqual(expected, actual);


            target = new Polynomial(3, 0);

            x = new double[] { 9.4, 22.1 };
            y = new double[] { -6.21, 4 };

            expected = System.Math.Pow(x.InnerProduct(y), 3);
            actual = target.Function(x, y);
            Assert.AreEqual(expected, actual, 0.0001);
        }
        public void ComputeTest5()
        {
            var dataset = SequentialMinimalOptimizationTest.yinyang;
            var inputs = dataset.Submatrix(null, 0, 1).ToArray();
            var labels = dataset.GetColumn(2).ToInt32();

            var kernel = new Polynomial(2, 0);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);
                smo.UseComplexityHeuristic = true;

                double error = smo.Run();
                Assert.AreEqual(0.2, error);

                Assert.AreEqual(0.11714451552090824, smo.Complexity);

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
                Assert.AreEqual(20, matrix.FalseNegatives);
                Assert.AreEqual(0, matrix.FalsePositives);
                Assert.AreEqual(30, matrix.TruePositives);
                Assert.AreEqual(50, matrix.TrueNegatives);
            }

            {
                Accord.Math.Tools.SetupGenerator(0);

                var projection = inputs.Apply(kernel.Transform);
                var machine = new SupportVectorMachine(projection[0].Length);
                var smo = new LinearNewtonMethod(machine, projection, labels);
                smo.UseComplexityHeuristic = true;

                double error = smo.Run();
                Assert.AreEqual(0.18, error);

                Assert.AreEqual(0.11714451552090821, smo.Complexity, 1e-15);

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(projection[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
                Assert.AreEqual(17, matrix.FalseNegatives);
                Assert.AreEqual(1, matrix.FalsePositives);
                Assert.AreEqual(33, matrix.TruePositives);
                Assert.AreEqual(49, matrix.TrueNegatives);
            }

        }
        // Do training for all existing training data (one PNG image per sample)
        public SVM(string TrainedDataInputFile)
        {
            _engine = new TesseractEngine(@"./tessdata3", "eng", EngineMode.TesseractAndCube);
            _engine.SetVariable("tessedit_char_whitelist", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ");
            _engine.SetVariable("tessedit_char_blacklist", "¢§+~»~`!@#$%^&*()_+-={}[]|\\:\";\'<>?,./");

            string[] TrainedData = Directory.GetFiles(TrainedDataInputFile, "*.png");
            double[][] inputs = new double[TrainedData.Length][];
            double[] InputArray = new double[784];
            int[] Outputs = new int[TrainedData.Length];

            for (int i = 0; i < TrainedData.Length; i++)
            {
                string filename = Path.GetFileNameWithoutExtension(TrainedData[i]);
                Bitmap TrainingImage = new Bitmap(TrainedData[i]);
                string[] split = filename.Split('.');
                for (int j = 0; j < 28; j++)
                {
                    for (int k = 0; k < 28; k++)
                    {
                        if (!TrainingImage.GetPixel(j, k).Name.Equals("ffffffff"))
                            InputArray[j * 28 + k] = 1;
                        else
                            InputArray[j * 28 + k] = 0;
                    }
                }

                inputs[i] = InputArray;
                Outputs[i] = Convert.ToInt32(split[0]);
                InputArray = new double[784];
            }

            IKernel kernel;
            kernel = new Polynomial(2, 0);
            ksvm = new MulticlassSupportVectorMachine(784, kernel, 2);
            MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(ksvm, inputs, Outputs);

            double complexity = 1;   // TODO: set these three parameters carefully
            double epsilon = 0.001;
            double tolerance = 0.2;

            ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs);
                smo.Complexity = complexity;  // cost parameter (C) of the SVM
                smo.Epsilon = epsilon;
                smo.Tolerance = tolerance;
                return smo;
            };

            // Train the machines. It should take a while.
            double error = ml.Run();
        }
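
A hedged follow-up sketch: once the constructor above has trained ksvm, a new 28x28 glyph could be classified by repeating the same white-pixel binarization. ClassifyImage is a hypothetical helper, not part of the original class:

        // Hypothetical helper: classify one 28x28 image with the trained machine,
        // using the same binarization as the training loop above.
        public int ClassifyImage(Bitmap image)
        {
            double[] input = new double[784];
            for (int j = 0; j < 28; j++)
                for (int k = 0; k < 28; k++)
                    input[j * 28 + k] = image.GetPixel(j, k).Name.Equals("ffffffff") ? 0 : 1;

            return ksvm.Compute(input); // returns the predicted class label
        }
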
        public void Aprender(IDadosSinaisEstaticos dados)
        {
            var kernel = new Polynomial(degree: 3, constant: 1);
            svm = new MulticlassSupportVectorMachine(QuantidadeIndeterminadaDeCaracteristicas, kernel, dados.QuantidadeClasses);

            var teacher = new MulticlassSupportVectorLearning(svm, dados.CaracteristicasSinais, dados.IdentificadoresSinais)
            {
                Algorithm = (machine, classInputs, classOutputs, j, k) =>
                    new SequentialMinimalOptimization(machine, classInputs, classOutputs)
                    {
                        Complexity = 1
                    }
            };

            teacher.Run();
        }
        public void LearnTest()
        {

            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] xor =
            {
                -1,
                 1,
                 1,
                -1
            };

            var kernel = new Polynomial(2, 0.0);

            double[][] augmented = new double[inputs.Length][];
            for (int i = 0; i < inputs.Length; i++)
                augmented[i] = kernel.Transform(inputs[i]);

            SupportVectorMachine machine = new SupportVectorMachine(augmented[0].Length);

            // Create the linear dual coordinate descent teacher
            var learn = new LinearDualCoordinateDescent(machine, augmented, xor);

            // Run the learning algorithm
            double error = learn.Run();

            Assert.AreEqual(0, error);

            int[] output = augmented.Apply(p => Math.Sign(machine.Compute(p)));
            for (int i = 0; i < output.Length; i++)
                Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i]));
        }
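
A usage note for the example above: because the machine was trained on explicitly transformed inputs, any new point must go through the same kernel expansion before classification. A minimal sketch, with an illustrative test point:

            // Classify a new point: expand it with the same kernel first
            double[] point = { -1, 1 };
            int label = Math.Sign(machine.Compute(kernel.Transform(point)));
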
        public void ExpandReverseDistanceTest()
        {
            for (int i = 1; i <= 10; i++)
            {
                Polynomial kernel = new Polynomial(i, 0);

                var x = new double[] { 0.5, 2.0 };
                var y = new double[] { 1.3, -0.2 };

                var phi_x = kernel.Transform(x);
                var phi_y = kernel.Transform(y);

                //int expected_size = (int)System.Math.Pow(x.Length, i);
                //Assert.AreEqual(phi_x.Length, phi_y.Length);
                //Assert.AreEqual(phi_x.Length, expected_size);

                double d = Distance.SquareEuclidean(x, y);
                double phi_d = kernel.ReverseDistance(phi_x, phi_y);

                Assert.AreEqual(phi_d, d, 1e-6);
                Assert.IsFalse(double.IsNaN(phi_d));
                Assert.IsFalse(double.IsNaN(d));
            }
        }
        public void ExpandDistanceTest()
        {
            for (int i = 1; i <= 10; i++)
            {
                Polynomial kernel = new Polynomial(i, 0);

                var x = new double[] { 0.5, 2.0 };
                var y = new double[] { 1.3, -0.2 };

                var phi_x = kernel.Transform(x);
                var phi_y = kernel.Transform(y);

                double d1 = Distance.SquareEuclidean(phi_x, phi_y);
                double d2 = kernel.Distance(x, y);
                double d3 = Accord.Statistics.Tools.Distance(kernel, x, y);

                Assert.AreEqual(d1, d2, 1e-4);
                Assert.AreEqual(d1, d3, 1e-4);
                Assert.IsFalse(double.IsNaN(d1));
                Assert.IsFalse(double.IsNaN(d2));
                Assert.IsFalse(double.IsNaN(d3));
            }
        }
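
Both expansion tests above rely on Transform being an explicit feature map for the kernel: with constant 0, Function(x, y) should match the inner product of the transformed vectors. A small sketch of that identity, reusing the InnerProduct extension seen in FunctionTest:

            var kernel = new Polynomial(degree: 3, constant: 0);

            double[] x = { 0.5, 2.0 };
            double[] y = { 1.3, -0.2 };

            // Kernel trick: k(x, y) == <phi(x), phi(y)> in the expanded space
            double implicitValue = kernel.Function(x, y);
            double explicitValue = kernel.Transform(x).InnerProduct(kernel.Transform(y));

            Assert.AreEqual(implicitValue, explicitValue, 1e-6);
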
        public void FunctionTest2()
        {
            // Tested against R's kernlab

            double[][] data = 
            {
                new double[] { 5.1, 3.5, 1.4, 0.2 },
                new double[] { 5.0, 3.6, 1.4, 0.2 },
                new double[] { 4.9, 3.0, 1.4, 0.2 },
                new double[] { 5.8, 4.0, 1.2, 0.2 },
                new double[] { 4.7, 3.2, 1.3, 0.2 },
            };

            // poly <- polydot(degree = 3, scale = 1, offset = 1)

            Polynomial kernel = new Polynomial(degree: 3, constant: 1);

            // Compute the kernel matrix
            double[,] actual = new double[5, 5];
            for (int i = 0; i < 5; i++)
                for (int j = 0; j < 5; j++)
                    actual[i, j] = kernel.Function(data[i], data[j]);

            double[,] expected =
            {
                { 70240.51, 69426.53, 57022.17,  99252.85, 55002.06 },
                { 69426.53, 68719.48, 56181.89,  98099.75, 54353.80 },
                { 57022.17, 56181.89, 46694.89,  80286.11, 44701.08 },
                { 99252.85, 98099.75, 80286.11, 141583.69, 77635.89 },
                { 55002.06, 54353.80, 44701.08,  77635.89, 43095.88 },

            };

            // Assert both are equal
            for (int i = 0; i < 5; i++)
                for (int j = 0; j < 5; j++)
                    Assert.AreEqual(expected[i, j], actual[i, j], 1e-2);
        }
        private void btnRunAnalysis_Click(object sender, EventArgs e)
        {
            if (dgvAnalysisSource.Rows.Count == 0)
            {
                MessageBox.Show("Please load the training data before clicking this button");
                return;
            }

            lbStatus.Text = "Gathering data. This may take a while...";
            Application.DoEvents();


            // Extract inputs and outputs
            int rows = dgvAnalysisSource.Rows.Count;
            double[][] input = Jagged.Zeros(rows, 32 * 32);
            int[] output = new int[rows];
            for (int i = 0; i < rows; i++)
            {
                input.SetRow(i, (double[])dgvAnalysisSource.Rows[i].Cells["colTrainingFeatures"].Value);
                output[i] = (int)dgvAnalysisSource.Rows[i].Cells["colTrainingLabel"].Value;
            }

            // Create the chosen Kernel with given parameters
            IKernel kernel;
            if (rbGaussian.Checked)
                kernel = new Gaussian((double)numSigma.Value);
            else
                kernel = new Polynomial((int)numDegree.Value, (double)numConstant.Value);

            // Create the Kernel Discriminant Analysis using the selected Kernel
            kda = new KernelDiscriminantAnalysis(kernel)
            {
                Threshold = (double)numThreshold.Value,
                Regularization = (double)numRegularization.Value
            };


            lbStatus.Text = "Computing the analysis. This may take a significant amount of time...";
            Application.DoEvents();

            // Compute the analysis. 
            kda.Learn(input, output);


            // Show information about the analysis in the form
            dgvPrincipalComponents.DataSource = kda.Discriminants;
            dgvFeatureVectors.DataSource = new ArrayDataView(kda.DiscriminantVectors);
            dgvClasses.DataSource = kda.Classes;

            // Create the component graphs
            distributionView.DataSource = kda.Discriminants;
            cumulativeView.DataSource = kda.Discriminants;

            lbStatus.Text = "Analysis complete. Click Classify to test the analysis.";

            btnClassify.Enabled = true;
        }
        public void ComputeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };


            IKernel kernel = new Polynomial(2);
            var msvm = new MultilabelSupportVectorMachine(5, kernel, 4);
            var smo = new MultilabelSupportVectorLearning(msvm, inputs, outputs);
            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            double error = smo.Run();
            Assert.AreEqual(0, error);


            int[] evals = new int[inputs.Length];
            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double[] responses;
                msvm.Compute(inputs[i], out responses);
                int actual;
                responses.Max(out actual);
                Assert.AreEqual(expected, actual);
                evals[i] = msvm.GetLastKernelEvaluations();
            }

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(msvm.SupportVectorUniqueCount, evals[i]);
        }
        public void TransformTest()
        {
            var inputs = yinyang.Submatrix(null, 0, 1).ToArray();
            var labels = yinyang.GetColumn(2).ToInt32();
            
            ConfusionMatrix actual, expected;
            SequentialMinimalOptimization a, b;

            var kernel = new Polynomial(2, 0);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                a = new SequentialMinimalOptimization(machine, inputs, labels);
                a.UseComplexityHeuristic = true;
                a.Run();

                int[] values = new int[labels.Length];
                for (int i = 0; i < values.Length; i++)
                    values[i] = Math.Sign(machine.Compute(inputs[i]));

                expected = new ConfusionMatrix(values, labels);
            }

            {
                var projection = inputs.Apply(kernel.Transform);
                var machine = new SupportVectorMachine(projection[0].Length);
                b = new SequentialMinimalOptimization(machine, projection, labels);
                b.UseComplexityHeuristic = true;
                b.Run();

                int[] values = new int[labels.Length];
                for (int i = 0; i < values.Length; i++)
                    values[i] = Math.Sign(machine.Compute(projection[i]));

                actual = new ConfusionMatrix(values, labels);
            }

            Assert.AreEqual(a.Complexity, b.Complexity, 1e-15);
            Assert.AreEqual(expected.TrueNegatives, actual.TrueNegatives);
            Assert.AreEqual(expected.TruePositives, actual.TruePositives);
            Assert.AreEqual(expected.FalseNegatives, actual.FalseNegatives);
            Assert.AreEqual(expected.FalsePositives, actual.FalsePositives);
        }
        private void button1_Click(object sender, EventArgs e)
        {
            // Finishes and save any pending changes to the given data
            dgvAnalysisSource.EndEdit();

            if (dgvAnalysisSource.DataSource == null) return;

            // Creates a matrix from the source data table
            double[,] sourceMatrix = (dgvAnalysisSource.DataSource as DataTable).ToMatrix(out sourceColumns);

            // Creates a new Simple Descriptive Analysis
            sda = new DescriptiveAnalysis(sourceMatrix, sourceColumns);
            sda.Compute();

            // Populates statistics overview tab with analysis data
            dgvDistributionMeasures.DataSource = sda.Measures;

            IKernel kernel;
            if (rbGaussian.Checked)
            {
                kernel = new Gaussian((double)numSigma.Value);
            }
            else
            {
                kernel = new Polynomial((int)numDegree.Value, (double)numConstant.Value);
            }

            // Get only the input values (exclude the class label indicator column)
            double[,] data = sourceMatrix.Submatrix(null, startColumn: 0, endColumn: 1);

            // Get only the associated labels
            int[] labels = sourceMatrix.GetColumn(2).ToInt32();

            // Creates the Kernel Discriminant Analysis of the given source
            kda = new KernelDiscriminantAnalysis(data, labels, kernel);

            // Keep all components
            kda.Threshold = (double)numThreshold.Value;

            // Computes the analysis
            kda.Compute();

            // Perform the transformation of the data using two components
            double[,] result = kda.Transform(data, 2);

            // Create a new plot with the original Z column
            double[,] points = result.InsertColumn(sourceMatrix.GetColumn(2));

            // Create output scatter plot
            outputScatterplot.DataSource = points;
            CreateScatterplot(graphMapFeature, points);

            // Create output table
            dgvProjectionResult.DataSource = new ArrayDataView(points, sourceColumns);

            // Populates components overview with analysis data
            dgvFeatureVectors.DataSource = new ArrayDataView(kda.DiscriminantMatrix);
            dgvPrincipalComponents.DataSource = kda.Discriminants;
            dgvScatterBetween.DataSource = new ArrayDataView(kda.ScatterBetweenClass);
            dgvScatterWithin.DataSource = new ArrayDataView(kda.ScatterWithinClass);
            dgvScatterTotal.DataSource = new ArrayDataView(kda.ScatterMatrix);

            // Populates classes information
            dgvClasses.DataSource = kda.Classes;

            CreateComponentCumulativeDistributionGraph(graphCurve);
            CreateComponentDistributionGraph(graphShare);
        }
        private void button1_Click(object sender, EventArgs e)
        {
            // Finishes and save any pending changes to the given data
            dgvAnalysisSource.EndEdit();

            if (dgvAnalysisSource.DataSource == null) return;

            // Creates a matrix from the source data table
            double[,] sourceMatrix = (dgvAnalysisSource.DataSource as DataTable).ToMatrix(out sourceColumns);

            int rows = sourceMatrix.GetLength(0);
            int cols = sourceMatrix.GetLength(1);


            // Creates a new Simple Descriptive Analysis
            sda = new DescriptiveAnalysis(sourceMatrix, sourceColumns);

            sda.Compute();

            // Populates statistics overview tab with analysis data
            dgvDistributionMeasures.DataSource = sda.Measures;


            IKernel kernel;
            if (rbGaussian.Checked)
            {
                kernel = new Gaussian((double)numSigma.Value);
            }
            else
            {
                kernel = new Polynomial((int)numDegree.Value, (double)numConstant.Value);
            }


            // Get only the input values (exclude the class label indicator column)
            double[,] data = sourceMatrix.Submatrix(null, startColumn: 0, endColumn: 1);

            // Get only the associated labels
            int[] labels = sourceMatrix.GetColumn(2).ToInt32();


            // Creates the Kernel Principal Component Analysis of the given source
            kpca = new KernelPrincipalComponentAnalysis(data, kernel,
                (AnalysisMethod)cbMethod.SelectedValue);

            kpca.Center = cbCenter.Checked;

            // Compute the analysis
            kpca.Compute();


            
            double[,] result;

            if (kpca.Components.Count >= 2)
            {
                // Perform the transformation of the data using two components 
                result = kpca.Transform(data, 2);
            }
            else
            {
                result = kpca.Transform(data, 1);
                result = result.InsertColumn(Matrix.Vector(result.GetLength(0), 0.0));
            }

            // Create a new plot with the original Z column
            double[,] points = result.InsertColumn(sourceMatrix.GetColumn(2));

            // Create output scatter plot
            outputScatterplot.DataSource = points;
            CreateScatterplot(graphMapFeature, points);

            // Create output table
            dgvProjectionResult.DataSource = new ArrayDataView(points, sourceColumns);
            dgvReversionSource.DataSource = new ArrayDataView(kpca.Result);


            // Populates components overview with analysis data
            dgvFeatureVectors.DataSource = new ArrayDataView(kpca.ComponentMatrix);
            dgvPrincipalComponents.DataSource = kpca.Components;

            dgvProjectionComponents.DataSource = kpca.Components;
            dgvReversionComponents.DataSource = kpca.Components;

            numComponents.Maximum = kpca.Components.Count;
            numNeighbor.Maximum = kpca.Result.GetLength(0);
            numNeighbor.Value = System.Math.Min(10, numNeighbor.Maximum);

            CreateComponentCumulativeDistributionGraph(graphCurve);
            CreateComponentDistributionGraph(graphShare);

        }
        public void ComputeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };

            IKernel kernel = new Polynomial(2);
            var msvm = new MulticlassSupportVectorMachine(5, kernel, 4);
            var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);
            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            double error = smo.Run();

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Elimination);
                Assert.AreEqual(expected, actual);
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Voting);
                Assert.AreEqual(expected, actual);
            }
        }
        private void btnRunTraining_Click(object sender, EventArgs e)
        {
            if (dgvTrainingSource.Rows.Count == 0)
            {
                MessageBox.Show("Please load the training data before clicking this button");
                return;
            }

            lbStatus.Text = "Gathering data. This may take a while...";
            Application.DoEvents();



            // Extract inputs and outputs
            int rows = dgvTrainingSource.Rows.Count;
            double[][] input = new double[rows][];
            int[] output = new int[rows];
            for (int i = 0; i < rows; i++)
            {
                input[i] = (double[])dgvTrainingSource.Rows[i].Cells["colTrainingFeatures"].Value;
                output[i] = (int)dgvTrainingSource.Rows[i].Cells["colTrainingLabel"].Value;
            }

            // Create the chosen Kernel with given parameters
            IKernel kernel;
            if (rbGaussian.Checked)
            {
                kernel = new Gaussian((double)numSigma.Value);
            }
            else
            {
                if (numDegree.Value == 1)
                    kernel = new Linear((double)numConstant.Value);
                else
                    kernel = new Polynomial((int)numDegree.Value, (double)numConstant.Value);
            }

            // Create the Multi-class Support Vector Machine using the selected Kernel
            ksvm = new MulticlassSupportVectorMachine(1024, kernel, 10);

            // Create the learning algorithm using the machine and the training data
            MulticlassSupportVectorLearning ml = new MulticlassSupportVectorLearning(ksvm, input, output);

            // Extract training parameters from the interface
            double complexity = (double)numComplexity.Value;
            double tolerance = (double)numTolerance.Value;
            int cacheSize = (int)numCache.Value;
            SelectionStrategy strategy = (SelectionStrategy)cbStrategy.SelectedItem;

            // Configure the learning algorithm
            ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs);
                smo.Complexity = complexity;
                smo.Tolerance = tolerance;
                smo.CacheSize = cacheSize;
                smo.Strategy = strategy;
                if (kernel is Linear) smo.Compact = true;
                return smo;
            };


            lbStatus.Text = "Training the classifiers. This may take a (very) significant amount of time...";
            Application.DoEvents();

            Stopwatch sw = Stopwatch.StartNew();

            // Train the machines. It should take a while.
            double error = ml.Run();

            sw.Stop();

            lbStatus.Text = String.Format(
                "Training complete ({0}ms, {1}er). Click Classify to test the classifiers.",
                sw.ElapsedMilliseconds, error);

            btnClassifyVoting.Enabled = true;
            btnClassifyElimination.Enabled = true;
            btnCalibration.Enabled = true;


            // Populate the information tab with the machines
            dgvMachines.Rows.Clear();
            int k = 1;
            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < i; j++, k++)
                {
                    var machine = ksvm[i, j];

                    int sv = machine.SupportVectors == null ? 0 : machine.SupportVectors.Length;

                    int c = dgvMachines.Rows.Add(k, i + "-vs-" + j, sv, machine.Threshold);
                    dgvMachines.Rows[c].Tag = machine;
                }
            }

            // approximate size in bytes = 
            //   number of support vectors *
            //   number of doubles in a support vector *
            //   size of double
            int bytes = ksvm.SupportVectorUniqueCount * 1024 * sizeof(double);
            float megabytes = bytes / (1024f * 1024f); // float division keeps fractional megabytes
            lbSize.Text = String.Format("{0} ({1} MB)", ksvm.SupportVectorUniqueCount, megabytes);
        }
        public void TransformTest_Quadratic()
        {
            double[][] data = 
            {
                new double[] { 5.1, 3.5, 1.4, 0.2 },
                new double[] { 5.0, 3.6, 1.4, 0.2 },
                new double[] { 4.9, 3.0, 1.4, 0.2 },
                new double[] { 5.8, 4.0, 1.2, 0.2 },
                new double[] { 4.7, 3.2, 1.3, 0.2 },
            };

            var target = new Polynomial(2);
            var quadratic = new Quadratic();

            double[][] expected = data.Apply(quadratic.Transform);
            double[][] actual = data.Apply(target.Transform);

            Assert.IsTrue(expected.IsEqual(actual, 1e-10));
        }
        public void KernelFunctionCacheConstructorTest7()
        {
            double[][] inputs =
            {
                new double[] { 0, 1 },
                new double[] { 1, 0 },
                new double[] { 1, 1 },
            };

            IKernel kernel = new Polynomial(2);

            int cacheSize = inputs.Length;

            KernelFunctionCache target = new KernelFunctionCache(kernel, inputs, cacheSize);

            Assert.AreEqual(3, target.Size);
            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(0, target.Misses);

            // upper half
            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = i + 1; j < inputs.Length; j++)
                {
                    double expected = kernel.Function(inputs[i], inputs[j]);
                    double actual = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }
            }

            var lruList1 = target.GetLeastRecentlyUsedList();

            Assert.AreEqual(3, target.Misses);
            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(1.0, target.Usage);

            // upper half, backwards
            for (int i = inputs.Length - 1; i >= 0; i--)
            {
                for (int j = inputs.Length - 1; j >= i; j--)
                {
                    double expected = kernel.Function(inputs[i], inputs[j]);
                    double actual = target.GetOrCompute(j, i);

                    Assert.AreEqual(expected, actual);
                }
            }

            var lruList2 = target.GetLeastRecentlyUsedList();

            Assert.IsTrue(lruList2.SequenceEqual(lruList1.Reverse())); 

            Assert.AreEqual(3, target.Misses);
            Assert.AreEqual(3, target.Hits);
            Assert.AreEqual(1.0, target.Usage);
        }
        public void multiclass_precomputed_matrix_smo()
        {
            #region doc_precomputed
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] trainInputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0 
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] trainOutputs = // those are the training set class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // Let's chose a kernel function
            Polynomial kernel = new Polynomial(2);

            // Get the kernel matrix for the training set
            double[][] K = kernel.ToJagged(trainInputs);

            // Create a pre-computed kernel
            var pre = new Precomputed(K);

            // Create a one-vs-one learning algorithm using SMO
            var teacher = new MulticlassSupportVectorLearning<Precomputed, int>()
            {
                Learner = (p) => new SequentialMinimalOptimization<Precomputed, int>()
                {
                    Kernel = pre
                }
            };

#if DEBUG
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

            // Learn a machine
            var machine = teacher.Learn(pre.Indices, trainOutputs);

            // Compute the machine's prediction for the training set
            int[] trainPrediction = machine.Decide(pre.Indices);

            // Evaluate prediction error for the training set using mean accuracy (mAcc)
            double trainingError = new ZeroOneLoss(trainOutputs).Loss(trainPrediction);

            // Now let's compute the machine's prediction for a test set
            double[][] testInputs = // test-set inputs
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0 
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] testOutputs = // those are the test set class labels
            {
                0, 0,  1,  2, 
            };

            // Compute precomputed matrix between train and testing
            pre.Values = kernel.ToJagged2(trainInputs, testInputs);

            // Update the kernel
            machine.Kernel = pre;

            // Compute the machine's prediction for the test set
            int[] testPrediction = machine.Decide(pre.Indices);

            // Evaluate prediction error for the training set using mean accuracy (mAcc)
            double testError = new ZeroOneLoss(testOutputs).Loss(testPrediction);
            #endregion


            Assert.AreEqual(0, trainingError);
            Assert.AreEqual(0, testError);

            // Create a one-vs-one learning algorithm using SMO
            var teacher2 = new MulticlassSupportVectorLearning<Polynomial>()
            {
                Learner = (p) => new SequentialMinimalOptimization<Polynomial>()
                {
                    Kernel = kernel
                }
            };

#if DEBUG
            teacher2.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

            // Learn a machine
            var expected = teacher2.Learn(trainInputs, trainOutputs);

            Assert.AreEqual(4, expected.NumberOfInputs);
            Assert.AreEqual(3, expected.NumberOfOutputs);
            Assert.AreEqual(0, machine.NumberOfInputs);
            Assert.AreEqual(3, machine.NumberOfOutputs);

            var machines = Enumerable.Zip(machine, expected, (a, b) => Tuple.Create(a.Value, b.Value));

            foreach (var pair in machines)
            {
                var a = pair.Item1;
                var e = pair.Item2;

                Assert.AreEqual(0, a.NumberOfInputs);
                Assert.AreEqual(2, a.NumberOfOutputs);

                Assert.AreEqual(4, e.NumberOfInputs);
                Assert.AreEqual(2, e.NumberOfOutputs);

                Assert.IsTrue(a.Weights.IsEqual(e.Weights));
            }
        }
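
A hedged extension of the precomputed-kernel pattern above: to score a single new sample, rebuild the train-versus-sample kernel matrix and reuse the same machine. The sample values below are illustrative:

            // Score one new sample against the precomputed-kernel machine
            double[] sample = { 0, 1, 1, 0 };
            pre.Values = kernel.ToJagged2(trainInputs, new[] { sample });
            machine.Kernel = pre;
            int label = machine.Decide(pre.Indices)[0];
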
        public void TransformTest_Linear()
        {
            double[][] data = 
            {
                new double[] { 5.1, 3.5, 1.4, 0.2 },
                new double[] { 5.0, 3.6, 1.4, 0.2 },
                new double[] { 4.9, 3.0, 1.4, 0.2 },
                new double[] { 5.8, 4.0, 1.2, 0.2 },
                new double[] { 4.7, 3.2, 1.3, 0.2 },
            };

            var target = new Polynomial(1);
            var linear = new Linear(constant: 1);
            Assert.AreEqual(target.Constant, linear.Constant);

            double[][] expected = data.Apply(x => linear.Transform(x));
            double[][] actual = data.Apply(target.Transform);

            Assert.IsTrue(expected.IsEqual(actual, 1e-10));
        }
        public void RevertTest3()
        {

            string path = @"..\..\..\..\Unit Tests\Accord.Tests.Statistics\Resources\examples.xls";

            // Create a new reader, opening a given path
            ExcelReader reader = new ExcelReader(path);

            // Afterwards, we can query the file for all
            // worksheets within the specified workbook:
            string[] sheets = reader.GetWorksheetList();

            // Finally, we can request an specific sheet:
            DataTable table = reader.GetWorksheet("Wikipedia");

            // Now, we have loaded the Excel file into a DataTable. We
            // can go further and transform it into a matrix to start
            // running other algorithms on it: 

            double[,] matrix = table.ToMatrix();

            IKernel kernel = new Polynomial(2);

            // Create analysis
            KernelPrincipalComponentAnalysis target = new KernelPrincipalComponentAnalysis(matrix,
                kernel, AnalysisMethod.Center, centerInFeatureSpace: true);

            target.Compute();

            double[,] forward = target.Result;

            double[,] reversion = target.Revert(forward);

            Assert.IsTrue(!reversion.HasNaN());
        }
        public void ComputeTest2()
        {
            double[][] input =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] output =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };


            IKernel kernel = new Polynomial(2);
            int classes = 4;
            int inputs = 5;


            // Create the Multi-class Support Vector Machine using the selected Kernel
            var msvm = new MulticlassSupportVectorMachine(inputs, kernel, classes);

            // Create the learning algorithm using the machine and the training data
            var ml = new MulticlassSupportVectorLearning(msvm, input, output);

            // Configure the learning algorithm
            ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                {
                    Complexity = 1
                };
                return smo;
            };

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            // Executes the training algorithm
            double error = ml.Run();

            Assert.AreEqual(6, msvm.GetLastKernelEvaluations());

            int[] evals = new int[input.Length];
            int[] evalexp = { 8, 8, 7, 7, 7, 7, 6, 6 };
#if NET35
            AForge.Parallel.For(0, input.Length, i =>
#else
            Parallel.For(0, input.Length, i =>
#endif
            {
                double[] data = input[i];
                double[] responses;

                int num = msvm.Compute(data, MulticlassComputeMethod.Elimination, out responses);
                Assert.AreEqual(output[i], num);

                evals[i] = msvm.GetLastKernelEvaluations();
            });

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(evals[i], evalexp[i]);

#if NET35
            AForge.Parallel.For(0, input.Length, i =>
#else
            Parallel.For(0, input.Length, i =>
#endif
            {
                double[] data = input[i];
                double[] responses;

                int num = msvm.Compute(data, MulticlassComputeMethod.Voting, out responses);
                Assert.AreEqual(output[i], num);

                evals[i] = msvm.GetLastKernelEvaluations();
            });

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(msvm.SupportVectorUniqueCount, evals[i]);
        }
        public void ComputeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };


            IKernel kernel = new Polynomial(2);
            var msvm = new MulticlassSupportVectorMachine(5, kernel, 4);
            var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);
            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                {
                    Complexity = 1
                };

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            double error = smo.Run();

            Assert.AreEqual(6, msvm.GetLastKernelEvaluations());

            int[] evals = new int[inputs.Length];
            int[] evalexp = { 8, 8, 7, 7, 7, 7, 6, 6 };
            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Elimination);
                Assert.AreEqual(expected, actual);
                evals[i] = msvm.GetLastKernelEvaluations();
            }

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(evals[i], evalexp[i]);

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Voting);
                Assert.AreEqual(expected, actual);
                evals[i] = msvm.GetLastKernelEvaluations();
            }

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(msvm.SupportVectorUniqueCount, evals[i], 1);
        }
        public void GridsearchConstructorTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Example binary data
            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] xor = // xor labels
            {
                -1, 1, 1, -1
            };

            // Declare the parameters and ranges to be searched
            GridSearchRange[] ranges = 
            {
                new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 0.50 } ),
                new GridSearchRange("degree",     new double[] { 1, 10, 2, 3, 4, 5 } ),
                new GridSearchRange("constant",   new double[] { 0, 1, 2 } )
            };


            // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
            var gridsearch = new GridSearch<KernelSupportVectorMachine>(ranges);

            // Set the fitting function for the algorithm
            gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
            {
                // The parameters to be tried will be passed as a function parameter.
                int degree = (int)parameters["degree"].Value;
                double constant = parameters["constant"].Value;
                double complexity = parameters["complexity"].Value;

                // Use the parameters to build the SVM model
                Polynomial kernel = new Polynomial(degree, constant);
                KernelSupportVectorMachine ksvm = new KernelSupportVectorMachine(kernel, 2);

                // Create a new learning algorithm for SVMs
                SequentialMinimalOptimization smo = new SequentialMinimalOptimization(ksvm, inputs, xor);
                smo.Complexity = complexity;

                // Measure the model performance to return as an out parameter
                error = smo.Run();

                return ksvm; // Return the current model
            };


            // Declare some out variables to pass to the grid search algorithm
            GridSearchParameterCollection bestParameters; double minError;

            // Compute the grid search to find the best Support Vector Machine
            KernelSupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);


            // A linear kernel can't solve the xor problem.
            Assert.AreNotEqual((int)bestParameters["degree"].Value, 1);

            // The minimum error should be zero because the problem is well-known.
            Assert.AreEqual(minError, 0.0);


            Assert.IsNotNull(bestModel);
            Assert.IsNotNull(bestParameters);
            Assert.AreEqual(bestParameters.Count, 3);
        }
        /// <summary>
        ///   Estimates a suitable value for SMO's Complexity parameter C.
        /// </summary>
        /// 
        private void btnEstimateC_Click(object sender, EventArgs e)
        {
            // Extract inputs
            int rows = dgvTrainingSource.Rows.Count;
            double[][] input = new double[rows][];
            for (int i = 0; i < rows; i++)
                input[i] = (double[])dgvTrainingSource.Rows[i].Cells["colTrainingFeatures"].Value;

            IKernel kernel;
            if (rbGaussian.Checked)
                kernel = new Gaussian((double)numSigma.Value);
            else
                kernel = new Polynomial((int)numDegree.Value, (double)numConstant.Value);

            numComplexity.Value = (decimal)SequentialMinimalOptimization.EstimateComplexity(kernel, input);
        }
        private IKernel getKernel()
        {
            IKernel kernel;
            if (rbGaussian.Checked)
            {
                kernel = new Gaussian((double)numSigma.Value);
            }
            else if (rbPolynomial.Checked)
            {
                kernel = new Polynomial((int)numDegree.Value, (double)numSigAlpha.Value);
            }
            else if (rbLaplacian.Checked)
            {
                kernel = new Laplacian((double)numLaplacianSigma.Value);
            }
            else if (rbSigmoid.Checked)
            {
                kernel = new Sigmoid((double)numSigAlpha.Value, (double)numSigB.Value);
            }
            else throw new InvalidOperationException("No kernel type was selected.");

            return kernel;
        }