Example #1
        public void ComputeTest3()
        {
            // Schölkopf KPCA toy example
            double[][] inputs = scholkopf().ToJagged();

            int[] output = Matrix.Expand(new int[, ] {
                { 1 }, { 2 }, { 3 }
            }, new int[] { 30, 30, 30 }).GetColumn(0);

            IKernel kernel = new Gaussian(0.2);
            var     target = new KernelDiscriminantAnalysis(inputs, output, kernel);

            target.Compute();


            double[][] actual = target.Transform(inputs, 2);

            double[][] expected1 =
            {
                new double[] { 1.2785801485080475, 0.20539157505913622 },
                new double[] { 1.2906613255489541, 0.20704272225753775 },
                new double[] { 1.2978134597266808, 0.20802649628632208 },
            };

            double[][] actual1 = actual.Submatrix(0, 2, 0, 1);

            Assert.IsTrue(Matrix.IsEqual(actual1, expected1, 0.0000001));

            // Assert the result equals the transformation of the input
            double[][] result     = target.Result.ToJagged();
            double[][] projection = target.Transform(inputs);
            Assert.IsTrue(Matrix.IsEqual(result, projection));

            int[] actual2 = target.Classify(inputs);
            Assert.IsTrue(Matrix.IsEqual(actual2, output));

            int[]      actual3 = new int[inputs.Length];
            double[][] scores  = new double[inputs.Length][];
            for (int i = 0; i < inputs.Length; i++)
            {
                actual3[i] = target.Classify(inputs[i], out scores[i]);
            }
            Assert.IsTrue(Matrix.IsEqual(actual3, output));

            scores = scores.Get(0, 5, null);

            double[][] expected = new double[][] {
                new double[] { -6.23928931356786E-06, -5.86731829543872, -4.76988430445096 },
                new double[] { -9.44593697210785E-05, -5.92312597750504, -4.82189359956088 },
                new double[] { -0.000286839977573986, -5.95629842504978, -4.85283341267476 },
                new double[] { -4.38986003009456E-05, -5.84990179343448, -4.75189423787298 },
                new double[] { -0.000523817959022851, -5.77534144986199, -4.683120454667 }
            };

            //Assert.IsTrue(Matrix.IsEqual(scores, expected, 1e-6));
        }
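The test above walks through the full KDA workflow: construction with a Gaussian kernel, Compute(), a two-dimensional Transform(), batch Classify(), and the per-sample score overload. Since the scholkopf() helper that generates the data is not shown, here is a minimal sketch of the same calls on a small hypothetical two-class dataset (the data values, the method name ComputeSketch, and the usual Accord.NET namespaces Accord.Statistics.Analysis / Accord.Statistics.Kernels are assumptions, not part of the original test):

        public void ComputeSketch()
        {
            // Hypothetical toy data standing in for scholkopf(): two tiny clusters.
            double[][] inputs =
            {
                new double[] { 0.0, 0.1 }, new double[] { 0.1, 0.0 },   // class 1
                new double[] { 1.0, 1.1 }, new double[] { 1.1, 1.0 }    // class 2
            };
            int[] output = { 1, 1, 2, 2 };

            // Same calls as in the test above: Gaussian kernel, Compute, Transform, Classify.
            IKernel kernel = new Gaussian(0.2);
            var kda = new KernelDiscriminantAnalysis(inputs, output, kernel);
            kda.Compute();

            double[][] projection = kda.Transform(inputs, 2);  // first two discriminants
            int[] labels = kda.Classify(inputs);                // batch classification

            double[] scores;                                    // per-class scores for one sample
            int label = kda.Classify(inputs[0], out scores);
        }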
Example #2
        private void btnClassify_Click(object sender, EventArgs e)
        {
            if (dgvAnalysisTesting.Rows.Count == 0)
            {
                MessageBox.Show("Please load the testing data before clicking this button");
                return;
            }
            else if (kda == null)
            {
                MessageBox.Show("Please perform the analysis before attempting classification");
                return;
            }

            lbStatus.Text = "Classification started. This may take a while...";
            Application.DoEvents();

            int hits = 0;

            progressBar.Visible = true;
            progressBar.Value   = 0;
            progressBar.Step    = 1;
            progressBar.Maximum = dgvAnalysisTesting.Rows.Count;

            // Extract inputs
            foreach (DataGridViewRow row in dgvAnalysisTesting.Rows)
            {
                double[] input    = (double[])row.Cells["colTestingFeatures"].Value;
                int      expected = (int)row.Cells["colTestingExpected"].Value;

                int output = kda.Classify(input);
                row.Cells["colTestingOutput"].Value = output;

                if (expected == output)
                {
                    row.Cells[0].Style.BackColor = Color.LightGreen;
                    row.Cells[1].Style.BackColor = Color.LightGreen;
                    row.Cells[2].Style.BackColor = Color.LightGreen;
                    hits++;
                }
                else
                {
                    row.Cells[0].Style.BackColor = Color.White;
                    row.Cells[1].Style.BackColor = Color.White;
                    row.Cells[2].Style.BackColor = Color.White;
                }

                progressBar.PerformStep();
            }

            progressBar.Visible = false;

            lbStatus.Text = String.Format("Classification complete. Hits: {0}/{1} ({2:0%})",
                                          hits, dgvAnalysisTesting.Rows.Count, (double)hits / dgvAnalysisTesting.Rows.Count);
        }
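Stripped of the WinForms plumbing (DataGridView access, progress bar, cell colouring), the essential logic of this handler is an accuracy loop over kda.Classify. A sketch of that core, assuming the testing set is already available as plain arrays (the helper name and its parameters are hypothetical):

        // Hypothetical helper: classify each testing sample and return the hit rate.
        // Assumes 'kda' has already been computed, exactly as required by the handler above.
        private double ComputeHitRate(double[][] testInputs, int[] testExpected)
        {
            int hits = 0;
            for (int i = 0; i < testInputs.Length; i++)
            {
                int output = kda.Classify(testInputs[i]);  // same call as in the handler
                if (output == testExpected[i])
                    hits++;
            }
            return (double)hits / testInputs.Length;       // fraction of correct predictions
        }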
Example #3
        public void ClassifyTest1()
        {
            // Create some sample input data

            // This is the same data used in the example by Gutierrez-Osuna
            // http://research.cs.tamu.edu/prism/lectures/pr/pr_l10.pdf

            double[,] inputs =
            {
                {  4,  1 }, // Class 1
                {  2,  4 },
                {  2,  3 },
                {  3,  6 },
                {  4,  4 },

                {  9, 10 }, // Class 2
                {  6,  8 },
                {  9,  5 },
                {  8,  7 },
                { 10,  8 }
            };

            int[] output =
            {
                1, 1, 1, 1, 1, // Class labels for the input vectors
                2, 2, 2, 2, 2
            };

            // Create a Kernel Discriminant Analysis with a linear kernel,
            // which is equivalent to classical Linear Discriminant Analysis (LDA)
            var lda = new KernelDiscriminantAnalysis(inputs, output, new Linear());

            // Compute the analysis
            lda.Compute();

            // Test the classify method
            for (int i = 0; i < 5; i++)
            {
                int expected = 1;
                int actual   = lda.Classify(inputs.GetRow(i));
                Assert.AreEqual(expected, actual);
            }

            for (int i = 5; i < 10; i++)
            {
                int expected = 2;
                int actual   = lda.Classify(inputs.GetRow(i));
                Assert.AreEqual(expected, actual);
            }
        }
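Because the kernel above is Linear, this KDA behaves like classical LDA on Gutierrez-Osuna's data. A nonlinear kernel can be swapped in without changing the rest of the calls; the sketch below reuses the Gaussian kernel from Example #1 (the bandwidth value is an illustrative guess, not taken from any of these tests):

            // Same data and workflow as above, but with a Gaussian kernel instead of Linear.
            // The bandwidth (3.0) is an illustrative choice, not a value from the original tests.
            var kda = new KernelDiscriminantAnalysis(inputs, output, new Gaussian(3.0));
            kda.Compute();

            for (int i = 0; i < 10; i++)
            {
                int predicted = kda.Classify(inputs.GetRow(i)); // classify each training point
            }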
Example #4
        private void graphMapInput_MouseMove(object sender, MouseEventArgs e)
        {
            double x;
            double y;

            graphMapInput.GraphPane.ReverseTransform(new PointF(e.X, e.Y), out x, out y);

            double[,] data = new double[1, 2];
            data[0, 0]     = x;
            data[0, 1]     = y;


            double[,] result = kda.Transform(data);

            int c = kda.Classify(new double[] { x, y });

            graphMapFeature.GraphPane.CurveList["M1"].Clear();
            graphMapFeature.GraphPane.CurveList["M2"].Clear();
            graphMapFeature.GraphPane.CurveList["M3"].Clear();

            if (c == 1)
            {
                graphMapFeature.GraphPane.CurveList["M1"].AddPoint(result[0, 0], result[0, 1]);
            }
            else if (c == 2)
            {
                graphMapFeature.GraphPane.CurveList["M2"].AddPoint(result[0, 0], result[0, 1]);
            }
            else
            {
                graphMapFeature.GraphPane.CurveList["M3"].AddPoint(result[0, 0], result[0, 1]);
            }

            graphMapFeature.Invalidate();
        }
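Without the ZedGraph plotting, the mapping step in this handler boils down to projecting a single (x, y) point into the discriminant space and classifying it. A sketch of that core, with a hypothetical helper name:

        // Hypothetical helper: project one (x, y) point into KDA space and classify it.
        // Assumes 'kda' has already been computed, as in the handler above.
        private Tuple<double[,], int> MapPoint(double x, double y)
        {
            double[,] data = new double[1, 2];
            data[0, 0] = x;
            data[0, 1] = y;

            double[,] projected = kda.Transform(data);        // coordinates in the feature space
            int label = kda.Classify(new double[] { x, y });  // class index used to pick M1/M2/M3

            return Tuple.Create(projected, label);
        }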
Example #5
        public void ClassifyTest()
        {
            // Create some sample input data instances. This is the same
            // data used in Gutierrez-Osuna's example, available at:
            // http://research.cs.tamu.edu/prism/lectures/pr/pr_l10.pdf

            double[][] inputs =
            {
                // Class 0
                new double[] {  4,  1 },
                new double[] {  2,  4 },
                new double[] {  2,  3 },
                new double[] {  3,  6 },
                new double[] {  4,  4 },

                // Class 1
                new double[] {  9, 10 },
                new double[] {  6,  8 },
                new double[] {  9,  5 },
                new double[] {  8,  7 },
                new double[] { 10,  8 }
            };

            int[] output =
            {
                0, 0, 0, 0, 0, // The first five are from class 0
                1, 1, 1, 1, 1  // The last five are from class 1
            };

            // Now we can choose a kernel function to
            // use, such as a linear kernel function.
            IKernel kernel = new Linear();

            // Then, we will create a KDA using this linear kernel.
            var kda = new KernelDiscriminantAnalysis(inputs, output, kernel);

            kda.Compute(); // Compute the analysis


            // Now we can project the data into KDA space:
            double[][] projection = kda.Transform(inputs);

            double[][] classifierProjection = kda.Classifier.First.Transform(inputs);
            Assert.IsTrue(projection.IsEqual(classifierProjection));

            // Or perform classification using:
            int[] results = kda.Classify(inputs);

            // Debug aid: generates the C# literal used for the expected values below
            string str = projection.ToCSharp();

            double[][] expected = new double[][] {
                new double[] { 80.7607049998409, -5.30485371541545E-06, 6.61304584781419E-06, 4.52807990036774E-06, -3.44409628150189E-06, 3.69094504515388E-06, -1.33641000168438E-05, -0.000132874977040842, -0.000261921590627878, 1.22137997452386 },
                new double[] { 67.6629612351861, 6.80622743409742E-06, -8.48466262226566E-06, -5.80961187779394E-06, 4.4188405141643E-06, -4.73555212510135E-06, 1.71463925084936E-05, 0.000170481102685471, 0.000336050342774286, -1.5670535522193 },
                new double[] { 59.8679301679674, 4.10375477777336E-06, -5.11575246520124E-06, -3.50285421113483E-06, 2.66430090034575E-06, -2.85525936627451E-06, 1.03382660725515E-05, 0.00010279007663172, 0.000202618589039361, -0.944841112367518 },
                new double[] { 101.494441852779, 1.02093411395998E-05, -1.27269939227403E-05, -8.71441780958548E-06, 6.62826077091339E-06, -7.10332818965043E-06, 2.57195887591877E-05, 0.000255721654028207, 0.000504075514164981, -2.35058032832894 },
                new double[] { 104.145798201497, 2.80256425000402E-06, -3.49368461627364E-06, -2.39219308895144E-06, 1.81952256639306E-06, -1.94993321933623E-06, 7.06027928387698E-06, 7.01981011275166E-05, 0.000138373670580449, -0.645257345031474 },
                new double[] { 242.123077020588, 9.00824221261587E-06, -1.12297005614437E-05, -7.689192102589E-06, 5.84846541151762E-06, -6.26764250277745E-06, 2.26937548148953E-05, 0.000225636753569347, 0.000444772512580016, -2.07404146617259 },
                new double[] { 171.808759436683, 9.60879168943052E-06, -1.19783472456447E-05, -8.2018049702981E-06, 6.23836308744075E-06, -6.68548535731617E-06, 2.42066717959233E-05, 0.000240679203812988, 0.000474424013376051, -2.21231089725078 },
                new double[] { 203.147921684494, -4.5041210583463E-06, 5.61485022387842E-06, 3.8445962076139E-06, -2.92423269243614E-06, 3.13382127359318E-06, -1.13468773577097E-05, -0.000112818376692303, -0.000222386256126583, 1.03702073308629 },
                new double[] { 200.496565335776, 2.90265583302585E-06, -3.61845908969372E-06, -2.47762852723099E-06, 1.88450551963371E-06, -2.01957368695105E-06, 7.31243213181187E-06, 7.27051762225983E-05, 0.000143315587422421, -0.668302250211177 },
                new double[] { 244.774433369306, 1.60146531058558E-06, -1.99639123366069E-06, -1.36696743169296E-06, 1.0397271781315E-06, -1.11424755644407E-06, 4.03444536090092E-06, 4.01132006970784E-05, 7.90706689741683E-05, -0.368718482875124 }
            };

            Assert.IsTrue(expected.IsEqual(projection, 1e-6));

            // Test the classify method
            for (int i = 0; i < 5; i++)
            {
                int actual = results[i];
                Assert.AreEqual(0, actual);
            }

            for (int i = 5; i < 10; i++)
            {
                int actual = results[i];
                Assert.AreEqual(1, actual);
            }
        }
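The test above checks the projection and the hard class decisions. The per-class scores that drive those decisions can also be inspected with the Classify(input, out scores) overload used in Example #1; a short sketch on the same two-class data:

            // For each sample, retrieve the discriminant scores behind the hard decision.
            for (int i = 0; i < inputs.Length; i++)
            {
                double[] scores;
                int label = kda.Classify(inputs[i], out scores); // one score per class; 'label' is the winner
            }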
Example #6
        public void ClassifyTest()
        {
            // Create some sample input data instances. This is the same
            // data used in Gutierrez-Osuna's example, available at:
            // http://research.cs.tamu.edu/prism/lectures/pr/pr_l10.pdf

            double[][] inputs =
            {
                // Class 0
                new double[] {  4,  1 },
                new double[] {  2,  4 },
                new double[] {  2,  3 },
                new double[] {  3,  6 },
                new double[] {  4,  4 },

                // Class 1
                new double[] {  9, 10 },
                new double[] {  6,  8 },
                new double[] {  9,  5 },
                new double[] {  8,  7 },
                new double[] { 10,  8 }
            };

            int[] output =
            {
                0, 0, 0, 0, 0, // The first five are from class 0
                1, 1, 1, 1, 1  // The last five are from class 1
            };

            // Now we can choose a kernel function to
            // use, such as a linear kernel function.
            IKernel kernel = new Linear();

            // Then, we will create a KDA using this linear kernel.
            var kda = new KernelDiscriminantAnalysis(inputs, output, kernel);

            kda.Compute(); // Compute the analysis


            // Now we can project the data into KDA space:
            double[][] projection = kda.Transform(inputs);

            // Or perform classification using:
            int[] results = kda.Classify(inputs);


            // Test the classify method
            for (int i = 0; i < 5; i++)
            {
                int expected = 0;
                int actual   = results[i];
                Assert.AreEqual(expected, actual);
            }

            for (int i = 5; i < 10; i++)
            {
                int expected = 1;
                int actual   = results[i];
                Assert.AreEqual(expected, actual);
            }
        }
Example #7
        // Classifies a new input vector using the already-computed KDA
        public int computeNew(double[] input)
        {
            return kda.Classify(input);
        }
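A hypothetical call site for this wrapper, assuming it is invoked from within the same class once the kda analysis has been computed:

        // Hypothetical usage: classify a freshly extracted feature vector.
        double[] sample = { 4, 1 };              // same format as the training inputs
        int predictedClass = computeNew(sample);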