Code example #1
        public void RunTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Sample data
            //   The following is a simple auto-association function
            //   in which each input corresponds to its own class. This
            //   problem should be easily solved using a Linear kernel.

            // Sample input data
            double[][] inputs =
            {
                new double[] { 0 },
                new double[] { 3 },
                new double[] { 1 },
                new double[] { 2 },
            };

            // Output for each of the inputs
            int[] outputs = { 0, 3, 1, 2 };


            // Create a new Linear kernel
            IKernel kernel = new Linear();

            // Create a new Multi-class Support Vector Machine for one input,
            //  using the linear kernel and four disjoint classes.
            var machine = new MulticlassSupportVectorMachine(1, kernel, 4);

            // Create the Multi-class learning algorithm for the machine
            var teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs);

            // Configure the learning algorithm to use SMO to train the
            //  underlying SVMs in each of the binary class subproblems.
            teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            // Run the learning algorithm
            double error = teacher.Run();

            Assert.AreEqual(0, error);
            Assert.AreEqual(0, machine.Compute(inputs[0]));
            Assert.AreEqual(3, machine.Compute(inputs[1]));
            Assert.AreEqual(1, machine.Compute(inputs[2]));
            Assert.AreEqual(2, machine.Compute(inputs[3]));

        }
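Once trained, the machine in the example above can also classify values that were not part of the training set. Below is a minimal sketch, reusing the machine variable from the test; the sample value 1.5 is purely illustrative:

            // Classify a previously unseen input; Compute returns the winning class index
            double[] unseen = { 1.5 };
            int predictedClass = machine.Compute(unseen);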
Code example #2
        /// <summary>
        /// Computes the SVM output for each input and appends the results to a text file, one per line.
        /// </summary>
        /// <param name="machine">The trained multi-class support vector machine.</param>
        /// <param name="inputs">The input vectors to classify.</param>
        /// <param name="path">The path of the output text file.</param>
        /// <returns>The number of rows processed.</returns>
        public static int SaveOutput(MulticlassSupportVectorMachine machine, double[][] inputs, string path)
        {
            File.AppendAllText(path, "ImageId,Label\r\n");

            for (int i = 0; i < inputs.Length; i++)
            {
                int output = machine.Compute(inputs[i]);
                File.AppendAllText(path, (i + 1) + "," + output.ToString() + "\r\n");
            }

            return inputs.Length;
        }
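A possible call site for this helper is sketched below; machine and testInputs are placeholders for a trained MulticlassSupportVectorMachine and the data to classify, and the path is illustrative. Because the method uses File.AppendAllText, repeated calls keep appending to the same file:

            // Appends the "ImageId,Label" header plus one prediction row per input
            int rowsWritten = SaveOutput(machine, testInputs, @"C:\temp\submission.csv");
            Console.WriteLine("{0} rows written.", rowsWritten);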
Code example #3
        /// <summary>
        /// Calculates the accuracy of a trained SVM against the given data.
        /// </summary>
        /// <param name="machine">The trained multi-class support vector machine.</param>
        /// <param name="inputs">The input vectors to classify.</param>
        /// <param name="outputs">The expected class label for each input.</param>
        /// <returns>The fraction of inputs classified correctly, between 0 and 1.</returns>
        public static double CalculateAccuracy(MulticlassSupportVectorMachine machine, double[][] inputs, int[] outputs)
        {
            double correct = 0;

            for (int i = 0; i < inputs.Length; i++)
            {
                int output = machine.Compute(inputs[i]);
                if (output == outputs[i])
                {
                    correct++;
                }
            }

            return (correct / (double)inputs.Length);
        }
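A typical use of this helper is to compare accuracy on the training data against accuracy on held-out data. In the sketch below, machine, trainInputs, trainOutputs, testInputs and testOutputs are all placeholders:

            // Both values are fractions of correctly classified samples, in the range [0, 1]
            double trainAccuracy = CalculateAccuracy(machine, trainInputs, trainOutputs);
            double testAccuracy = CalculateAccuracy(machine, testInputs, testOutputs);
            Console.WriteLine("train = {0:P1}, test = {1:P1}", trainAccuracy, testAccuracy);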
Code example #4
        public void ComputeTest2()
        {
            double[][] input =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] output =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };


            IKernel kernel = new Polynomial(2);
            int classes = 4;
            int inputs = 5;


            // Create the Multi-class Support Vector Machine using the selected Kernel
            var msvm = new MulticlassSupportVectorMachine(inputs, kernel, classes);

            // Create the learning algorithm using the machine and the training data
            var ml = new MulticlassSupportVectorLearning(msvm, input, output);

            // Configure the learning algorithm
            ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                {
                    Complexity = 1
                };
                return smo;
            };

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            // Executes the training algorithm
            double error = ml.Run();

            Assert.AreEqual(6, msvm.GetLastKernelEvaluations());

            int[] evals = new int[input.Length];
            int[] evalexp = { 8, 8, 7, 7, 7, 7, 6, 6 };
#if NET35
            AForge.Parallel.For(0, input.Length, i =>
#else
            Parallel.For(0, input.Length, i =>
#endif
            {
                double[] data = input[i];
                double[] responses;

                int num = msvm.Compute(data, MulticlassComputeMethod.Elimination, out responses);
                Assert.AreEqual(output[i], num);

                evals[i] = msvm.GetLastKernelEvaluations();
            });

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(evals[i], evalexp[i]);

#if NET35
            AForge.Parallel.For(0, input.Length, i =>
#else
            Parallel.For(0, input.Length, i =>
#endif
            {
                double[] data = input[i];
                double[] responses;

                int num = msvm.Compute(data, MulticlassComputeMethod.Voting, out responses);
                Assert.AreEqual(output[i], num);

                evals[i] = msvm.GetLastKernelEvaluations();
            });

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(msvm.SupportVectorUniqueCount, evals[i]);
        }
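The test above exercises both decision strategies: Elimination discards candidate classes along a decision path, while Voting evaluates every pairwise machine and counts wins. The sketch below shows the two overloads side by side for a single sample, assuming msvm and input from the method above are still in scope:

            // Both strategies should agree on these easily separable samples
            double[] responses;
            int byElimination = msvm.Compute(input[0], MulticlassComputeMethod.Elimination, out responses);
            int byVoting = msvm.Compute(input[0], MulticlassComputeMethod.Voting, out responses);
            Assert.AreEqual(byElimination, byVoting);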
Code example #5
        public void ComputeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };


            IKernel kernel = new Polynomial(2);
            var msvm = new MulticlassSupportVectorMachine(5, kernel, 4);
            var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);
            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                {
                    Complexity = 1
                };

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            double error = smo.Run();

            Assert.AreEqual(6, msvm.GetLastKernelEvaluations());

            int[] evals = new int[inputs.Length];
            int[] evalexp = { 8, 8, 7, 7, 7, 7, 6, 6 };
            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Elimination);
                Assert.AreEqual(expected, actual);
                evals[i] = msvm.GetLastKernelEvaluations();
            }

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(evals[i], evalexp[i]);

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Voting);
                Assert.AreEqual(expected, actual);
                evals[i] = msvm.GetLastKernelEvaluations();
            }

            for (int i = 0; i < evals.Length; i++)
                Assert.AreEqual(msvm.SupportVectorUniqueCount, evals[i], 1);
        }
Code example #6
        public void RunTest3()
        {

            double[][] inputs =
            {
                // Tickets with the following structure should be assigned to location 0
                new double[] { 1, 4, 2, 0, 1 }, // should be assigned to location 0
                new double[] { 1, 3, 2, 0, 1 }, // should be assigned to location 0

                // Tickets with the following structure should be assigned to location 1
                new double[] { 3, 0, 1, 1, 1 }, // should be assigned to location 1
                new double[] { 3, 0, 1, 0, 1 }, // should be assigned to location 1

                // Tickets with the following structure should be assigned to location 2
                new double[] { 0, 5, 5, 5, 5 }, // should be assigned to location 2
                new double[] { 1, 5, 5, 5, 5 }, // should be assigned to location 2

                // Tickets with the following structure should be assigned to location 3
                new double[] { 1, 0, 0, 0, 0 }, // should be assigned to location 3
                new double[] { 1, 0, 0, 0, 0 }, // should be assigned to location 3
            };

            int[] outputs =
            {
                0, 0, // Those are the locations for the first two vectors above
                1, 1, // Those are the locations for the next two vectors above
                2, 2, // Those are the locations for the next two vectors above
                3, 3, // Those are the locations for the last two vectors above
            };

            // Since this is a simplification, a linear machine will suffice:
            IKernel kernel = new Linear();

            // Create the machine for feature vectors of length 5, for 4 possible locations
            MulticlassSupportVectorMachine machine = new MulticlassSupportVectorMachine(5, kernel, 4);

            // Create a new learning algorithm to train the machine
            MulticlassSupportVectorLearning target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

            // Use the standard SMO algorithm
            target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            // Train the machines
            double actual = target.Run();


            // Compute the answer for all training samples
            for (int i = 0; i < inputs.Length; i++)
            {
                double[] answersWeights;

                double answer = machine.Compute(inputs[i], MulticlassComputeMethod.Voting, out answersWeights);

                // Assert it has been classified correctly
                Assert.AreEqual(outputs[i], answer);

                // Assert the most probable answer is indeed the correct one
                int imax; Matrix.Max(answersWeights, out imax);
                Assert.AreEqual(answer, imax);
            }

        }
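The answersWeights array returned through the out parameter can also be inspected directly; as the assertion above shows, the index of the largest weight is the predicted location. A short sketch, assuming machine and inputs from the method above are still in scope:

            // Print one weight per class; the largest corresponds to the decision
            double[] weights;
            int location = machine.Compute(inputs[0], MulticlassComputeMethod.Voting, out weights);
            for (int k = 0; k < weights.Length; k++)
                Console.WriteLine("location {0}: weight {1}", k, weights[k]);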
Code example #7
        public void RunTest2()
        {

            double[][] inputs =
            {
                new double[] { 0, 1, 1, 0 }, // 0
                new double[] { 0, 1, 0, 0 }, // 0
                new double[] { 0, 0, 1, 0 }, // 0
                new double[] { 0, 1, 1, 0 }, // 0
                new double[] { 0, 1, 0, 0 }, // 0
                new double[] { 1, 0, 0, 0 }, // 1
                new double[] { 1, 0, 0, 0 }, // 1
                new double[] { 1, 0, 0, 1 }, // 1
                new double[] { 0, 0, 0, 1 }, // 1
                new double[] { 0, 0, 0, 1 }, // 1
                new double[] { 1, 1, 1, 1 }, // 2
                new double[] { 1, 0, 1, 1 }, // 2
                new double[] { 1, 1, 0, 1 }, // 2
                new double[] { 0, 1, 1, 1 }, // 2
                new double[] { 1, 1, 1, 1 }, // 2
            };

            int[] outputs =
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            IKernel kernel = new Linear();
            MulticlassSupportVectorMachine machine = new MulticlassSupportVectorMachine(4, kernel, 3);
            MulticlassSupportVectorLearning target = new MulticlassSupportVectorLearning(machine, inputs, outputs);

            target.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);


            double actual = target.Run();
            double expected = 0;

            Assert.AreEqual(expected, actual);

            for (int i = 0; i < inputs.Length; i++)
            {
                actual = machine.Compute(inputs[i]);
                expected = outputs[i];
                Assert.AreEqual(expected, actual);
            }

        }
Code example #8
        public void ApplyTest2()
        {
            // Suppose we have a data table relating a person's age
            // to a categorical classification, such as 
            // "child", "adult" or "elder".

            // The Codification filter is able to extract those
            // string labels and transform them into discrete
            // symbols, assigning integer labels to each of them
            // such as "child" = 0, "adult" = 1, and "elder" = 2.

            // Create the aforementioned sample table
            DataTable table = new DataTable("Sample data");
            table.Columns.Add("Age", typeof(int));
            table.Columns.Add("Label", typeof(string));

            //            age   label
            table.Rows.Add(10, "child");
            table.Rows.Add(07, "child");
            table.Rows.Add(04, "child");
            table.Rows.Add(21, "adult");
            table.Rows.Add(27, "adult");
            table.Rows.Add(12, "child");
            table.Rows.Add(79, "elder");
            table.Rows.Add(40, "adult");
            table.Rows.Add(30, "adult");


            // Now, let's say we need to translate those text labels
            // into integer symbols. Let's use a Codification filter:

            Codification codebook = new Codification(table);


            // After that, we can use the codebook to "translate"
            // the text labels into discrete symbols, such as:

            int a = codebook.Translate("Label", "child"); // returns 0
            int b = codebook.Translate("Label", "adult"); // returns 1
            int c = codebook.Translate("Label", "elder"); // returns 2

            // We can also do the reverse:
            string labela = codebook.Translate("Label", 0); // returns "child"
            string labelb = codebook.Translate("Label", 1); // returns "adult"
            string labelc = codebook.Translate("Label", 2); // returns "elder"


            // We can also process an entire data table at once:
            DataTable result = codebook.Apply(table);

            // The resulting table can be transformed into a jagged array:
            double[][] matrix = Matrix.ToArray(result);

            // and the resulting matrix will be given by
            string str = matrix.ToString(CSharpJaggedMatrixFormatProvider.InvariantCulture);

            // str == new double[][] 
            // {
            //     new double[] { 10, 0 },
            //     new double[] {  7, 0 },
            //     new double[] {  4, 0 },
            //     new double[] { 21, 1 },
            //     new double[] { 27, 1 },
            //     new double[] { 12, 0 },
            //     new double[] { 79, 2 },
            //     new double[] { 40, 1 },
            //     new double[] { 30, 1 } 
            // };



            // Now we will be able to feed this matrix to any machine learning
            // algorithm without having to worry about text labels in our data:

            int classes = codebook["Label"].Symbols; // 3 classes (child, adult, elder)

            // Use the first column as input variables,
            // and the second column as the output classes
            //
            double[][] inputs = matrix.GetColumns(0);
            int[] outputs = matrix.GetColumn(1).ToInt32();


            // Create a multi-class SVM for 1 input (Age) and 3 classes (Label)
            var machine = new MulticlassSupportVectorMachine(inputs: 1, classes: classes);

            // Create a Multi-class learning algorithm for the machine
            var teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs);

            // Configure the learning algorithm to use SMO to train the
            //  underlying SVMs in each of the binary class subproblems.
            teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
            {
                return new SequentialMinimalOptimization(svm, classInputs, classOutputs)
                {
                    Complexity = 1
                };
            };

            // Run the learning algorithm
            double error = teacher.Run();


            // After we have learned the machine, we can use it to classify
            // new data points, and use the codebook to translate the machine
            // outputs to the original text labels:

            string result1 = codebook.Translate("Label", machine.Compute(10)); // child
            string result2 = codebook.Translate("Label", machine.Compute(40)); // adult
            string result3 = codebook.Translate("Label", machine.Compute(70)); // elder


            Assert.AreEqual(0, a);
            Assert.AreEqual(1, b);
            Assert.AreEqual(2, c);
            Assert.AreEqual("child", labela);
            Assert.AreEqual("adult", labelb);
            Assert.AreEqual("elder", labelc);

            Assert.AreEqual("child", result1);
            Assert.AreEqual("adult", result2);
            Assert.AreEqual("elder", result3);

        }
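The same machine-plus-codebook pipeline works for a batch of new ages, mirroring the three single calls above; the ages below are purely illustrative:

            // Translate each raw prediction back into the original text label
            int[] newAges = { 5, 33, 85 };
            foreach (int age in newAges)
            {
                string predicted = codebook.Translate("Label", machine.Compute(age));
                Console.WriteLine("{0} -> {1}", age, predicted);
            }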
Code example #9
File: MainForm.cs  Project: qusma/framework
        /// <summary>
        ///   Loads the stored gestures and learns a SVM using those data.
        /// </summary>
        /// 
        private void btnLearn_Click(object sender, EventArgs e)
        {
            if (gridSamples.Rows.Count == 0)
            {
                MessageBox.Show("Please load or insert some data first.");
                return;
            }

            BindingList<Sequence> samples = database.Samples;
            BindingList<String> classes = database.Classes;

            double[][] inputs = new double[samples.Count][];
            int[] outputs = new int[samples.Count];

            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i] = samples[i].Input;
                outputs[i] = samples[i].Output;
            }


            // Creates a new learning machine. Please note how the number of inputs is given
            // as zero: this means the machine will accept variable-length sequences as input.
            //
            svm = new MulticlassSupportVectorMachine(inputs: 0, 
                kernel: new DynamicTimeWarping(2), classes: classes.Count);


            // Create the learning algorithm to teach the multiple class classifier
            var teacher = new MulticlassSupportVectorLearning(svm, inputs, outputs)
            {
                // Set up the learning algorithm for each one-vs-one subproblem
                Algorithm = (machine, classInputs, classOutputs, i, j) =>
                    new SequentialMinimalOptimization(machine, classInputs, classOutputs)
            };


            // Run the learning algorithm
            double error = teacher.Run();


            // Classify all training instances
            foreach (var sample in database.Samples)
            {
                sample.RecognizedAs = svm.Compute(sample.Input);
            }

            foreach (DataGridViewRow row in gridSamples.Rows)
            {
                var sample = row.DataBoundItem as Sequence;
                row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
                    Color.LightGreen : Color.White;
            }
        }
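Since the machine was created with zero inputs and a DynamicTimeWarping kernel over two-dimensional observations, a newly captured gesture can be classified the same way as the training samples. The array below is a hypothetical flattened (x, y) sequence, and gestureName only illustrates mapping the class index back to its label:

            // Hypothetical 3-point gesture, flattened into interleaved (x, y) values
            double[] newGesture = { 0.1, 0.2, 0.4, 0.5, 0.9, 0.7 };
            int recognized = svm.Compute(newGesture);
            string gestureName = database.Classes[recognized];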
Code example #10
File: MainForm.cs  Project: natepan/framework
        private void btnLearn_Click(object sender, EventArgs e)
        {
            if (gridSamples.Rows.Count == 0)
            {
                MessageBox.Show("Please load or insert some data first.");
                return;
            }

            BindingList<Sequence> samples = database.Samples;
            BindingList<String> classes = database.Classes;

            double[][] inputs = new double[samples.Count][];
            int[] outputs = new int[samples.Count];

            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i] = samples[i].Input;
                outputs[i] = samples[i].Output;
            }


            svm = new MulticlassSupportVectorMachine(0, new DynamicTimeWarping(2), classes.Count);


            // Create the learning algorithm for the ensemble classifier
            var teacher = new MulticlassSupportVectorLearning(svm, inputs, outputs);

            teacher.Algorithm = (machine, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(machine, classInputs, classOutputs);

            // Run the learning algorithm
            double error = teacher.Run();


            // Classify all training instances
            foreach (var sample in database.Samples)
            {
                sample.RecognizedAs = svm.Compute(sample.Input);
            }

            foreach (DataGridViewRow row in gridSamples.Rows)
            {
                var sample = row.DataBoundItem as Sequence;
                row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
                    Color.LightGreen : Color.White;
            }
        }
Code example #11
        public void LinearTest()
        {

            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0 
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            int[] outputs = // those are the class labels
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // Create a new multi-class linear support vector machine for 3 classes
            var machine = new MulticlassSupportVectorMachine(inputs: 4, classes: 3);

            // Create a one-vs-one learning algorithm using LIBLINEAR's L2-loss SVC dual
            var teacher = new MulticlassSupportVectorLearning(machine, inputs, outputs)
            {
                Algorithm = (svm, classInputs, classOutputs, i, j) =>
                    new LinearDualCoordinateDescent(svm, classInputs, classOutputs)
                    {
                        Loss = Loss.L2
                    }
            };

            // Teach the machine
            double error = teacher.Run(); // should be 0.

            Assert.AreEqual(0, error);
            for (int i = 0; i < inputs.Length; i++)
            {
                double actual = machine.Compute(inputs[i]);
                double expected = outputs[i];
                Assert.AreEqual(expected, actual);
            }
        }
Code example #12
        public void LinearComputeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };


            var msvm = new MulticlassSupportVectorMachine(5, 4);
            var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);
            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new LinearCoordinateDescent(svm, classInputs, classOutputs)
                {
                    Complexity = 1
                };

            msvm.ParallelOptions.MaxDegreeOfParallelism = 1;
            smo.ParallelOptions.MaxDegreeOfParallelism = 1;

            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            double error = smo.Run();

            // Linear machines in compact form do not require kernel evaluations
            Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Elimination);
                Assert.AreEqual(expected, actual);
                Assert.AreEqual(0, msvm.GetLastKernelEvaluations());
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Voting);
                Assert.AreEqual(expected, actual);
                Assert.AreEqual(0, msvm.GetLastKernelEvaluations());
            }
        }
Code example #13
        public void ComputeTest1()
        {
            double[][] inputs =
            {
                new double[] { 1, 4, 2, 0, 1 },
                new double[] { 1, 3, 2, 0, 1 },
                new double[] { 3, 0, 1, 1, 1 },
                new double[] { 3, 0, 1, 0, 1 },
                new double[] { 0, 5, 5, 5, 5 },
                new double[] { 1, 5, 5, 5, 5 },
                new double[] { 1, 0, 0, 0, 0 },
                new double[] { 1, 0, 0, 0, 0 },
            };

            int[] outputs =
            {
                0, 0,
                1, 1,
                2, 2,
                3, 3,
            };

            IKernel kernel = new Polynomial(2);
            var msvm = new MulticlassSupportVectorMachine(5, kernel, 4);
            var smo = new MulticlassSupportVectorLearning(msvm, inputs, outputs);
            smo.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            double error = smo.Run();

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Elimination);
                Assert.AreEqual(expected, actual);
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = outputs[i];
                double actual = msvm.Compute(inputs[i], MulticlassComputeMethod.Voting);
                Assert.AreEqual(expected, actual);
            }
        }