Example #1
        private static void and()
        {
            // Create a simple binary AND
            // classification problem:

            double[][] problem =
            {
                //             a    b    a AND b
                new double[] { 0,   0,     0    },
                new double[] { 0,   1,     0    },
                new double[] { 1,   0,     0    },
                new double[] { 1,   1,     1    },
            };

            // Get the first two columns as the problem
            // inputs and the last column as the output
            
            // input columns
            double[][] inputs = problem.GetColumns(0, 1);

            // output column
            int[] outputs = problem.GetColumn(2).ToInt32();

            // Plot the problem on screen
            ScatterplotBox.Show("AND", inputs, outputs).Hold();


            // However, SVMs expect the output value to be
            // either -1 or +1. As such, we have to convert
            // it so the vector contains { -1, -1, -1, +1 }:
            //
            outputs = outputs.Apply(x => x == 0 ? -1 : 1);


            // Create a new linear-SVM for two inputs (a and b)
            SupportVectorMachine svm = new SupportVectorMachine(inputs: 2);

            // Create an L2-regularized L2-loss support vector classification learner
            var teacher = new LinearDualCoordinateDescent(svm, inputs, outputs)
            {
                Loss = Loss.L2,
                Complexity = 1000,
                Tolerance = 1e-5
            };

            // Learn the machine
            double error = teacher.Run(computeError: true);

            // Compute the machine's answers for the learned inputs
            int[] answers = inputs.Apply(x => Math.Sign(svm.Compute(x)));

            // Plot the results
            ScatterplotBox.Show("SVM's answer", inputs, answers).Hold();
        }
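
The trained AND machine can also be checked numerically rather than only on the scatter plot. Below is a minimal sketch, assumed to run at the end of and() above; it reuses only calls already shown on this page (the answers vector and the ConfusionMatrix constructor from the last example).

            // Hypothetical check appended to the end of and():
            // compare the machine's answers against the expected labels.
            var check = new ConfusionMatrix(predicted: answers, expected: outputs);

            // AND is linearly separable, so the single positive sample and the
            // three negative samples should all be classified correctly:
            Console.WriteLine("True positives: " + check.TruePositives);  // expected: 1
            Console.WriteLine("True negatives: " + check.TrueNegatives);  // expected: 3
            Console.WriteLine("Errors: " + (check.FalsePositives + check.FalseNegatives));  // expected: 0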
Example #2
        public void LearnTest()
        {

            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] xor =
            {
                -1,
                 1,
                 1,
                -1
            };

            var kernel = new Polynomial(2, 0.0);

            double[][] augmented = new double[inputs.Length][];
            for (int i = 0; i < inputs.Length; i++)
                augmented[i] = kernel.Transform(inputs[i]);

            SupportVectorMachine machine = new SupportVectorMachine(augmented[0].Length);

            // Create the linear dual coordinate descent teacher
            var learn = new LinearDualCoordinateDescent(machine, augmented, xor);

            // Run the learning algorithm
            double error = learn.Run();

            Assert.AreEqual(0, error);

            int[] output = augmented.Apply(p => Math.Sign(machine.Compute(p)));
            for (int i = 0; i < output.Length; i++)
                Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i]));
        }
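
Why the explicit Polynomial(2, 0) expansion works: on {-1, +1} inputs, xor(a, b) equals -a * b, so any degree-2 feature map that contains the cross term a * b makes XOR linearly separable. The sketch below illustrates the idea by hand (the actual ordering and scaling of kernel.Transform's output may differ; only the cross term matters here).

            // Hypothetical illustration: a single degree-2 cross term,
            // weighted by -1, already reproduces the XOR labels.
            for (int i = 0; i < inputs.Length; i++)
            {
                double crossTerm = inputs[i][0] * inputs[i][1];   // a * b
                int predictedByHand = Math.Sign(-crossTerm);      // linear rule with weight -1
                Assert.AreEqual(xor[i], predictedByHand);
            }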
Example #3
        public void ComputeTest5()
        {
            var dataset = SequentialMinimalOptimizationTest.yinyang;

            double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
            int[] labels = dataset.GetColumn(2).ToInt32();

            var kernel = new Polynomial(2, 0);

            {
                var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
                var smo = new SequentialMinimalOptimization(machine, inputs, labels);
                smo.UseComplexityHeuristic = true;

                double error = smo.Run();

                Assert.AreEqual(0.2, error);

                Assert.AreEqual(0.11714451552090824, smo.Complexity);

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(inputs[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
                Assert.AreEqual(20, matrix.FalseNegatives);
                Assert.AreEqual(0, matrix.FalsePositives);
                Assert.AreEqual(30, matrix.TruePositives);
                Assert.AreEqual(50, matrix.TrueNegatives);
            }

            {
                Accord.Math.Tools.SetupGenerator(0);
                var projection = inputs.Apply(kernel.Transform);
                var machine = new SupportVectorMachine(projection[0].Length);
                var smo = new LinearDualCoordinateDescent(machine, projection, labels);
                smo.UseComplexityHeuristic = true;
                smo.Tolerance = 0.01;

                double error = smo.Run();

                Assert.AreEqual(0.18, error);

                Assert.AreEqual(0.11714451552090821, smo.Complexity, 1e-15);

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(projection[i]));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
                Assert.AreEqual(17, matrix.FalseNegatives);
                Assert.AreEqual(1, matrix.FalsePositives);
                Assert.AreEqual(33, matrix.TruePositives);
                Assert.AreEqual(49, matrix.TrueNegatives);
            }

            {
                Accord.Math.Random.Generator.Seed = 0;
                var projection = inputs.Apply(kernel.Transform);
                var machine = new SupportVectorMachine(projection[0].Length);
                var smo = new LinearDualCoordinateDescent(machine, projection, labels);
                smo.UseComplexityHeuristic = true;
                smo.Loss = Loss.L1;

                double error = smo.Run();

                Assert.AreEqual(0.2, error);

                Assert.AreEqual(0.11714451552090821, smo.Complexity, 1e-15);

                int[] actual = new int[labels.Length];
                for (int i = 0; i < actual.Length; i++)
                    actual[i] = Math.Sign(machine.Compute(kernel.Transform(inputs[i])));

                ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
                Assert.AreEqual(20, matrix.FalseNegatives);
                Assert.AreEqual(0, matrix.FalsePositives);
                Assert.AreEqual(30, matrix.TruePositives);
                Assert.AreEqual(50, matrix.TrueNegatives);
            }
        }
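
The asserted error rates are consistent with the confusion matrices: the yin-yang set has 100 samples (e.g. 30 + 50 + 0 + 20 in the first block), so the error is (FP + FN) / 100, giving 0.2, 0.18 and 0.2 for the three blocks. A small hypothetical helper makes that check explicit, using only the count properties already read above.

        // Hypothetical helper: recompute the misclassification rate from the
        // confusion matrix counts used in the assertions above.
        private static double ErrorRate(ConfusionMatrix matrix)
        {
            double total = matrix.TruePositives + matrix.TrueNegatives
                         + matrix.FalsePositives + matrix.FalseNegatives;

            return (matrix.FalsePositives + matrix.FalseNegatives) / total;
        }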
Example #4
        public static void train_one(Problem prob, Parameters param, out double[] w, double Cp, double Cn)
        {
            double[][] inputs = prob.Inputs;
            int[] labels = prob.Outputs.Apply(x => x >= 0 ? 1 : -1);

            double eps = param.Tolerance;

            int pos = 0;
            for (int i = 0; i < labels.Length; i++)
                if (labels[i] >= 0) pos++;
            int neg = prob.Outputs.Length - pos;

            double primal_solver_tol = eps * Math.Max(Math.Min(pos, neg), 1.0) / prob.Inputs.Length;

            SupportVectorMachine svm = new SupportVectorMachine(prob.Dimensions);
            ISupportVectorMachineLearning teacher = null;


            switch (param.Solver)
            {
                case LibSvmSolverType.L2RegularizedLogisticRegression:

                    // l2r_lr_fun
                    teacher = new ProbabilisticNewtonMethod(svm, inputs, labels)
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol
                    }; break;


                case LibSvmSolverType.L2RegularizedL2LossSvc:

                    // fun_obj=new l2r_l2_svc_fun(prob, C);
                    teacher = new LinearNewtonMethod(svm, inputs, labels)
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol
                    }; break;


                case LibSvmSolverType.L2RegularizedL2LossSvcDual:

                    // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L2LOSS_SVC_DUAL);
                    teacher = new LinearDualCoordinateDescent(svm, inputs, labels)
                    {
                        Loss = Loss.L2,
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                    }; break;


                case LibSvmSolverType.L2RegularizedL1LossSvcDual:

                    // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L1LOSS_SVC_DUAL);
                    teacher = new LinearDualCoordinateDescent(svm, inputs, labels)
                    {
                        Loss = Loss.L1,
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                    }; break;


                case LibSvmSolverType.L1RegularizedLogisticRegression:

                    // solve_l1r_lr(&prob_col, w, primal_solver_tol, Cp, Cn);
                    teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels)
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol
                    }; break;


                case LibSvmSolverType.L2RegularizedLogisticRegressionDual:

                    // solve_l2r_lr_dual(prob, w, eps, Cp, Cn);
                    teacher = new ProbabilisticDualCoordinateDescent(svm, inputs, labels)
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol,
                    }; break;
            }


            Trace.WriteLine("Training " + param.Solver);
            
            // run the learning algorithm
            var sw = Stopwatch.StartNew();
            double error = teacher.Run();
            sw.Stop();

            // save the solution
            w = svm.ToWeights();

            Trace.WriteLine(String.Format("Finished {0}: {1} in {2}", 
                param.Solver, error, sw.Elapsed));
        }
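
A possible way to call train_one is sketched below. It assumes that Problem and Parameters expose settable properties matching the members read inside train_one (Inputs, Outputs, Dimensions, Solver, Tolerance); since their definitions are not part of this example, treat the construction as hypothetical.

        private static void trainExample(double[][] inputs, int[] outputs)
        {
            // Hypothetical setup; property names mirror their usage in train_one above.
            var prob = new Problem
            {
                Inputs = inputs,
                Outputs = outputs,
                Dimensions = inputs[0].Length
            };

            var param = new Parameters
            {
                Solver = LibSvmSolverType.L2RegularizedL2LossSvcDual,
                Tolerance = 0.1
            };

            double[] w; // weight vector produced by svm.ToWeights() inside train_one

            // Cp and Cn are the complexity (cost) weights for the
            // positive and negative classes, respectively.
            train_one(prob, param, out w, Cp: 1.0, Cn: 1.0);
        }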
Example #5
        private static void xor()
        {
            // Create a simple binary XOR
            // classification problem:

            double[][] problem =
            {
                //             a    b    a XOR b
                new double[] { 0,   0,      0    },
                new double[] { 0,   1,      1    },
                new double[] { 1,   0,      1    },
                new double[] { 1,   1,      0    },
            };

            // Get the first two columns as the problem
            // inputs and the last column as the output

            // input columns
            double[][] inputs = problem.GetColumns(0, 1);

            // output column
            int[] outputs = problem.GetColumn(2).ToInt32();

            // Plot the problem on screen
            ScatterplotBox.Show("XOR", inputs, outputs).Hold();


            // However, SVMs expect the output value to be
            // either -1 or +1. As such, we have to convert
            // it so the vector contains { -1, +1, +1, -1 }:
            //
            outputs = outputs.Apply(x => x == 0 ? -1 : 1);


            // Create a new linear-SVM for two inputs (a and b)
            SupportVectorMachine svm = new SupportVectorMachine(inputs: 2);

            // Create an L2-regularized L2-loss support vector classification learner
            var teacher = new LinearDualCoordinateDescent(svm, inputs, outputs)
            {
                Loss = Loss.L2,
                Complexity = 1000,
                Tolerance = 1e-5
            };

            // Learn the machine
            double error = teacher.Run(computeError: true);

            // Compute the machine's answers for the learned inputs
            int[] answers = inputs.Apply(x => Math.Sign(svm.Compute(x)));

            // Plot the results
            ScatterplotBox.Show("SVM's answer", inputs, answers).Hold();

            // Use an explicit kernel expansion to transform the 
            // non-linear classification problem into a linear one
            //
            // Create a quadratic kernel
            Quadratic quadratic = new Quadratic(constant: 1);
            
            // Project the inputs into a higher-dimensional space
            double[][] expansion = inputs.Apply(quadratic.Transform);


            
            // Create a new linear-SVM for the transformed input space
            svm = new SupportVectorMachine(inputs: expansion[0].Length);

            // Create the same learning algorithm in the expanded input space
            teacher = new LinearDualCoordinateDescent(svm, expansion, outputs)
            {
                Loss = Loss.L2,
                Complexity = 1000,
                Tolerance = 1e-5
            };

            // Learn the machine
            error = teacher.Run(computeError: true); 

            // Compute the machine's answers for the learned inputs
            answers = expansion.Apply(x => Math.Sign(svm.Compute(x)));

            // Plot the results
            ScatterplotBox.Show("SVM's answer", inputs, answers).Hold();
        }
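
After the expansion, the machine operates in the expanded feature space, so any new point must be passed through the same quadratic.Transform call before svm.Compute, exactly as the kernel.Transform(inputs[i]) call in the earlier test does. A short continuation of xor():

            // Classify a single point (a = 1, b = 0). The point must be
            // projected with the same quadratic kernel expansion that
            // was used during training:
            double[] point = { 1, 0 };

            int answer = Math.Sign(svm.Compute(quadratic.Transform(point)));

            // For XOR, the expected answer for (1, 0) is +1.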
Example #6
        private static void cancer()
        {
            // Create a new LibSVM sparse format data reader
            // to read the Wisconsin Breast Cancer dataset
            //
            var reader = new SparseReader("examples-sparse.txt");

            int[] outputs; // Read the classification problem into dense memory
            double[][] inputs = reader.ReadToEnd(sparse: false, labels: out outputs);

            // The dataset encodes its output labels as 4 and 2. We have to convert them
            // into negative and positive labels so they can be properly processed.
            //
            outputs = outputs.Apply(x => x == 2 ? -1 : +1);

            // Create a new linear-SVM for the problem dimensions
            var svm = new SupportVectorMachine(inputs: reader.Dimensions);

            // Create a linear dual coordinate descent learning algorithm
            var teacher = new LinearDualCoordinateDescent(svm, inputs, outputs)
            {
                Loss = Loss.L2,
                Complexity = 1000,
                Tolerance = 1e-5
            };

            // Learn the classification
            double error = teacher.Run();

            // Compute the machine's answers for the learned inputs
            int[] answers = inputs.Apply(x => Math.Sign(svm.Compute(x)));

            // Create a confusion matrix to show the machine's performance
            var m = new ConfusionMatrix(predicted: answers, expected: outputs);

            // Show it onscreen
            DataGridBox.Show(new ConfusionMatrixView(m));
        }
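
For a diagnostic dataset such as this one, the raw counts can be turned into the usual screening figures. The sketch below is assumed to run after the confusion matrix above and derives the rates directly from the count properties shown on this page, without relying on any additional ConfusionMatrix members.

            // Sensitivity (true positive rate) and specificity (true negative
            // rate), derived from the confusion matrix counts:
            double sensitivity = m.TruePositives /
                (double)(m.TruePositives + m.FalseNegatives);

            double specificity = m.TrueNegatives /
                (double)(m.TrueNegatives + m.FalsePositives);

            Console.WriteLine("Sensitivity: " + sensitivity);
            Console.WriteLine("Specificity: " + specificity);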