public void LearnTest()
{
    // Classic XOR problem: not linearly separable in the original space.
    double[][] samples =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] targets = { -1, 1, 1, -1 };

    // Explicitly expand the inputs through a degree-2 polynomial kernel
    // so a *linear* machine can separate them in the expanded space.
    var poly = new Polynomial(2, 0.0);

    double[][] expanded = new double[samples.Length][];
    for (int i = 0; i < samples.Length; i++)
        expanded[i] = poly.Transform(samples[i]);

    // Linear machine operating on the kernel-expanded feature space
    SupportVectorMachine machine = new SupportVectorMachine(expanded[0].Length);

    // Coordinate-descent teacher for the linear SVM dual problem
    var teacher = new LinearCoordinateDescent(machine, expanded, targets);

    teacher.Run();

    int[] predicted = expanded.Apply(x => Math.Sign(machine.Compute(x)));

    // Every sample should be classified on the correct side
    for (int i = 0; i < predicted.Length; i++)
        Assert.AreEqual(System.Math.Sign(targets[i]), System.Math.Sign(predicted[i]));
}
public void ComputeTest5()
{
    // Yin-yang dataset: columns 0-1 are the coordinates, column 2 the label.
    var dataset = SequentialMinimalOptimizationTest.yinyang;
    double[][] points = dataset.Submatrix(null, 0, 1).ToArray();
    int[] classes = dataset.GetColumn(2).ToInt32();

    // Project the inputs through an explicit polynomial feature map
    var poly = new Polynomial(2, 1);
    Accord.Math.Tools.SetupGenerator(0);
    var features = points.Apply(poly.Transform);

    // Linear machine on the projected space, trained near the hard-margin
    // limit (very large C, very tight tolerance).
    var machine = new SupportVectorMachine(features[0].Length);
    var solver = new LinearCoordinateDescent(machine, features, classes)
    {
        Complexity = 1000000,
        Tolerance = 1e-15
    };

    double error = solver.Run();

    Assert.AreEqual(1000000.0, solver.Complexity, 1e-15);

    int[] predicted = new int[classes.Length];
    for (int i = 0; i < predicted.Length; i++)
        predicted[i] = Math.Sign(machine.Compute(features[i]));

    // Expected confusion-matrix counts for this seed and configuration
    ConfusionMatrix matrix = new ConfusionMatrix(predicted, classes);
    Assert.AreEqual(6, matrix.FalseNegatives);
    Assert.AreEqual(7, matrix.FalsePositives);
    Assert.AreEqual(44, matrix.TruePositives);
    Assert.AreEqual(43, matrix.TrueNegatives);
}
/// <summary>
///   Trains a single linear machine for the given problem, mirroring
///   LIBLINEAR's <c>train_one</c> dispatch over solver types.
/// </summary>
/// <param name="prob">The learning problem (inputs, outputs, dimensions).</param>
/// <param name="param">Solver selection and tolerance settings.</param>
/// <param name="w">On return, the learned weight vector of the machine.</param>
/// <param name="Cp">Complexity (cost) weight for the positive class.</param>
/// <param name="Cn">Complexity (cost) weight for the negative class.</param>
/// <exception cref="ArgumentException">
///   Thrown when <paramref name="param"/> specifies a solver type that is
///   not supported by this method.
/// </exception>
public static void train_one(Problem prob, Parameters param, out double[] w, double Cp, double Cn)
{
    double[][] inputs = prob.Inputs;
    int[] labels = prob.Outputs.Apply(x => x >= 0 ? 1 : -1);

    double eps = param.Tolerance;

    // Count positive examples; the primal tolerance is scaled by the
    // minority-class size, as in LIBLINEAR.
    int pos = 0;
    for (int i = 0; i < labels.Length; i++)
        if (labels[i] >= 0) pos++;
    int neg = prob.Outputs.Length - pos;

    double primal_solver_tol = eps * Math.Max(Math.Min(pos, neg), 1.0) / prob.Inputs.Length;

    SupportVectorMachine svm = new SupportVectorMachine(prob.Dimensions);
    ISupportVectorMachineLearning teacher = null;

    switch (param.Solver)
    {
        case LibSvmSolverType.L2RegularizedLogisticRegression:
            // l2r_lr_fun
            teacher = new ProbabilisticNewtonMethod(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedL2LossSvc:
            // fun_obj=new l2r_l2_svc_fun(prob, C);
            teacher = new LinearNewtonMethod(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedL2LossSvcDual:
            // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L2LOSS_SVC_DUAL);
            // NOTE(review): LIBLINEAR passes eps to this solver, but no
            // Tolerance is set here — confirm the teacher's default matches.
            teacher = new LinearCoordinateDescent(svm, inputs, labels)
            {
                Loss = Loss.L2,
                PositiveWeight = Cp,
                NegativeWeight = Cn,
            };
            break;

        case LibSvmSolverType.L2RegularizedL1LossSvcDual:
            // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L1LOSS_SVC_DUAL);
            // NOTE(review): same eps/Tolerance question as the L2-loss case.
            teacher = new LinearCoordinateDescent(svm, inputs, labels)
            {
                Loss = Loss.L1,
                PositiveWeight = Cp,
                NegativeWeight = Cn,
            };
            break;

        case LibSvmSolverType.L1RegularizedLogisticRegression:
            // solve_l1r_lr(&prob_col, w, primal_solver_tol, Cp, Cn);
            teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedLogisticRegressionDual:
            // solve_l2r_lr_dual(prob, w, eps, Cp, Cn);
            teacher = new ProbabilisticDualCoordinateDescent(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol,
            };
            break;

        default:
            // Fail fast with a clear message instead of the
            // NullReferenceException that teacher.Run() would raise.
            throw new ArgumentException(
                "Unsupported solver type: " + param.Solver, "param");
    }

    Trace.WriteLine("Training " + param.Solver);

    // run the learning algorithm
    var sw = Stopwatch.StartNew();
    double error = teacher.Run();
    sw.Stop();

    // save the solution
    w = svm.ToWeights();

    Trace.WriteLine(String.Format("Finished {0}: {1} in {2}",
        param.Solver, error, sw.Elapsed));
}
private static void linearSvm(double[][] inputs, int[] outputs)
{
    // Linear binary machine over two input dimensions
    var svm = new SupportVectorMachine(inputs: 2);

    // L2-regularized, L2-loss dual coordinate descent — the same solver
    // LIBLINEAR uses for "-s 1" (L2R_L2LOSS_SVC_DUAL).
    var teacher = new LinearCoordinateDescent(svm, inputs, outputs);

    // Fit the machine to the data
    double error = teacher.Run();

    // Predict a class for every training sample
    int[] answers = inputs.Apply(svm.Compute).Apply(System.Math.Sign);

    // Visualize ground truth next to the machine's predictions
    ScatterplotBox.Show("Expected results", inputs, outputs);
    ScatterplotBox.Show("LinearSVM results", inputs, answers);

    // Indices of samples with strictly positive Lagrange multipliers...
    int[] idx = teacher.Lagrange.Find(x => x > 0);

    // ...are the support vectors of the solution
    double[][] sv = inputs.Submatrix(idx);

    // Show them and keep the window open
    ScatterplotBox.Show("Support vectors", sv).Hold();
}
public void ComputeTest5()
{
    // Yin-yang dataset: columns 0-1 are the coordinates, column 2 the label.
    var dataset = SequentialMinimalOptimizationTest.yinyang;
    double[][] points = dataset.Submatrix(null, 0, 1).ToArray();
    int[] classes = dataset.GetColumn(2).ToInt32();

    var poly = new Polynomial(2, 0);

    // Baseline: kernel machine trained with SMO on the raw inputs.
    {
        var machine = new KernelSupportVectorMachine(poly, points[0].Length);
        var smo = new SequentialMinimalOptimization(machine, points, classes);
        smo.UseComplexityHeuristic = true;

        double error = smo.Run();

        Assert.AreEqual(0.11714451552090824, smo.Complexity);

        int[] predicted = new int[classes.Length];
        for (int i = 0; i < predicted.Length; i++)
            predicted[i] = Math.Sign(machine.Compute(points[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(predicted, classes);
        Assert.AreEqual(20, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives);
        Assert.AreEqual(30, matrix.TruePositives);
        Assert.AreEqual(50, matrix.TrueNegatives);
    }

    // Linear machine over the explicit polynomial projection,
    // L2-loss coordinate descent with a loose tolerance.
    {
        Accord.Math.Tools.SetupGenerator(0);

        var features = points.Apply(poly.Transform);
        var machine = new SupportVectorMachine(features[0].Length);
        var solver = new LinearCoordinateDescent(machine, features, classes);
        solver.UseComplexityHeuristic = true;
        solver.Tolerance = 0.01;

        double error = solver.Run();

        Assert.AreEqual(0.11714451552090821, solver.Complexity, 1e-15);

        int[] predicted = new int[classes.Length];
        for (int i = 0; i < predicted.Length; i++)
            predicted[i] = Math.Sign(machine.Compute(features[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(predicted, classes);
        Assert.AreEqual(17, matrix.FalseNegatives);
        Assert.AreEqual(1, matrix.FalsePositives);
        Assert.AreEqual(33, matrix.TruePositives);
        Assert.AreEqual(49, matrix.TrueNegatives);
    }

    // Same linear setup but with the L1 loss; predictions recompute the
    // projection per sample instead of reusing the cached array.
    {
        Accord.Math.Tools.SetupGenerator(0);

        var features = points.Apply(poly.Transform);
        var machine = new SupportVectorMachine(features[0].Length);
        var solver = new LinearCoordinateDescent(machine, features, classes);
        solver.UseComplexityHeuristic = true;
        solver.Loss = Loss.L1;

        double error = solver.Run();

        Assert.AreEqual(0.11714451552090821, solver.Complexity, 1e-15);

        int[] predicted = new int[classes.Length];
        for (int i = 0; i < predicted.Length; i++)
            predicted[i] = Math.Sign(machine.Compute(poly.Transform(points[i])));

        ConfusionMatrix matrix = new ConfusionMatrix(predicted, classes);
        Assert.AreEqual(20, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives);
        Assert.AreEqual(30, matrix.TruePositives);
        Assert.AreEqual(50, matrix.TrueNegatives);
    }
}