public void RunTest2()
{
    // Load the ying-yang example dataset and split it into
    // the two input columns and the class-label column
    var dataset = SequentialMinimalOptimizationTest.yinyang;
    double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
    int[] labels = dataset.GetColumn(2).ToInt32();

    // Create a linear machine with two inputs and train it with
    // L1-regularized logistic regression (coordinate descent)
    var svm = new SupportVectorMachine(inputs: 2);
    var teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels);

    teacher.Tolerance = 1e-10;
    teacher.Complexity = 1e+10;

    double error = teacher.Run();

    // The weight vector is laid out as [threshold, w1, w2]
    double[] weights = svm.ToWeights();

    Assert.AreEqual(0.12, error);
    Assert.AreEqual(3, weights.Length);
    Assert.AreEqual(-1.3231203367770932, weights[0], 1e-8);
    Assert.AreEqual(-3.0227742288788493, weights[1], 1e-8);
    Assert.AreEqual(-0.73074823290553259, weights[2], 1e-8);
    Assert.AreEqual(svm.Threshold, weights[0]);
}
public void RunTest()
{
    var dataset = SequentialMinimalOptimizationTest.yinyang;
    double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
    int[] labels = dataset.GetColumn(2).ToInt32();

    // Fix the random seed so the test is reproducible
    Accord.Math.Random.Generator.Seed = 0;

    var svm = new SupportVectorMachine(inputs: 2);
    var teacher = new ProbabilisticDualCoordinateDescent(svm, inputs, labels);

    teacher.Tolerance = 1e-10;
    teacher.UseComplexityHeuristic = true;

    // The machine only becomes probabilistic after training
    Assert.IsFalse(svm.IsProbabilistic);
    double error = teacher.Run();
    Assert.IsTrue(svm.IsProbabilistic);

    double[] weights = svm.ToWeights();

    Assert.AreEqual(0.13, error);
    Assert.AreEqual(3, weights.Length);
    Assert.AreEqual(-0.52913278486359605, weights[0], 1e-4);
    Assert.AreEqual(-1.6426069611746976, weights[1], 1e-4);
    Assert.AreEqual(-0.77766953652287762, weights[2], 1e-4);
    Assert.AreEqual(svm.Threshold, weights[0]);
}
public void RunTest()
{
    // Example binary problem: predict cancer from age and smoking habit
    double[][] input =
    {
        new double[] { 55, 0 }, // 0 - no cancer
        new double[] { 28, 0 }, // 0
        new double[] { 65, 1 }, // 0
        new double[] { 46, 0 }, // 1 - has cancer
        new double[] { 86, 1 }, // 1
        new double[] { 56, 1 }, // 1
        new double[] { 85, 0 }, // 0
        new double[] { 33, 0 }, // 0
        new double[] { 21, 1 }, // 0
        new double[] { 42, 1 }, // 1
    };

    double[] output = { 0, 0, 0, 1, 1, 1, 0, 0, 0, 1 };

    // Convert the {0,1} outputs to the {-1,+1} labels expected by the SVM
    int[] labels = output.Apply(x => x > 0 ? +1 : -1);

    var svm = new SupportVectorMachine(inputs: 2);
    var teacher = new ProbabilisticNewtonMethod(svm, input, labels);

    teacher.Tolerance = 1e-10;
    teacher.Complexity = 1e+10;

    // The machine only becomes probabilistic after training
    Assert.IsFalse(svm.IsProbabilistic);
    double error = teacher.Run();
    Assert.IsTrue(svm.IsProbabilistic);

    Assert.AreEqual(0.2, error);
    Assert.AreEqual(0.02064511826338301, svm.SupportVectors[0][0], 1e-10);
    Assert.AreEqual(1.767889310996118, svm.SupportVectors[0][1], 1e-10);
    Assert.AreEqual(-2.4577464317497455, svm.Threshold, 1e-10);

    // A probabilistic linear machine is equivalent to a logistic regression
    var regression = LogisticRegression.FromWeights(svm.ToWeights());

    double[] actual = new double[output.Length];
    for (int i = 0; i < actual.Length; i++)
        actual[i] = regression.Compute(input[i]);

    double ageOdds = regression.GetOddsRatio(1);   // 1.0208597028836701
    double smokeOdds = regression.GetOddsRatio(2); // 5.8584748789881331

    Assert.AreEqual(1.0208597028836701, ageOdds, 1e-4);
    Assert.AreEqual(5.8584748789881331, smokeOdds, 1e-4);

    Assert.AreEqual(-2.4577464307294092, regression.Intercept, 1e-8);
    Assert.AreEqual(-2.4577464307294092, regression.Coefficients[0], 1e-8);
    Assert.AreEqual(0.020645118265359252, regression.Coefficients[1], 1e-8);
    Assert.AreEqual(1.7678893101571855, regression.Coefficients[2], 1e-8);
}
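// Usage sketch (illustration only, not part of the original test suite): the
// test above shows that a probabilistic linear machine converts losslessly to
// a LogisticRegression. Under that assumption, scoring a new observation is a
// one-liner; the helper name PredictProbability and the input values here are
// hypothetical, while FromWeights, ToWeights and Compute are taken from the
// test above.
public static double PredictProbability(SupportVectorMachine svm, double age, bool smokes)
{
    // ToWeights() produces the [intercept, w1, w2] layout that
    // LogisticRegression.FromWeights() expects
    var regression = LogisticRegression.FromWeights(svm.ToWeights());

    // Returns the estimated probability of the positive (cancer) class
    return regression.Compute(new double[] { age, smokes ? 1 : 0 });
}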
public void KernelTest1()
{
    var dataset = SequentialMinimalOptimizationTest.GetYingYang();
    double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
    int[] labels = dataset.GetColumn(2).ToInt32();

    double e1, e2;
    double[] w1, w2;

    // Train a plain linear machine...
    {
        Accord.Math.Random.Generator.Seed = 0;
        var svm = new SupportVectorMachine(inputs: 2);
        var teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels);
        teacher.Tolerance = 1e-10;
        teacher.Complexity = 1e+10;
        e1 = teacher.Run();
        w1 = svm.ToWeights();
    }

    // ...and a kernel machine with a linear kernel; both
    // should reach exactly the same solution
    {
        Accord.Math.Random.Generator.Seed = 0;
        var svm = new KernelSupportVectorMachine(new Linear(0), inputs: 2);
        var teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels);
        teacher.Tolerance = 1e-10;
        teacher.Complexity = 1e+10;
        e2 = teacher.Run();
        w2 = svm.ToWeights();
    }

    Assert.AreEqual(e1, e2);
    Assert.AreEqual(w1.Length, w2.Length);
    Assert.AreEqual(w1[0], w2[0], 1e-8);
    Assert.AreEqual(w1[1], w2[1], 1e-8);
    Assert.AreEqual(w1[2], w2[2], 1e-8);
}
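// Sketch (illustration only): KernelTest1 compares the two machines through
// their extracted weights, relying on the [threshold, w1, w2] layout asserted
// in RunTest2 above (weights[0] == svm.Threshold). Under that assumption, the
// linear decision function can be evaluated directly from the weight vector;
// the helper name DecisionFromWeights is hypothetical.
public static double DecisionFromWeights(double[] weights, double[] x)
{
    // weights[0] is the threshold (bias); the remaining entries
    // multiply the corresponding input dimensions
    double sum = weights[0];
    for (int i = 0; i < x.Length; i++)
        sum += weights[i + 1] * x[i];
    return sum; // positive => class +1, negative => class -1
}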
public static void train_one(Problem prob, Parameters param, out double[] w, double Cp, double Cn)
{
    double[][] inputs = prob.Inputs;
    int[] labels = prob.Outputs.Apply(x => x >= 0 ? 1 : -1);

    // Create the learning algorithm from the parameters
    var teacher = create(param, Cp, Cn, inputs, labels);

    Trace.WriteLine("Training " + param.Solver);

    // Run the learning algorithm
    var sw = Stopwatch.StartNew();
    SupportVectorMachine svm = teacher.Learn(inputs, labels);
    sw.Stop();

    double error = new HingeLoss(labels).Loss(svm.Score(inputs));

    // Save the solution
    w = svm.ToWeights();

    Trace.WriteLine(String.Format("Finished {0}: {1} in {2}",
        param.Solver, error, sw.Elapsed));
}
public static void train_one(Problem prob, Parameters param, out double[] w, double Cp, double Cn)
{
    double[][] inputs = prob.Inputs;
    int[] labels = prob.Outputs.Apply(x => x >= 0 ? 1 : -1);

    double eps = param.Tolerance;

    // Count the positive and negative examples to scale the
    // primal solver tolerance, as LIBLINEAR does
    int pos = 0;
    for (int i = 0; i < labels.Length; i++)
        if (labels[i] >= 0) pos++;
    int neg = prob.Outputs.Length - pos;

    double primal_solver_tol = eps * Math.Max(Math.Min(pos, neg), 1.0) / prob.Inputs.Length;

    SupportVectorMachine svm = new SupportVectorMachine(prob.Dimensions);
    ISupportVectorMachineLearning teacher;

    // Map each LIBLINEAR solver type to the corresponding learning algorithm
    switch (param.Solver)
    {
        case LibSvmSolverType.L2RegularizedLogisticRegression:
            // l2r_lr_fun
            teacher = new ProbabilisticNewtonMethod(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedL2LossSvc:
            // fun_obj = new l2r_l2_svc_fun(prob, C);
            teacher = new LinearNewtonMethod(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedL2LossSvcDual:
            // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L2LOSS_SVC_DUAL);
            teacher = new LinearDualCoordinateDescent(svm, inputs, labels)
            {
                Loss = Loss.L2,
                PositiveWeight = Cp,
                NegativeWeight = Cn,
            };
            break;

        case LibSvmSolverType.L2RegularizedL1LossSvcDual:
            // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L1LOSS_SVC_DUAL);
            teacher = new LinearDualCoordinateDescent(svm, inputs, labels)
            {
                Loss = Loss.L1,
                PositiveWeight = Cp,
                NegativeWeight = Cn,
            };
            break;

        case LibSvmSolverType.L1RegularizedLogisticRegression:
            // solve_l1r_lr(&prob_col, w, primal_solver_tol, Cp, Cn);
            teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedLogisticRegressionDual:
            // solve_l2r_lr_dual(prob, w, eps, Cp, Cn);
            teacher = new ProbabilisticDualCoordinateDescent(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol,
            };
            break;

        default:
            // Fail early instead of dereferencing a null teacher below
            throw new InvalidOperationException("Unsupported solver: " + param.Solver);
    }

    Trace.WriteLine("Training " + param.Solver);

    // Run the learning algorithm
    var sw = Stopwatch.StartNew();
    double error = teacher.Run();
    sw.Stop();

    // Save the solution
    w = svm.ToWeights();

    Trace.WriteLine(String.Format("Finished {0}: {1} in {2}",
        param.Solver, error, sw.Elapsed));
}
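// Usage sketch (hypothetical, not from the original file): how train_one
// might be invoked. The property names Inputs, Outputs, Dimensions, Solver
// and Tolerance are taken from the methods above; the object initializers,
// the helper name and the concrete values are assumptions for illustration.
public static double[] TrainL1LogisticSketch(double[][] inputs, double[] outputs)
{
    var prob = new Problem
    {
        Inputs = inputs,
        Outputs = outputs,
        Dimensions = inputs[0].Length
    };

    var param = new Parameters
    {
        Solver = LibSvmSolverType.L1RegularizedLogisticRegression,
        Tolerance = 0.01
    };

    double[] w;
    train_one(prob, param, out w, Cp: 1.0, Cn: 1.0); // equal class weights

    // With the ToWeights() layout used above, w is [threshold, w1, ..., wn]
    return w;
}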