/// <summary>
///   Learns a probabilistic linear SVM from sparse inputs and converts it to a
///   dense <c>LogisticRegression</c>, computing the answers and probability
///   estimates of both models over the same data.
/// </summary>
/// <param name="inputs">The sparse input vectors.</param>
/// <param name="doubleOutputs">The raw class labels, encoded as 4 and 2.</param>
private static void sparseLogistic(Sparse<double>[] inputs, double[] doubleOutputs)
{
    // The dataset has output labels as 4 and 2. We have to convert them
    // into negative and positive labels so they can be properly processed.
    // FIX: this declaration was commented out, leaving 'outputs' undefined
    // at the teacher.Learn(inputs, outputs) call below.
    bool[] outputs = doubleOutputs.Apply(x => x == 2.0 ? false : true);

    // Create a probabilistic SVM that can output probabilities besides a decision
    var teacher = new ProbabilisticDualCoordinateDescent<Linear, Sparse<double>>()
    {
        Complexity = 1000,
        Tolerance = 1e-5
    };

    // Use the learning algorithm to Learn
    var svm = teacher.Learn(inputs, outputs);

    // Transform the machine into a dense logistic regression:
    var lr = LogisticRegression.FromWeights(svm.ToWeights());

    // Compute the machine's answers
    bool[] svmAnswers = svm.Decide(inputs);

    // Compute the machine probability estimates:
    double[] svmProbability = svm.Probability(inputs);

    // Compute the logistic regression's answers:
    bool[] lrAnswers = lr.Decide(inputs.ToDense());

    // Compute the logistic regression probability estimates:
    double[] lrProbability = lr.Probability(inputs.ToDense());

    // They should be equal for both the SVM and the LR.
    // NOTE(review): no assertions are performed here — presumably the caller
    // compares svmAnswers/lrAnswers and svmProbability/lrProbability; confirm.
}
public void RunTest()
{
    // Load the yin-yang sample dataset: two input columns plus a class label.
    var dataset = SequentialMinimalOptimizationTest.yinyang;
    double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
    int[] labels = dataset.GetColumn(2).ToInt32();

    Accord.Math.Random.Generator.Seed = 0;

    var svm = new SupportVectorMachine(inputs: 2);
    var teacher = new ProbabilisticDualCoordinateDescent(svm, inputs, labels)
    {
        Tolerance = 1e-10,
        UseComplexityHeuristic = true
    };

    // Training should upgrade the machine into a probabilistic one.
    Assert.IsFalse(svm.IsProbabilistic);
    double error = teacher.Run();
    Assert.IsTrue(svm.IsProbabilistic);

    double[] weights = svm.ToWeights();

    // Expected values obtained from a reference run with seed 0.
    Assert.AreEqual(0.13, error);
    Assert.AreEqual(3, weights.Length);
    Assert.AreEqual(-0.52913278486359605, weights[0], 1e-4);
    Assert.AreEqual(-1.6426069611746976, weights[1], 1e-4);
    Assert.AreEqual(-0.77766953652287762, weights[2], 1e-4);

    // The first weight is the machine's threshold (bias) term.
    Assert.AreEqual(svm.Threshold, weights[0]);
}
/// <summary>
///   Trains a single binary linear machine for the given problem, mirroring
///   LIBLINEAR's <c>train_one</c> routine: picks the solver indicated by
///   <paramref name="param"/>, runs it, and returns the learned weights.
/// </summary>
/// <param name="prob">The problem holding the inputs and outputs.</param>
/// <param name="param">The solver selection and its tolerance.</param>
/// <param name="w">The learned weight vector (including the threshold).</param>
/// <param name="Cp">The cost assigned to positive examples.</param>
/// <param name="Cn">The cost assigned to negative examples.</param>
public static void train_one(Problem prob, Parameters param, out double[] w, double Cp, double Cn)
{
    double[][] inputs = prob.Inputs;
    int[] labels = prob.Outputs.Apply(x => x >= 0 ? 1 : -1);

    double eps = param.Tolerance;

    // Count the positive examples so the primal solver tolerance can be
    // scaled by the class balance, exactly as LIBLINEAR does.
    int pos = 0;
    for (int i = 0; i < labels.Length; i++)
    {
        if (labels[i] >= 0)
            pos++;
    }
    int neg = prob.Outputs.Length - pos;

    double primal_solver_tol = eps * Math.Max(Math.Min(pos, neg), 1.0) / prob.Inputs.Length;

    SupportVectorMachine svm = new SupportVectorMachine(prob.Dimensions);
    ISupportVectorMachineLearning teacher;

    switch (param.Solver)
    {
        case LibSvmSolverType.L2RegularizedLogisticRegression:
            // l2r_lr_fun
            teacher = new ProbabilisticNewtonMethod(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedL2LossSvc:
            // fun_obj=new l2r_l2_svc_fun(prob, C);
            teacher = new LinearNewtonMethod(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedL2LossSvcDual:
            // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L2LOSS_SVC_DUAL);
            teacher = new LinearDualCoordinateDescent(svm, inputs, labels)
            {
                Loss = Loss.L2,
                PositiveWeight = Cp,
                NegativeWeight = Cn,
            };
            break;

        case LibSvmSolverType.L2RegularizedL1LossSvcDual:
            // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L1LOSS_SVC_DUAL);
            teacher = new LinearDualCoordinateDescent(svm, inputs, labels)
            {
                Loss = Loss.L1,
                PositiveWeight = Cp,
                NegativeWeight = Cn,
            };
            break;

        case LibSvmSolverType.L1RegularizedLogisticRegression:
            // solve_l1r_lr(&prob_col, w, primal_solver_tol, Cp, Cn);
            teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedLogisticRegressionDual:
            // solve_l2r_lr_dual(prob, w, eps, Cp, Cn);
            teacher = new ProbabilisticDualCoordinateDescent(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol,
            };
            break;

        default:
            // FIX: previously an unhandled solver left 'teacher' null and
            // crashed with a NullReferenceException at teacher.Run() below;
            // fail fast with a descriptive error instead.
            throw new ArgumentException("Unsupported solver type: " + param.Solver, "param");
    }

    Trace.WriteLine("Training " + param.Solver);

    // run the learning algorithm
    var sw = Stopwatch.StartNew();
    double error = teacher.Run();
    sw.Stop();

    // save the solution
    w = svm.ToWeights();

    Trace.WriteLine(String.Format("Finished {0}: {1} in {2}",
        param.Solver, error, sw.Elapsed));
}
public void logistic_regression_test()
{
    Accord.Math.Random.Generator.Seed = 0;

    #region doc_logreg
    // Declare some training data. This is exactly the same
    // data used in the LogisticRegression documentation page

    // Suppose we have the following data about some patients.
    // The first variable is continuous and represent patient
    // age. The second variable is dichotomic and give whether
    // they smoke or not (This is completely fictional data).

    // We also know if they have had lung cancer or not, and
    // we would like to know whether smoking has any connection
    // with lung cancer (This is completely fictional data).

    double[][] input =
    {                           // age, smokes?, had cancer?
        new double[] { 55, 0 }, // false - no cancer
        new double[] { 28, 0 }, // false
        new double[] { 65, 1 }, // false
        new double[] { 46, 0 }, // true  - had cancer
        new double[] { 86, 1 }, // true
        new double[] { 56, 1 }, // true
        new double[] { 85, 0 }, // false
        new double[] { 33, 0 }, // false
        new double[] { 21, 1 }, // false
        new double[] { 42, 1 }, // true
    };

    double[] output = // Whether each patient had lung cancer or not
    {
        0, 0, 0, 1, 1, 1, 0, 0, 0, 1
    };

    // Create the probabilistic-SVM learning algorithm
    var teacher = new ProbabilisticDualCoordinateDescent()
    {
        Tolerance = 1e-10,
        Complexity = 1e+10, // learn a hard-margin model
    };

    // Learn the support vector machine
    var svm = teacher.Learn(input, output);

    // Convert the svm to logistic regression
    var regression = (LogisticRegression)svm;

    // Compute the predicted outcome for inputs
    bool[] predicted = regression.Decide(input);

    // Compute probability scores for the outputs
    double[] scores = regression.Score(input);

    // Compute odds-ratio as in the LogisticRegression example
    double ageOdds = regression.GetOddsRatio(1);   // 1.0430443799578411
    double smokeOdds = regression.GetOddsRatio(2); // 7.2414593749145508

    // Compute the classification error as in SVM example
    double error = new ZeroOneLoss(output).Loss(predicted);
    #endregion

    // Converting back to an SVM must preserve the machine's dimensions.
    var rsvm = (SupportVectorMachine)regression;
    Assert.AreEqual(2, rsvm.NumberOfInputs);
    Assert.AreEqual(2, rsvm.NumberOfOutputs);

    // The probabilities reported by the SVM and the regression must agree.
    double[] svmpred = svm.Probability(input);
    Assert.IsTrue(scores.IsEqual(svmpred, 1e-10));

    Assert.AreEqual(0.4, error);
    Assert.AreEqual(1.0430443799578411, ageOdds, 1e-4);
    Assert.AreEqual(7.2414593749145508, smokeOdds, 1e-4);

    Assert.AreEqual(-21.4120677536517, regression.Intercept, 1e-8);
    Assert.AreEqual(-21.4120677536517, regression.Coefficients[0], 1e-8);
    Assert.AreEqual(0.042143725408546939, regression.Coefficients[1], 1e-8);
    Assert.AreEqual(1.9798227572056906, regression.Coefficients[2], 1e-8);
}