Example #1
        public static void train_one(Problem prob, Parameters param, out double[] w, double Cp, double Cn)
        {
            double[][] inputs = prob.Inputs;
            int[] labels = prob.Outputs.Apply(x => x >= 0 ? 1 : -1);

            double eps = param.Tolerance;

            int pos = 0;
            for (int i = 0; i < labels.Length; i++)
                if (labels[i] >= 0) pos++;
            int neg = prob.Outputs.Length - pos;

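            // Scale the stopping tolerance by the size of the minority class, as the
            // liblinear primal solvers do: eps * max(min(pos, neg), 1) / #examples.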
            double primal_solver_tol = eps * Math.Max(Math.Min(pos, neg), 1.0) / prob.Inputs.Length;

            SupportVectorMachine svm = new SupportVectorMachine(prob.Dimensions);
            ISupportVectorMachineLearning teacher = null;


            switch (param.Solver)
            {
                case LibSvmSolverType.L2RegularizedLogisticRegression:

                    // l2r_lr_fun
                    teacher = new ProbabilisticNewtonMethod(svm, inputs, labels)
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol
                    }; break;


                case LibSvmSolverType.L2RegularizedL2LossSvc:

                    // fun_obj=new l2r_l2_svc_fun(prob, C);
                    teacher = new LinearNewtonMethod(svm, inputs, labels)
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol
                    }; break;


                case LibSvmSolverType.L2RegularizedL2LossSvcDual:

                    // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L2LOSS_SVC_DUAL);
                    teacher = new LinearDualCoordinateDescent(svm, inputs, labels)
                    {
                        Loss = Loss.L2,
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                    }; break;


                case LibSvmSolverType.L2RegularizedL1LossSvcDual:

                    // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L1LOSS_SVC_DUAL);
                    teacher = new LinearDualCoordinateDescent(svm, inputs, labels)
                    {
                        Loss = Loss.L1,
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                    }; break;


                case LibSvmSolverType.L1RegularizedLogisticRegression:

                    // solve_l1r_lr(&prob_col, w, primal_solver_tol, Cp, Cn);
                    teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels)
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol
                    }; break;


                case LibSvmSolverType.L2RegularizedLogisticRegressionDual:

                    // solve_l2r_lr_dual(prob, w, eps, Cp, Cn);
                    teacher = new ProbabilisticDualCoordinateDescent(svm, inputs, labels)
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol,
                    }; break;
            }


            Trace.WriteLine("Training " + param.Solver);
            
            // run the learning algorithm
            var sw = Stopwatch.StartNew();
            double error = teacher.Run();
            sw.Stop();

            // save the solution
            w = svm.ToWeights();

            Trace.WriteLine(String.Format("Finished {0}: {1} in {2}", 
                param.Solver, error, sw.Elapsed));
        }
Example #2
        public static LibSvmModel train(Problem prob, Parameters parameters)
        {
            double[] w;
            double Cp = parameters.Complexity;
            double Cn = parameters.Complexity;

            if (parameters.ClassWeights != null)
            {
                for (int i = 0; i < parameters.ClassLabels.Count; i++)
                {
                    if (parameters.ClassLabels[i] == -1)
                        Cn *= parameters.ClassWeights[i];

                    else if (parameters.ClassLabels[i] == +1)
                        Cp *= parameters.ClassWeights[i];
                }
            }

            train_one(prob, parameters, out w, Cp, Cn);

            return new LibSvmModel()
            {
                Dimension = prob.Dimensions,
                Classes = 2,
                Labels = new[] { +1, -1 },
                Solver = parameters.Solver,
                Weights = w,
                Bias = 0
            };
        }
Example #3
        public static string check_parameter(Problem prob, Parameters param)
        {
            if (param.Tolerance <= 0)
                return "eps <= 0";

            if (param.Complexity <= 0)
                return "C <= 0";

            if (param.Epsilon < 0)
                return "p < 0";

            if (!Enum.IsDefined(typeof(LibSvmSolverType), param.Solver))
                return "unknown solver type";

            if (param.CrossValidation)
                return "cross-validation is not supported at this time.";

            return null;
        }
Example #4
        public static Parameters parse_command_line(string[] args,
            out string input_file_name, out string model_file_name)
        {
            // default values
            var parameters = new Parameters()
            {
                Solver = LibSvmSolverType.L2RegularizedL1LossSvcDual,
                Complexity = 1,
                Tolerance = Double.PositiveInfinity,
                Epsilon = 0.1,
                Bias = -1
            };

            input_file_name = null;
            model_file_name = null;

            int i;

            try
            {
                // parse options
                for (i = 0; i < args.Length; i++)
                {
                    if (args[i][0] != '-')
                        break;

                    if (++i >= args.Length)
                        exit_with_help();

                    switch (args[i - 1][1])
                    {
                        case 's':
                            parameters.Solver = (LibSvmSolverType)Int32.Parse(args[i]);
                            break;

                        case 'c':
                            parameters.Complexity = Double.Parse(args[i]);
                            break;

                        case 'p':
                            parameters.Epsilon = Double.Parse(args[i]);
                            break;

                        case 'e':
                            parameters.Tolerance = Double.Parse(args[i]);
                            break;

                        case 'B':
                            parameters.Bias = Double.Parse(args[i]);
                            break;

                        case 'w':
                            parameters.ClassLabels.Add(Int32.Parse(args[i - 1][2].ToString()));
                            parameters.ClassWeights.Add(Double.Parse(args[i]));
                            break;

                        case 'v':
                            parameters.CrossValidation = true;
                            parameters.ValidationFolds = Int32.Parse(args[i]);
                            if (parameters.ValidationFolds < 2)
                            {
                                Console.WriteLine("n-fold cross validation: n must >= 2");
                                exit_with_help();
                            }
                            break;

                        case 'q':
                            Trace.Listeners.Clear();
                            i--;
                            break;

                        default:
                            Console.WriteLine("unknown option: -%c" + args[i - 1][1]);
                            exit_with_help();
                            break;
                    }
                }

                // determine filenames
                if (i >= args.Length)
                    exit_with_help();

                input_file_name = args[i];

                if (i < args.Length - 1)
                    model_file_name = args[i + 1];
            }
            catch
            {
                exit_with_help();
            }

            if (parameters.Tolerance == Double.PositiveInfinity)
            {
                switch (parameters.Solver)
                {
                    case LibSvmSolverType.L2RegularizedLogisticRegression:
                    case LibSvmSolverType.L2RegularizedL2LossSvc:
                        parameters.Tolerance = 0.01;
                        break;
                    case LibSvmSolverType.L2RegularizedL2LossSvr:
                        parameters.Tolerance = 0.001;
                        break;
                    case LibSvmSolverType.L2RegularizedL2LossSvcDual:
                    case LibSvmSolverType.L2RegularizedL1LossSvcDual:
                    case LibSvmSolverType.MulticlassSvmCrammerSinger:
                    case LibSvmSolverType.L2RegularizedLogisticRegressionDual:
                        parameters.Tolerance = 0.1;
                        break;
                    case LibSvmSolverType.L1RegularizedL2LossSvc:
                    case LibSvmSolverType.L1RegularizedLogisticRegression:
                        parameters.Tolerance = 0.01;
                        break;
                    case LibSvmSolverType.L2RegularizedL1LossSvrDual:
                    case LibSvmSolverType.L2RegularizedL2LossSvrDual:
                        parameters.Tolerance = 0.1;
                        break;
                }
            }

            return parameters;
        }
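The pieces above can be wired together in a liblinear-style entry point. The following is a minimal sketch and not part of the samples above: read_problem is a hypothetical helper that loads a LibSVM-format data file into a Problem, and writing the resulting model to model_file_name is left out.

        static void Main(string[] args)
        {
            string input_file_name;
            string model_file_name;

            // Parse the liblinear-style command line options
            Parameters parameters = parse_command_line(args, out input_file_name, out model_file_name);

            // Load the training data (read_problem is a hypothetical loader)
            Problem prob = read_problem(input_file_name, parameters.Bias);

            // Validate the parsed parameters before training
            string error_msg = check_parameter(prob, parameters);
            if (error_msg != null)
            {
                Console.WriteLine("ERROR: " + error_msg);
                Environment.Exit(1);
            }

            // Train the linear model; model.Weights holds the learned weight vector,
            // which could then be serialized to model_file_name.
            LibSvmModel model = train(prob, parameters);
        }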
Example #5
        private static ISupervisedLearning<SupportVectorMachine, double[], int> create(
            Parameters param, double Cp, double Cn, double[][] inputs, int[] outputs)
        {
            double eps = param.Tolerance;
            int n = outputs.Length;

            int pos = 0;
            for (int i = 0; i < outputs.Length; i++)
            {
                if (outputs[i] >= 0) 
                    pos++;
            }
            int neg = n - pos;

            double primal_solver_tol = eps * Math.Max(Math.Min(pos, neg), 1.0) / n;

            switch (param.Solver)
            {
                case LibSvmSolverType.L2RegularizedLogisticRegression:
                    // l2r_lr_fun
                    return new ProbabilisticNewtonMethod()
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol
                    }; 

                case LibSvmSolverType.L2RegularizedL2LossSvc:
                    // fun_obj=new l2r_l2_svc_fun(prob, C);
                    return new LinearNewtonMethod()
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol
                    }; 

                case LibSvmSolverType.L2RegularizedL2LossSvcDual:
                    // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L2LOSS_SVC_DUAL);
                    return new LinearDualCoordinateDescent()
                    {
                        Loss = Loss.L2,
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                    }; 

                case LibSvmSolverType.L2RegularizedL1LossSvcDual:
                    // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L1LOSS_SVC_DUAL);
                    return new LinearDualCoordinateDescent()
                    {
                        Loss = Loss.L1,
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                    }; 

                case LibSvmSolverType.L1RegularizedLogisticRegression:
                    // solve_l1r_lr(&prob_col, w, primal_solver_tol, Cp, Cn);
                    return new ProbabilisticCoordinateDescent()
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol
                    }; 

                case LibSvmSolverType.L2RegularizedLogisticRegressionDual:
                    // solve_l2r_lr_dual(prob, w, eps, Cp, Cn);
                    return new ProbabilisticDualCoordinateDescent()
                    {
                        PositiveWeight = Cp,
                        NegativeWeight = Cn,
                        Tolerance = primal_solver_tol,
                    }; 
            }

            throw new InvalidOperationException(String.Format("Unknown solver type: {0}", param.Solver));
        }
Example #6
        public static void train_one(Problem prob, Parameters param, out double[] w, double Cp, double Cn)
        {
            double[][] inputs = prob.Inputs;
            int[] labels = prob.Outputs.Apply(x => x >= 0 ? 1 : -1);

            // Create the learning algorithm from the parameters
            var teacher = create(param, Cp, Cn, inputs, labels);

            Trace.WriteLine("Training " + param.Solver);
            
            // Run the learning algorithm
            var sw = Stopwatch.StartNew();
            SupportVectorMachine svm = teacher.Learn(inputs, labels);
            sw.Stop();

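            // Measure the training error of the learned machine with the hinge loss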
            double error = new HingeLoss(labels).Loss(svm.Score(inputs));

            // Save the solution
            w = svm.ToWeights();

            Trace.WriteLine(String.Format("Finished {0}: {1} in {2}", 
                param.Solver, error, sw.Elapsed));
        }
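For reference, the Learn-based API used in Example #6 can also be called directly, without going through Problem and Parameters. This is a minimal sketch on a toy dataset; it assumes only the Accord.NET types already shown above (LinearDualCoordinateDescent, Loss, SupportVectorMachine), and the method name and data are illustrative.

        static void LearnDirectly()
        {
            // Toy AND problem with labels in {-1, +1}, as expected by train_one above
            double[][] inputs =
            {
                new double[] { 0, 0 },
                new double[] { 0, 1 },
                new double[] { 1, 0 },
                new double[] { 1, 1 },
            };
            int[] outputs = { -1, -1, -1, +1 };

            var teacher = new LinearDualCoordinateDescent()
            {
                Loss = Loss.L2,
                PositiveWeight = 1.0,
                NegativeWeight = 1.0
            };

            // Learn a linear support vector machine and inspect the solution
            SupportVectorMachine svm = teacher.Learn(inputs, outputs);
            bool[] decisions = svm.Decide(inputs);
            double[] w = svm.ToWeights();
        }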