public svm_model initial(List <Dictionary <string, double> > docWordDicList, Dictionary <string, int> dictionary, List <int> trainingAnswer, double c, double gamma)
        {
            Console.WriteLine("==> Starting training...");

            svm_problem   prob  = gen_svm_training_data(docWordDicList, dictionary, trainingAnswer);
            svm_parameter param = new svm_parameter();

            // default parameter values
            param.svm_type    = svm_parameter.C_SVC;
            param.kernel_type = svm_parameter.RBF;
            param.degree      = 3;
            param.gamma       = gamma;
            param.coef0       = 0;
            param.nu          = 0.5;
            param.cache_size  = 100;
            param.C           = c;
            param.eps         = 1e-3;
            param.p           = 0.1;
            param.shrinking   = 1;
            param.probability = 0;
            param.nr_weight   = 0;

            svm_model model = svm.svm_train(prob, param);

            svm.svm_save_model(SVM_MODEL_FILE_NAME, model);

            Console.WriteLine("==> Training done!!");
            return(model);
        }
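A possible follow-up (not part of the original snippet): classifying a single document with the model returned above, assuming the same dictionary used for training and a libsvm port whose svm_node exposes public index and value fields, as in the readProblem example further down this page.
        public int Classify(svm_model model, Dictionary <string, double> docWordDic, Dictionary <string, int> dictionary)
        {
            // Build a sparse feature vector from the word weights, keeping only words known to the dictionary.
            var nodes = new List <svm_node>();
            foreach (var pair in docWordDic)
            {
                int featureIndex;
                if (dictionary.TryGetValue(pair.Key, out featureIndex))
                {
                    nodes.Add(new svm_node { index = featureIndex, value = pair.Value });
                }
            }

            // libsvm expects feature indices in ascending order.
            nodes.Sort((a, b) => a.index.CompareTo(b.index));

            return((int)svm.svm_predict(model, nodes.ToArray()));
        }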
Example #2
        public static SVMParameter Convert(svm_parameter x)
        {
            SVMParameter y = new SVMParameter();

            y.Type        = (SVMType)x.svm_type;
            y.Kernel      = (SVMKernelType)x.kernel_type;
            y.Degree      = x.degree;
            y.Gamma       = x.gamma;
            y.Coef0       = x.coef0;
            y.CacheSize   = x.cache_size;
            y.Eps         = x.eps;
            y.C           = x.C;
            y.Nu          = x.nu;
            y.P           = x.p;
            y.Shrinking   = x.shrinking != 0;
            y.Probability = x.probability != 0;

            int length = x.nr_weight;

            y.WeightLabels = new int[length];
            if (length > 0)
            {
                Marshal.Copy(x.weight_label, y.WeightLabels, 0, length);
            }

            y.Weights = new double[length];
            if (length > 0)
            {
                Marshal.Copy(x.weight, y.Weights, 0, length);
            }

            return(y);
        }
Example #3
        /// <summary>
        /// Evaluate the error for the specified model.
        /// </summary>
        ///
        /// <param name="param">The params for the SVN.</param>
        /// <param name="prob">The problem to evaluate.</param>
        /// <param name="target">The output values from the SVN.</param>
        /// <returns>The calculated error.</returns>
        private static double Evaluate(svm_parameter param, svm_problem prob,
                                       double[] target)
        {
            int totalCorrect = 0;

            var error = new ErrorCalculation();

            if ((param.svm_type == svm_parameter.EPSILON_SVR) ||
                (param.svm_type == svm_parameter.NU_SVR))
            {
                for (int i = 0; i < prob.l; i++)
                {
                    double ideal  = prob.y[i];
                    double actual = target[i];
                    error.UpdateError(actual, ideal);
                }
                return(error.Calculate());
            }
            for (int i = 0; i < prob.l; i++)
            {
                if (target[i] == prob.y[i])
                {
                    ++totalCorrect;
                }
            }

            return(Format.HundredPercent * totalCorrect / prob.l);
        }
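For context, a minimal sketch (the Predict name is hypothetical) of how the target array passed into Evaluate would typically be produced: one svm_predict call per row of the problem.
        private static double[] Predict(svm_model model, svm_problem prob)
        {
            var target = new double[prob.l];

            for (int i = 0; i < prob.l; i++)
            {
                // one prediction per training row, in the same order as prob.y
                target[i] = svm.svm_predict(model, prob.x[i]);
            }
            return(target);
        }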
Example #4
        public static void Run(IRegressionProblemData problemData, IEnumerable <string> allowedInputVariables,
                               string svmType, string kernelType, double cost, double nu, double gamma, double epsilon, int degree,
                               out ISupportVectorMachineModel model, out int nSv)
        {
            var               dataset        = problemData.Dataset;
            string            targetVariable = problemData.TargetVariable;
            IEnumerable <int> rows           = problemData.TrainingIndices;

            svm_parameter parameter = new svm_parameter {
                svm_type    = GetSvmType(svmType),
                kernel_type = GetKernelType(kernelType),
                C           = cost,
                nu          = nu,
                gamma       = gamma,
                p           = epsilon,
                cache_size  = 500,
                probability = 0,
                eps         = 0.001,
                degree      = degree,
                shrinking   = 1,
                coef0       = 0
            };

            svm_problem    problem        = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
            RangeTransform rangeTransform = RangeTransform.Compute(problem);
            svm_problem    scaledProblem  = rangeTransform.Scale(problem);
            var            svmModel       = svm.svm_train(scaledProblem, parameter);

            nSv = svmModel.SV.Length;

            model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables);
        }
Example #5
        /// <summary>
        /// Evaluate the error for the specified model.
        /// </summary>
        /// <param name="param">The params for the SVN.</param>
        /// <param name="prob">The problem to evaluate.</param>
        /// <param name="target">The output values from the SVN.</param>
        /// <returns>The calculated error.</returns>
        private double Evaluate(svm_parameter param, svm_problem prob,
                                double[] target)
        {
            int total_correct = 0;

            ErrorCalculation error = new ErrorCalculation();

            if (param.svm_type == svm_parameter.EPSILON_SVR ||
                param.svm_type == svm_parameter.NU_SVR)
            {
                for (int i = 0; i < prob.l; i++)
                {
                    double ideal  = prob.y[i];
                    double actual = target[i];
                    error.UpdateError(actual, ideal);
                }
                return(error.Calculate());
            }
            else
            {
                for (int i = 0; i < prob.l; i++)
                {
                    if (target[i] == prob.y[i])
                    {
                        ++total_correct;
                    }
                }

                return(100.0 * total_correct / prob.l);
            }
        }
Example #6
        public static void Free(svm_parameter x)
        {
            Marshal.FreeHGlobal(x.weight);
            x.weight = IntPtr.Zero;

            Marshal.FreeHGlobal(x.weight_label);
            x.weight_label = IntPtr.Zero;
        }
Example #7
        public static SVMParameter Convert(IntPtr ptr)
        {
            if (ptr == IntPtr.Zero)
            {
                return(null);
            }

            svm_parameter x = (svm_parameter)Marshal.PtrToStructure(ptr, typeof(svm_parameter));

            return(SVMParameter.Convert(x));
        }
Example #8
        public static void Run(IClassificationProblemData problemData, IEnumerable <string> allowedInputVariables,
                               int svmType, int kernelType, double cost, double nu, double gamma, int degree,
                               out ISupportVectorMachineModel model, out int nSv)
        {
            var               dataset        = problemData.Dataset;
            string            targetVariable = problemData.TargetVariable;
            IEnumerable <int> rows           = problemData.TrainingIndices;

            svm_parameter parameter = new svm_parameter {
                svm_type    = svmType,
                kernel_type = kernelType,
                C           = cost,
                nu          = nu,
                gamma       = gamma,
                cache_size  = 500,
                probability = 0,
                eps         = 0.001,
                degree      = degree,
                shrinking   = 1,
                coef0       = 0
            };

            var weightLabels = new List <int>();
            var weights      = new List <double>();

            foreach (double c in problemData.ClassValues)
            {
                double wSum = 0.0;
                foreach (double otherClass in problemData.ClassValues)
                {
                    if (!c.IsAlmost(otherClass))
                    {
                        wSum += problemData.GetClassificationPenalty(c, otherClass);
                    }
                }
                weightLabels.Add((int)c);
                weights.Add(wSum);
            }
            parameter.weight_label = weightLabels.ToArray();
            parameter.weight       = weights.ToArray();

            svm_problem    problem        = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
            RangeTransform rangeTransform = RangeTransform.Compute(problem);
            svm_problem    scaledProblem  = rangeTransform.Scale(problem);
            var            svmModel       = svm.svm_train(scaledProblem, parameter);

            nSv = svmModel.SV.Length;

            model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables, problemData.ClassValues);
        }
Example #9
        public static void Free(IntPtr ptr)
        {
            if (ptr == IntPtr.Zero)
            {
                return;
            }

            svm_parameter x = (svm_parameter)Marshal.PtrToStructure(ptr, typeof(svm_parameter));

            SVMParameter.Free(x);

            Marshal.DestroyStructure(ptr, typeof(svm_parameter));
            Marshal.FreeHGlobal(ptr);
            ptr = IntPtr.Zero;
        }
Example #10
        /// <summary>
        /// Construct a SVM from a model.
        /// </summary>
        ///
        /// <param name="theModel">The model.</param>
        public SupportVectorMachine(svm_model theModel)
        {
            _model      = theModel;
            _paras      = _model.param;
            _inputCount = 0;

            // determine the input count
            foreach (var element in _model.SV)
            {
                foreach (svm_node t in element)
                {
                    _inputCount = Math.Max(t.index, _inputCount);
                }
            }

            //
        }
Example #11
 public MaltLibsvmModel(svm_model model, svm_problem problem)
 {
     param    = model.param;
     nr_class = model.nr_class;
     l        = model.l;
     SV       = model.SV;
     sv_coef  = model.sv_coef;
     rho      = model.rho;
     label    = model.label;
     nSV      = model.nSV;
     start    = new int[nr_class];
     start[0] = 0;
     for (int i = 1; i < nr_class; i++)
     {
         start[i] = start[i - 1] + nSV[i - 1];
     }
 }
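The start array above holds the cumulative offset of each class's support vectors inside SV. A small worked check (illustrative values only):
     // nSV = {3, 2, 4}  =>  start = {0, 3, 5}:
     // class k's support vectors occupy SV[start[k]] .. SV[start[k] + nSV[k] - 1].
     int[] nSVExample   = { 3, 2, 4 };
     int[] startExample = new int[nSVExample.Length];
     startExample[0] = 0;
     for (int i = 1; i < nSVExample.Length; i++)
     {
         startExample[i] = startExample[i - 1] + nSVExample[i - 1];
     }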
Example #12
        public bool LoadFromFile(string fileName)
        {
            if (File.Exists(fileName))
            {
                FileStream fs = new FileStream(fileName, FileMode.Open);
                using (BinaryReader r = new BinaryReader(fs))
                {
                    this.model = new svm_model();

                    svm_parameter p = new svm_parameter();
                    p.C            = r.ReadDouble();
                    p.cache_size   = r.ReadDouble();
                    p.coef0        = r.ReadDouble();
                    p.degree       = r.ReadDouble();
                    p.eps          = r.ReadDouble();
                    p.gamma        = r.ReadDouble();
                    p.kernel_type  = r.ReadInt32();
                    p.nr_weight    = r.ReadInt32();
                    p.nu           = r.ReadDouble();
                    p.p            = r.ReadDouble();
                    p.probability  = r.ReadInt32();
                    p.shrinking    = r.ReadInt32();
                    p.svm_type     = r.ReadInt32();
                    p.weight       = ReadDoubleArray(r);
                    p.weight_label = ReadIntArray(r);

                    this.model.param    = p;
                    this.model.nr_class = r.ReadInt32();
                    this.model.l        = r.ReadInt32();
                    this.model.SV       = ReadSvmNodeArray(r);
                    this.model.sv_coef  = ReadDouble2DArray(r);
                    this.model.rho      = ReadDoubleArray(r);
                    this.model.probA    = ReadDoubleArray(r);
                    this.model.probB    = ReadDoubleArray(r);
                    this.model.label    = ReadIntArray(r);
                    this.model.nSV      = ReadIntArray(r);

                    return(true);
                }
            }

            this.model = null;
            return(false);
        }
Example #13
        public static IntPtr Allocate(SVMParameter x)
        {
            if (x == null)
            {
                return(IntPtr.Zero);
            }

            svm_parameter y = new svm_parameter();

            y.svm_type    = (int)x.Type;
            y.kernel_type = (int)x.Kernel;
            y.degree      = x.Degree;
            y.gamma       = x.Gamma;
            y.coef0       = x.Coef0;
            y.cache_size  = x.CacheSize;
            y.eps         = x.Eps;
            y.C           = x.C;
            y.nu          = x.Nu;
            y.p           = x.P;
            y.shrinking   = x.Shrinking ? 1 : 0;
            y.probability = x.Probability ? 1 : 0;
            y.nr_weight   = x.WeightLabels.Length;

            y.weight_label = IntPtr.Zero;
            if (y.nr_weight > 0)
            {
                y.weight_label = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(int)) * x.WeightLabels.Length);
                Marshal.Copy(x.WeightLabels, 0, y.weight_label, x.WeightLabels.Length);
            }

            y.weight = IntPtr.Zero;
            if (y.nr_weight > 0)
            {
                y.weight = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(double)) * x.Weights.Length);
                Marshal.Copy(x.Weights, 0, y.weight, x.Weights.Length);
            }

            int    size = Marshal.SizeOf(y);
            IntPtr ptr  = Marshal.AllocHGlobal(size);

            Marshal.StructureToPtr(y, ptr, true);

            return(ptr);
        }
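A round-trip sketch tying the marshaling helpers together (Convert, Allocate and Free as shown in Examples #2, #7, #9 and #13); the RoundTrip name and the native call site are hypothetical.
        public static void RoundTrip(IntPtr nativeParam)
        {
            SVMParameter managed = SVMParameter.Convert(nativeParam); // unmanaged struct -> managed wrapper
            IntPtr       copy    = SVMParameter.Allocate(managed);    // managed wrapper -> fresh unmanaged block

            try
            {
                // ... hand `copy` to native libsvm calls here ...
            }
            finally
            {
                SVMParameter.Free(copy);                               // releases the weight arrays and the struct itself
            }
        }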
Example #14
        public void TrainModel(double[] labels, double[][] mlArray)
        {
            SvmProblemBuilder builder = new SvmProblemBuilder(labels, mlArray);
            svm_problem       problem = builder.CreateProblem();

            svm_parameter param = new svm_parameter()
            {
                svm_type     = 0,
                kernel_type  = 0,
                cache_size   = 512,
                eps          = 0.1,
                C            = 10,
                nr_weight    = 0,
                weight_label = null,
                weight       = null
            };

            this.model = svm.svm_train(problem, param);
        }
Example #15
        /// <summary>
        /// Instantiate and return an svm_parameter object with default values.
        /// </summary>
        /// <returns>An svm_parameter object with default values</returns>
        public static svm_parameter DefaultParameters()
        {
            svm_parameter parameter = new svm_parameter();

            parameter.svm_type    = svm_parameter.NU_SVR;
            parameter.kernel_type = svm_parameter.RBF;
            parameter.C           = 1;
            parameter.nu          = 0.5;
            parameter.gamma       = 1;
            parameter.p           = 1;
            parameter.cache_size  = 500;
            parameter.probability = 0;
            parameter.eps         = 0.001;
            parameter.degree      = 3;
            parameter.shrinking   = 1;
            parameter.coef0       = 0;

            return(parameter);
        }
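A short usage sketch (the ScoreDefaultRbf name is hypothetical): start from the defaults, override the fields an experiment cares about, and score the setting with the CrossValidate helper shown near the end of this page.
        public static double ScoreDefaultRbf(IDataAnalysisProblemData problemData)
        {
            svm_parameter parameter = DefaultParameters();

            parameter.C     = 10;   // override only what the experiment needs
            parameter.gamma = 0.1;

            return(CrossValidate(problemData, parameter, numberOfFolds: 5));
        }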
Example #16
        static double CrossValidate(long randomSeed, double C)
        {
            var training = Create1vs1Problem(trainingData, 1, 5);

            var config = new svm_parameter()
            {
                svm_type    = (int)SvmType.C_SVC,
                kernel_type = (int)KernelType.POLY,
                C           = C,
                degree      = 2,
                coef0       = 1,
                gamma       = 1,
                eps         = 0.001
            };

            double[] result = new double[training.l];
            svm.rand.setSeed(randomSeed);
            svm.svm_cross_validation(training, config, 10, result);
            return((result.Zip(training.y, (v, u) => Math.Sign(v) != Math.Sign(u) ? 1 : 0).Sum() + 0.0) / result.Length);
        }
Example #17
        /// <summary>
        /// Load the models.
        /// </summary>
        /// <param name="xmlin">Where to read the models from.</param>
        /// <param name="network">Where the models are read into.</param>
        private void HandleModels(ReadXML xmlin, SVMNetwork network)
        {
            int index = 0;

            while (xmlin.ReadToTag())
            {
                if (xmlin.IsIt(SVMNetworkPersistor.TAG_MODEL, true))
                {
                    svm_parameter param = new svm_parameter();
                    svm_model     model = new svm_model();
                    model.param           = param;
                    network.Models[index] = model;
                    HandleModel(xmlin, network.Models[index]);
                    index++;
                }
                else if (xmlin.IsIt(SVMNetworkPersistor.TAG_MODELS, false))
                {
                    break;
                }
            }
        }
Example #18
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: private libsvm.svm_parameter getLibSvmParameters(java.util.LinkedHashMap<String, String> libOptions) throws org.maltparser.core.exception.MaltChainedException
        private svm_parameter getLibSvmParameters(LinkedHashMap <string, string> libOptions)
        {
            svm_parameter param = new svm_parameter();

            param.svm_type     = int.Parse(libOptions.get("s"));
            param.kernel_type  = int.Parse(libOptions.get("t"));
            param.degree       = int.Parse(libOptions.get("d"));
            param.gamma        = Convert.ToDouble(libOptions.get("g"));
            param.coef0        = Convert.ToDouble(libOptions.get("r"));
            param.nu           = Convert.ToDouble(libOptions.get("n"));
            param.cache_size   = Convert.ToDouble(libOptions.get("m"));
            param.C            = Convert.ToDouble(libOptions.get("c"));
            param.eps          = Convert.ToDouble(libOptions.get("e"));
            param.p            = Convert.ToDouble(libOptions.get("p"));
            param.shrinking    = int.Parse(libOptions.get("h"));
            param.probability  = int.Parse(libOptions.get("b"));
            param.nr_weight    = 0;
            param.weight_label = new int[0];
            param.weight       = new double[0];
            return(param);
        }
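The option keys read above mirror libsvm's command-line flags (-s, -t, -d, -g, -r, -n, -m, -c, -e, -p, -h, -b). A hypothetical libOptions map for a plain C-SVC/RBF setup, assuming the converted LinkedHashMap shim mirrors Java's put/get:
        LinkedHashMap <string, string> libOptions = new LinkedHashMap <string, string>();
        libOptions.put("s", "0");      // svm_type: C_SVC
        libOptions.put("t", "2");      // kernel_type: RBF
        libOptions.put("d", "3");      // degree
        libOptions.put("g", "0.1");    // gamma
        libOptions.put("r", "0");      // coef0
        libOptions.put("n", "0.5");    // nu
        libOptions.put("m", "100");    // cache size in MB
        libOptions.put("c", "1");      // cost C
        libOptions.put("e", "0.001");  // stopping tolerance eps
        libOptions.put("p", "0.1");    // epsilon in the epsilon-SVR loss
        libOptions.put("h", "1");      // shrinking
        libOptions.put("b", "0");      // probability estimates

        svm_parameter param = getLibSvmParameters(libOptions);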
Example #19
        public void SaveModel(string fileName)
        {
            FileStream fs = new FileStream(fileName, FileMode.Create);

            using (BinaryWriter w = new BinaryWriter(fs))
            {
                svm_parameter p = model.param;
                w.Write(p.C);
                w.Write(p.cache_size);
                w.Write(p.coef0);
                w.Write(p.degree);
                w.Write(p.eps);
                w.Write(p.gamma);
                w.Write(p.kernel_type);
                w.Write(p.nr_weight);
                w.Write(p.nu);
                w.Write(p.p);
                w.Write(p.probability);
                w.Write(p.shrinking);
                w.Write(p.svm_type);
                WriteArray(w, p.weight);
                WriteArray(w, p.weight_label);

                w.Write(model.nr_class);
                w.Write(model.l);
                WriteArray(w, model.SV);
                WriteArray(w, model.sv_coef);

                WriteArray(w, model.rho);
                WriteArray(w, model.probA);
                WriteArray(w, model.probB);

                WriteArray(w, model.label);
                WriteArray(w, model.nSV);
            }

            fs.Close();
        }
Example #20
 /// <summary>
 /// Construct the SVM.
 /// </summary>
 ///
 public SupportVectorMachine()
 {
     _paras = new svm_parameter();
 }
Example #21
    private static SupportVectorRegressionSolution SvmGridSearch(IRegressionProblemData problemData, out svm_parameter bestParameters, out int nSv, out double cvMse)
    {
        bestParameters = SupportVectorMachineUtil.GridSearch(out cvMse, problemData, svmParameterRanges, numberOfFolds, shuffleFolds, maximumDegreeOfParallelism);
        double trainingError, testError;
        string svmType      = svmTypes[bestParameters.svm_type];
        string kernelType   = kernelTypes[bestParameters.kernel_type];
        var    svm_solution = SupportVectorRegression.CreateSupportVectorRegressionSolution(problemData, problemData.AllowedInputVariables, svmType, kernelType,
                                                                                            bestParameters.C, bestParameters.nu, bestParameters.gamma, bestParameters.eps, bestParameters.degree, out trainingError, out testError, out nSv);

        return(svm_solution);
    }
Example #22
        internal static double k_function(MaltFeatureNode[] x, svm_node[] y, svm_parameter param)
        {
            switch (param.kernel_type)
            {
            case svm_parameter.LINEAR:
                return(dot(x, y));

            case svm_parameter.POLY:
                return(powi(param.gamma * dot(x, y) + param.coef0, param.degree));

            case svm_parameter.RBF:
            {
                double sum  = 0;
                int    xlen = x.Length;
                int    ylen = y.Length;
                int    i    = 0;
                int    j    = 0;
                while (i < xlen && j < ylen)
                {
                    if (x[i].index == y[j].index)
                    {
                        double d = x[i++].value - y[j++].value;
                        sum += d * d;
                    }
                    else if (x[i].index > y[j].index)
                    {
                        sum += y[j].value * y[j].value;
                        ++j;
                    }
                    else
                    {
                        sum += x[i].value * x[i].value;
                        ++i;
                    }
                }

                while (i < xlen)
                {
                    sum += x[i].value * x[i].value;
                    ++i;
                }

                while (j < ylen)
                {
                    sum += y[j].value * y[j].value;
                    ++j;
                }

                return(Math.Exp(-param.gamma * sum));
            }

            case svm_parameter.SIGMOID:
                return(Math.Tanh(param.gamma * dot(x, y) + param.coef0));

            case svm_parameter.PRECOMPUTED:
                return(x[(int)(y[0].value)].value);

            default:
                return(0);                        // java
            }
        }
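The RBF branch above merges the two sparse vectors in index order to accumulate the squared Euclidean distance before applying exp(-gamma * ||x - y||^2). A small worked check (illustrative values only):
        // x = {1: 1.0, 3: 2.0}, y = {1: 3.0, 2: 4.0}, gamma = 0.5
        // shared index 1: (1 - 3)^2 = 4;  index 3 only in x: 2^2 = 4;  index 2 only in y: 4^2 = 16
        // => ||x - y||^2 = 24 and K(x, y) = exp(-0.5 * 24) ≈ 6.1e-6
        double rbfValue = Math.Exp(-0.5 * 24.0);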
Example #23
        /// <summary>
        /// Save a model.
        /// </summary>
        /// <param name="xmlout">Where to save a model to.</param>
        /// <param name="model">The model to save to.</param>
        public static void SaveModel(WriteXML xmlout, svm_model model)
        {
            if (model != null)
            {
                xmlout.BeginTag(SVMNetworkPersistor.TAG_MODEL);

                svm_parameter param = model.param;

                xmlout.AddProperty(SVMNetworkPersistor.TAG_TYPE_SVM,
                                   svm_type_table[param.svm_type]);
                xmlout.AddProperty(SVMNetworkPersistor.TAG_TYPE_KERNEL,
                                   kernel_type_table[param.kernel_type]);

                if (param.kernel_type == svm_parameter.POLY)
                {
                    xmlout.AddProperty(SVMNetworkPersistor.TAG_DEGREE, param.degree);
                }

                if (param.kernel_type == svm_parameter.POLY ||
                    param.kernel_type == svm_parameter.RBF ||
                    param.kernel_type == svm_parameter.SIGMOID)
                {
                    xmlout.AddProperty(SVMNetworkPersistor.TAG_GAMMA, param.gamma);
                }

                if (param.kernel_type == svm_parameter.POLY ||
                    param.kernel_type == svm_parameter.SIGMOID)
                {
                    xmlout.AddProperty(SVMNetworkPersistor.TAG_COEF0, param.coef0);
                }

                int nr_class = model.nr_class;
                int l        = model.l;

                xmlout.AddProperty(SVMNetworkPersistor.TAG_NUMCLASS, nr_class);
                xmlout.AddProperty(SVMNetworkPersistor.TAG_TOTALSV, l);

                xmlout.AddProperty(SVMNetworkPersistor.TAG_RHO, model.rho, nr_class
                                   * (nr_class - 1) / 2);
                xmlout.AddProperty(SVMNetworkPersistor.TAG_LABEL, model.label,
                                   nr_class);
                xmlout.AddProperty(SVMNetworkPersistor.TAG_PROB_A, model.probA,
                                   nr_class * (nr_class - 1) / 2);
                xmlout.AddProperty(SVMNetworkPersistor.TAG_PROB_B, model.probB,
                                   nr_class * (nr_class - 1) / 2);
                xmlout.AddProperty(SVMNetworkPersistor.TAG_NSV, model.nSV, nr_class);

                xmlout.BeginTag(SVMNetworkPersistor.TAG_DATA);

                double[][]   sv_coef = model.sv_coef;
                svm_node[][] SV      = model.SV;

                StringBuilder line = new StringBuilder();
                for (int i = 0; i < l; i++)
                {
                    line.Length = 0;
                    for (int j = 0; j < nr_class - 1; j++)
                    {
                        line.Append(sv_coef[j][i] + " ");
                    }

                    svm_node[] p = SV[i];
                    //if (param.kernel_type == svm_parameter.PRECOMPUTED)
                    //{
                    //  line.Append("0:" + (int) (p[0].value));
                    //}
                    //else
                    for (int j = 0; j < p.Length; j++)
                    {
                        line.Append(p[j].index + ":" + p[j].value_Renamed + " ");
                    }
                    xmlout.AddProperty(SVMNetworkPersistor.TAG_ROW, line.ToString());
                }

                xmlout.EndTag();
                xmlout.EndTag();
            }
        }
Example #24
    private void  parse_command_line(System.String[] argv)
    {
        int i;

        param = new svm_parameter();
        // default values
        param.svm_type     = svm_parameter.C_SVC;
        param.kernel_type  = svm_parameter.RBF;
        param.degree       = 3;
        param.gamma        = 0;  // 1/k
        param.coef0        = 0;
        param.nu           = 0.5;
        param.cache_size   = 40;
        param.C            = 1;
        param.eps          = 1e-3;
        param.p            = 0.1;
        param.shrinking    = 1;
        param.probability  = 0;
        param.nr_weight    = 0;
        param.weight_label = new int[0];
        param.weight       = new double[0];

        // parse options
        for (i = 0; i < argv.Length; i++)
        {
            if (argv[i][0] != '-')
            {
                break;
            }
            ++i;
            switch (argv[i - 1][1])
            {
            case 's':
                param.svm_type = atoi(argv[i]);
                break;

            case 't':
                param.kernel_type = atoi(argv[i]);
                break;

            case 'd':
                param.degree = atof(argv[i]);
                break;

            case 'g':
                param.gamma = atof(argv[i]);
                break;

            case 'r':
                param.coef0 = atof(argv[i]);
                break;

            case 'n':
                param.nu = atof(argv[i]);
                break;

            case 'm':
                param.cache_size = atof(argv[i]);
                break;

            case 'c':
                param.C = atof(argv[i]);
                break;

            case 'e':
                param.eps = atof(argv[i]);
                break;

            case 'p':
                param.p = atof(argv[i]);
                break;

            case 'h':
                param.shrinking = atoi(argv[i]);
                break;

            case 'b':
                param.probability = atoi(argv[i]);
                break;

            case 'v':
                cross_validation = 1;
                nr_fold          = atoi(argv[i]);
                if (nr_fold < 2)
                {
                    System.Console.Error.Write("n-fold cross validation: n must >= 2\n");
                    exit_with_help();
                }
                break;

            case 'w':
                ++param.nr_weight;
                {
                    int[] old = param.weight_label;
                    param.weight_label = new int[param.nr_weight];
                    Array.Copy(old, 0, param.weight_label, 0, param.nr_weight - 1);
                }

                {
                    double[] old = param.weight;
                    param.weight = new double[param.nr_weight];
                    Array.Copy(old, 0, param.weight, 0, param.nr_weight - 1);
                }

                param.weight_label[param.nr_weight - 1] = atoi(argv[i - 1].Substring(2));
                param.weight[param.nr_weight - 1]       = atof(argv[i]);
                break;

            default:
                System.Console.Error.Write("unknown option\n");
                exit_with_help();
                break;
            }
        }

        // determine filenames

        if (i >= argv.Length)
        {
            exit_with_help();
        }

        input_file_name = argv[i];

        if (i < argv.Length - 1)
        {
            model_file_name = argv[i + 1];
        }
        else
        {
            int p = argv[i].LastIndexOf((System.Char) '/');
            ++p;             // whew...
            model_file_name = argv[i].Substring(p) + ".model";
        }
    }
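The switch above accepts the same flags as libsvm's svm-train tool; a hypothetical argument vector it would parse (the training file and model file come last):
    // -s 0 (C_SVC), -t 2 (RBF), -c 10, -g 0.5, 5-fold cross validation
    string[] argv = { "-s", "0", "-t", "2", "-c", "10", "-g", "0.5", "-v", "5", "train.txt", "train.model" };
    parse_command_line(argv);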
Example #25
        /// <summary>
        /// Construct a SVM network.
        /// </summary>
        ///
        /// <param name="theInputCount">The input count.</param>
        /// <param name="svmType">The type of SVM.</param>
        /// <param name="kernelType">The SVM kernal type.</param>
        public SupportVectorMachine(int theInputCount, SVMType svmType,
                                    KernelType kernelType)
        {
            _inputCount = theInputCount;

            _paras = new svm_parameter();

            switch (svmType)
            {
            case SVMType.SupportVectorClassification:
                _paras.svm_type = svm_parameter.C_SVC;
                break;

            case SVMType.NewSupportVectorClassification:
                _paras.svm_type = svm_parameter.NU_SVC;
                break;

            case SVMType.SupportVectorOneClass:
                _paras.svm_type = svm_parameter.ONE_CLASS;
                break;

            case SVMType.EpsilonSupportVectorRegression:
                _paras.svm_type = svm_parameter.EPSILON_SVR;
                break;

            case SVMType.NewSupportVectorRegression:
                _paras.svm_type = svm_parameter.NU_SVR;
                break;

            default:
                throw new NeuralNetworkError("Invalid svm type");
            }

            switch (kernelType)
            {
            case KernelType.Linear:
                _paras.kernel_type = svm_parameter.LINEAR;
                break;

            case KernelType.Poly:
                _paras.kernel_type = svm_parameter.POLY;
                break;

            case KernelType.RadialBasisFunction:
                _paras.kernel_type = svm_parameter.RBF;
                break;

            case KernelType.Sigmoid:
                _paras.kernel_type = svm_parameter.SIGMOID;
                break;

            /*case Encog.ML.SVM.KernelType.Precomputed:
             * this.paras.kernel_type = Encog.MathUtil.LIBSVM.svm_parameter.PRECOMPUTED;
             *          break;*/
            default:
                throw new NeuralNetworkError("Invalid kernel type");
            }

            // params[i].kernel_type = svm_parameter.RBF;
            _paras.degree       = DefaultDegree;
            _paras.coef0        = 0;
            _paras.nu           = DefaultNu;
            _paras.cache_size   = DefaultCacheSize;
            _paras.C            = 1;
            _paras.eps          = DefaultEps;
            _paras.p            = DefaultP;
            _paras.shrinking    = 1;
            _paras.probability  = 0;
            _paras.nr_weight    = 0;
            _paras.weight_label = new int[0];
            _paras.weight       = new double[0];
            _paras.gamma        = 1.0d / _inputCount;
        }
Example #26
        /// <summary>
        /// Construct a SVM network.
        /// </summary>
        /// <param name="inputCount">The input count.</param>
        /// <param name="outputCount">The output count.</param>
        /// <param name="svmType">The type of SVM.</param>
        /// <param name="kernelType">The SVM kernal type.</param>
        public SVMNetwork(int inputCount, int outputCount, SVMType svmType,
                          KernelType kernelType)
        {
            this.inputCount  = inputCount;
            this.outputCount = outputCount;
            this.kernelType  = kernelType;
            this.svmType     = svmType;

            models     = new svm_model[outputCount];
            parameters = new svm_parameter[outputCount];

            for (int i = 0; i < outputCount; i++)
            {
                parameters[i] = new svm_parameter();

                switch (svmType)
                {
                case SVMType.SupportVectorClassification:
                    parameters[i].svm_type = svm_parameter.C_SVC;
                    break;

                case SVMType.NewSupportVectorClassification:
                    parameters[i].svm_type = svm_parameter.NU_SVC;
                    break;

                case SVMType.SupportVectorOneClass:
                    parameters[i].svm_type = svm_parameter.ONE_CLASS;
                    break;

                case SVMType.EpsilonSupportVectorRegression:
                    parameters[i].svm_type = svm_parameter.EPSILON_SVR;
                    break;

                case SVMType.NewSupportVectorRegression:
                    parameters[i].svm_type = svm_parameter.NU_SVR;
                    break;
                }

                switch (kernelType)
                {
                case KernelType.Linear:
                    parameters[i].kernel_type = svm_parameter.LINEAR;
                    break;

                case KernelType.Poly:
                    parameters[i].kernel_type = svm_parameter.POLY;
                    break;

                case KernelType.RadialBasisFunction:
                    parameters[i].kernel_type = svm_parameter.RBF;
                    break;

                case KernelType.Sigmoid:
                    parameters[i].kernel_type = svm_parameter.SIGMOID;
                    break;
                }

                // parameters[i].kernel_type = svm_parameter.RBF;
                parameters[i].degree       = 3;
                parameters[i].coef0        = 0;
                parameters[i].nu           = 0.5;
                parameters[i].cache_size   = 100;
                parameters[i].C            = 1;
                parameters[i].eps          = 1e-3;
                parameters[i].p            = 0.1;
                parameters[i].shrinking    = 1;
                parameters[i].probability  = 0;
                parameters[i].nr_weight    = 0;
                parameters[i].weight_label = new int[0];
                parameters[i].weight       = new double[0];
                parameters[i].gamma        = 1.0 / inputCount;
            }
        }
Example #27
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: protected void trainInternal(java.util.LinkedHashMap<String, String> libOptions) throws org.maltparser.core.exception.MaltChainedException
        protected internal override void trainInternal(LinkedHashMap <string, string> libOptions)
        {
            try
            {
//JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
//ORIGINAL LINE: final libsvm.svm_problem prob = readProblem(getInstanceInputStreamReader(".ins"), libOptions);
                svm_problem prob = readProblem(getInstanceInputStreamReader(".ins"), libOptions);
//JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
//ORIGINAL LINE: final libsvm.svm_parameter param = getLibSvmParameters(libOptions);
                svm_parameter param = getLibSvmParameters(libOptions);
                if (svm.svm_check_parameter(prob, param) != null)
                {
                    throw new LibException(svm.svm_check_parameter(prob, param));
                }
                Configuration config = Configuration;

                if (config.LoggerInfoEnabled)
                {
                    config.logInfoMessage("Creating LIBSVM model " + getFile(".moo").Name + "\n");
                }
//JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
//ORIGINAL LINE: final java.io.PrintStream out = System.out;
                PrintStream @out = System.out;
//JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
//ORIGINAL LINE: final java.io.PrintStream err = System.err;
                PrintStream err = System.err;
                System.Out = NoPrintStream.NO_PRINTSTREAM;
                System.Err = NoPrintStream.NO_PRINTSTREAM;
                svm_model model = svm.svm_train(prob, param);
                System.Err = err;
                System.Out = @out;
                ObjectOutputStream output = new ObjectOutputStream(new BufferedOutputStream(new FileStream(getFile(".moo").AbsolutePath, FileMode.Create, FileAccess.Write)));
                try
                {
                    output.writeObject(new MaltLibsvmModel(model, prob));
                }
                finally
                {
                    output.close();
                }
                bool saveInstanceFiles = ((bool?)Configuration.getOptionValue("lib", "save_instance_files")).Value;
                if (!saveInstanceFiles)
                {
                    getFile(".ins").delete();
                }
            }
            catch (OutOfMemoryException e)
            {
                throw new LibException("Out of memory. Please increase the Java heap size (-Xmx<size>). ", e);
            }
            catch (ArgumentException e)
            {
                throw new LibException("The LIBSVM learner was not able to redirect Standard Error stream. ", e);
            }
            catch (SecurityException e)
            {
                throw new LibException("The LIBSVM learner cannot remove the instance file. ", e);
            }
            catch (IOException e)
            {
                throw new LibException("The LIBSVM learner cannot save the model file '" + getFile(".mod").AbsolutePath + "'. ", e);
            }
        }
Example #28
        public static double CrossValidate(IDataAnalysisProblemData problemData, svm_parameter parameters, int numberOfFolds, bool shuffleFolds = true)
        {
            var partitions = GenerateSvmPartitions(problemData, numberOfFolds, shuffleFolds);

            return(CalculateCrossValidationPartitions(partitions, parameters));
        }
Example #29
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: private libsvm.svm_problem readProblem(java.io.InputStreamReader isr, java.util.LinkedHashMap<String, String> libOptions) throws org.maltparser.core.exception.MaltChainedException
        private svm_problem readProblem(StreamReader isr, LinkedHashMap <string, string> libOptions)
        {
//JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
//ORIGINAL LINE: final libsvm.svm_problem problem = new libsvm.svm_problem();
            svm_problem problem = new svm_problem();
//JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
//ORIGINAL LINE: final libsvm.svm_parameter param = getLibSvmParameters(libOptions);
            svm_parameter param = getLibSvmParameters(libOptions);
//JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
//ORIGINAL LINE: final org.maltparser.ml.lib.FeatureList featureList = new org.maltparser.ml.lib.FeatureList();
            FeatureList featureList = new FeatureList();

            try
            {
//JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
//ORIGINAL LINE: final java.io.BufferedReader fp = new java.io.BufferedReader(isr);
                StreamReader fp = new StreamReader(isr);

                problem.l = NumberOfInstances;
                problem.x = new svm_node[problem.l][];
                problem.y = new double[problem.l];
                int i = 0;

                while (true)
                {
                    string line = fp.ReadLine();
                    if (ReferenceEquals(line, null))
                    {
                        break;
                    }
                    int y = binariesInstance(line, featureList);
                    if (y == -1)
                    {
                        continue;
                    }
                    try
                    {
                        problem.y[i] = y;
                        problem.x[i] = new svm_node[featureList.size()];
                        int p = 0;
                        for (int k = 0; k < featureList.size(); k++)
                        {
                            MaltFeatureNode x = featureList.get(k);
                            problem.x[i][p]       = new svm_node();
                            problem.x[i][p].value = x.Value;
                            problem.x[i][p].index = x.Index;
                            p++;
                        }
                        i++;
                    }
                    catch (IndexOutOfRangeException e)
                    {
                        throw new LibException("Couldn't read libsvm problem from the instance file. ", e);
                    }
                }
                fp.Close();
                if (param.gamma == 0)
                {
                    param.gamma = 1.0 / featureMap.FeatureCounter;
                }
            }
            catch (IOException e)
            {
                throw new LibException("Couldn't read libsvm problem from the instance file. ", e);
            }
            return(problem);
        }
Example #30
        private static double CalculateCrossValidationPartitions(Tuple <svm_problem, svm_problem>[] partitions, svm_parameter parameters)
        {
            double avgTestMse = 0;
            var    calc       = new OnlineMeanSquaredErrorCalculator();

            foreach (Tuple <svm_problem, svm_problem> tuple in partitions)
            {
                var trainingSvmProblem = tuple.Item1;
                var testSvmProblem     = tuple.Item2;
                var model = svm.svm_train(trainingSvmProblem, parameters);
                calc.Reset();
                for (int i = 0; i < testSvmProblem.l; ++i)
                {
                    calc.Add(testSvmProblem.y[i], svm.svm_predict(model, testSvmProblem.x[i]));
                }
                double mse = calc.ErrorState == OnlineCalculatorError.None ? calc.MeanSquaredError : double.NaN;
                avgTestMse += mse;
            }
            avgTestMse /= partitions.Length;
            return(avgTestMse);
        }