Example no. 1
 /// <summary>
 /// Import the model from an xml file
 /// </summary>
 public void Import(string model_file_name)
 {
     try
     {
         XmlSerializer serializer = new XmlSerializer(typeof(svm_model));
         FileStream    fs         = new FileStream(model_file_name, FileMode.Open);
         this.model = (svm_model)serializer.Deserialize(fs);
         fs.Close();
     }
     catch (Exception ex)
     {
         throw new Exception("An error occurred when importing svm model: " + ex.Message);
     }
 }
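
A minimal usage sketch for the importer above. It assumes the method lives on a wrapper class such as the SvmModelBuilder seen in the later examples, and that the file was produced by serializing an svm_model with XmlSerializer; the path is a placeholder. Wrapping the FileStream in a using block would additionally dispose the stream if Deserialize throws.

    try
    {
        SvmModelBuilder builder = new SvmModelBuilder();   // wrapper type assumed from the later examples
        builder.Import("svm_model.xml");                   // placeholder path to the exported model
    }
    catch (Exception e)
    {
        // Import wraps any failure in a new Exception with a descriptive message
        System.Console.Error.Write(e.Message + "\n");
    }
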
        /// <summary>
        /// Classifies the given text: builds a feature vector over the supplied
        /// vector space and runs it through the given SVM model. The resulting
        /// label, label set and class probabilities are stored in the label,
        /// labels and probs fields.
        /// </summary>
        /// <param name="text">Text to classify.</param>
        /// <param name="vektorraum">Vector space (term hashtable) used to build the feature vector.</param>
        /// <param name="model">Trained svm_model used for classification.</param>
        public void klassifiziere(string text, Hashtable vektorraum, svm_model model)
        {
            FeatureVector fv = new FeatureVector(text);

            fv.Vektorraum = vektorraum;
            fv.ErzeugeVektor();
            LibSVMWrapper lsvmwrap = new LibSVMWrapper();

            lsvmwrap.SVMModel = model;
            lsvmwrap.klazzifiziere(fv.Vektor);
            label  = lsvmwrap.Label;
            labels = lsvmwrap.Labels;
            probs  = lsvmwrap.Wahrscheinlichkeiten;
        }
        public bool LoadFromFile(string fileName)
        {
            if (File.Exists(fileName))
            {
                FileStream fs = new FileStream(fileName, FileMode.Open);
                using (BinaryReader r = new BinaryReader(fs))
                {
                    this.model = new svm_model();

                    svm_parameter p = new svm_parameter();
                    p.C = r.ReadDouble();
                    p.cache_size = r.ReadDouble();
                    p.coef0 = r.ReadDouble();
                    p.degree = r.ReadDouble();
                    p.eps = r.ReadDouble();
                    p.gamma = r.ReadDouble();
                    p.kernel_type = r.ReadInt32();
                    p.nr_weight = r.ReadInt32();
                    p.nu = r.ReadDouble();
                    p.p = r.ReadDouble();
                    p.probability = r.ReadInt32();
                    p.shrinking = r.ReadInt32();
                    p.svm_type = r.ReadInt32();
                    p.weight = ReadDoubleArray(r);
                    p.weight_label = ReadIntArray(r);

                    this.model.param = p;
                    this.model.nr_class = r.ReadInt32();
                    this.model.l = r.ReadInt32();
                    this.model.SV = ReadSvmNodeArray(r);
                    this.model.sv_coef = ReadDouble2DArray(r);
                    this.model.rho = ReadDoubleArray(r);
                    this.model.probA = ReadDoubleArray(r);
                    this.model.probB = ReadDoubleArray(r);
                    this.model.label = ReadIntArray(r);
                    this.model.nSV = ReadIntArray(r);

                    return true;
                }
            }

            this.model = null;
            return false;
        }
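
A short usage sketch for the binary loader, assuming it sits on the same model-holding class and that ReadDoubleArray, ReadIntArray, ReadSvmNodeArray and ReadDouble2DArray are private helpers of that class (they are not shown here). Any matching SaveToFile would have to write the fields in exactly the order they are read above.

    SvmModelBuilder builder = new SvmModelBuilder();    // illustrative owner of LoadFromFile
    if (!builder.LoadFromFile("svm_model.bin"))         // placeholder path
        System.Console.Error.Write("model file not found\n");
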
Example no. 4
	private void  run(System.String[] argv)
	{
		parse_command_line(argv);
		read_problem();
		error_msg = svm.svm_check_parameter(prob, param);
		
		if ((System.Object) error_msg != null)
		{
			System.Console.Error.Write("Error: " + error_msg + "\n");
			System.Environment.Exit(1);
		}
		
		if (cross_validation != 0)
		{
			do_cross_validation();
		}
		else
		{
			model = svm.svm_train(prob, param);
			svm.svm_save_model(model_file_name, model);
		}
	}
Example no. 5
 public static int svm_get_svm_type(svm_model model)
 {
     return model.param.svm_type;
 }
Example no. 6
 public static int svm_get_nr_class(svm_model model)
 {
     return model.nr_class;
 }
Example no. 7
 public static void svm_get_labels(svm_model model, int[] label)
 {
     if (model.label != null)
         for (int i = 0; i < model.nr_class; i++)
             label[i] = model.label[i];
 }
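
The accessors above are typically used together: svm_get_nr_class reports how many entries the label buffer needs before svm_get_labels fills it. A minimal sketch, assuming a trained svm_model named model:

    int nr_class = svm.svm_get_nr_class(model);
    int[] labels = new int[nr_class];      // must hold at least nr_class entries
    svm.svm_get_labels(model, labels);     // copies model.label into the caller's array
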
Example no. 8
        //
        // Interface functions
        //
        public static svm_model svm_train(svm_problem prob, svm_parameter param, TrainingProgressEvent progressEvent = null)
        {
            svm_model model = new svm_model();
            model.param = param;

            if (param.svm_type == svm_parameter.ONE_CLASS || param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR)
            {
                // regression or one-class-svm
                model.nr_class = 2;
                model.label = null;
                model.nSV = null;
                model.probA = null; model.probB = null;
                model.sv_coef = new double[1][];

                if (param.probability == 1 && (param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR))
                {
                    model.probA = new double[1];
                    model.probA[0] = svm_svr_probability(prob, param);
                }

                decision_function f = svm_train_one(prob, param, 0, 0);
                model.rho = new double[1];
                model.rho[0] = f.rho;

                int nSV = 0;
                int i;
                for (i = 0; i < prob.l; i++)
                    if (System.Math.Abs(f.alpha[i]) > 0)
                        ++nSV;
                model.l = nSV;
                model.SV = new svm_node[nSV][];
                model.sv_coef[0] = new double[nSV];
                int j = 0;
                for (i = 0; i < prob.l; i++)
                    if (System.Math.Abs(f.alpha[i]) > 0)
                    {
                        model.SV[j] = prob.x[i];
                        model.sv_coef[0][j] = f.alpha[i];
                        ++j;
                    }
            }
            else
            {
                // classification
                // find out the number of classes
                int l = prob.l;
                int max_nr_class = 16;
                int nr_class = 0;
                int[] label = new int[max_nr_class];
                int[] count = new int[max_nr_class];
                int[] index = new int[l];

                int i;
                for (i = 0; i < l; i++)
                {
                    //UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"'
                    int this_label = (int)prob.y[i];
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (this_label == label[j])
                        {
                            ++count[j];
                            break;
                        }
                    index[i] = j;
                    if (j == nr_class)
                    {
                        if (nr_class == max_nr_class)
                        {
                            max_nr_class *= 2;
                            int[] new_data = new int[max_nr_class];
                            Array.Copy(label, 0, new_data, 0, label.Length);
                            label = new_data;

                            new_data = new int[max_nr_class];
                            Array.Copy(count, 0, new_data, 0, count.Length);
                            count = new_data;
                        }
                        label[nr_class] = this_label;
                        count[nr_class] = 1;
                        ++nr_class;
                    }
                }

                // group training data of the same class

                int[] start = new int[nr_class];
                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + count[i - 1];

                svm_node[][] x = new svm_node[l][];

                for (i = 0; i < l; i++)
                {
                    x[start[index[i]]] = prob.x[i];
                    ++start[index[i]];
                }

                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + count[i - 1];

                // calculate weighted C

                double[] weighted_C = new double[nr_class];
                for (i = 0; i < nr_class; i++)
                    weighted_C[i] = param.C;
                for (i = 0; i < param.nr_weight; i++)
                {
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (param.weight_label[i] == label[j])
                            break;
                    if (j == nr_class)
                        System.Console.Error.Write("warning: class label " + param.weight_label[i] + " specified in weight is not found\n");
                    else
                        weighted_C[j] *= param.weight[i];
                }

                // train k*(k-1)/2 models

                bool[] nonzero = new bool[l];
                for (i = 0; i < l; i++)
                    nonzero[i] = false;
                decision_function[] f = new decision_function[nr_class * (nr_class - 1) / 2];

                double[] probA = null, probB = null;
                if (param.probability == 1)
                {
                    probA = new double[nr_class * (nr_class - 1) / 2];
                    probB = new double[nr_class * (nr_class - 1) / 2];
                }

                int p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        svm_problem sub_prob = new svm_problem();
                        int si = start[i], sj = start[j];
                        int ci = count[i], cj = count[j];
                        sub_prob.l = ci + cj;
                        sub_prob.x = new svm_node[sub_prob.l][];
                        sub_prob.y = new double[sub_prob.l];
                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            sub_prob.x[k] = x[si + k];
                            sub_prob.y[k] = +1;
                        }
                        for (k = 0; k < cj; k++)
                        {
                            sub_prob.x[ci + k] = x[sj + k];
                            sub_prob.y[ci + k] = -1;
                        }

                        if (param.probability == 1)
                        {
                            double[] probAB = new double[2];
                            svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                            probA[p] = probAB[0];
                            probB[p] = probAB[1];
                        }

                        f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);

                        for (k = 0; k < ci; k++)
                            if (!nonzero[si + k] && System.Math.Abs(f[p].alpha[k]) > 0)
                                nonzero[si + k] = true;
                        for (k = 0; k < cj; k++)
                            if (!nonzero[sj + k] && System.Math.Abs(f[p].alpha[ci + k]) > 0)
                                nonzero[sj + k] = true;
                        ++p;
                    }

                // build output

                model.nr_class = nr_class;

                model.label = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                    model.label[i] = label[i];

                model.rho = new double[nr_class * (nr_class - 1) / 2];
                for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    model.rho[i] = f[i].rho;

                if (param.probability == 1)
                {
                    model.probA = new double[nr_class * (nr_class - 1) / 2];
                    model.probB = new double[nr_class * (nr_class - 1) / 2];
                    for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    {
                        model.probA[i] = probA[i];
                        model.probB[i] = probB[i];
                    }
                }
                else
                {
                    model.probA = null;
                    model.probB = null;
                }

                int nnz = 0;
                int[] nz_count = new int[nr_class];
                model.nSV = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    int nSV = 0;
                    for (int j = 0; j < count[i]; j++)
                        if (nonzero[start[i] + j])
                        {
                            ++nSV;
                            ++nnz;
                        }
                    model.nSV[i] = nSV;
                    nz_count[i] = nSV;
                }

                //Debug.WriteLine("Total nSV = " + nnz + "\n");

                model.l = nnz;
                model.SV = new svm_node[nnz][];
                p = 0;
                for (i = 0; i < l; i++)
                    if (nonzero[i])
                        model.SV[p++] = x[i];

                int[] nz_start = new int[nr_class];
                nz_start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

                model.sv_coef = new double[nr_class - 1][];
                for (i = 0; i < nr_class - 1; i++)
                    model.sv_coef[i] = new double[nnz];

                p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        // classifier (i,j): coefficients with
                        // i are in sv_coef[j-1][nz_start[i]...],
                        // j are in sv_coef[i][nz_start[j]...]

                        int si = start[i];
                        int sj = start[j];
                        int ci = count[i];
                        int cj = count[j];

                        int q = nz_start[i];
                        int k;
                        for (k = 0; k < ci; k++)
                            if (nonzero[si + k])
                                model.sv_coef[j - 1][q++] = f[p].alpha[k];
                        q = nz_start[j];
                        for (k = 0; k < cj; k++)
                            if (nonzero[sj + k])
                                model.sv_coef[i][q++] = f[p].alpha[ci + k];
                        ++p;
                    }
            }
            return model;
        }
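
For orientation, a compact end-to-end training sketch in the spirit of the examples: build an svm_problem from dense feature vectors (using the trailing index = -1 sentinel node that Example no. 14 further below also appends), fill in an svm_parameter, validate it with svm_check_parameter, and call svm_train. This is a hedged sketch, not code from any of the examples; the variable names vectors and labels and all numeric values are placeholders.

    // Assumes the svm, svm_problem, svm_parameter and svm_node types from the
    // surrounding examples; vectors is double[][], labels is double[] with one
    // target value per row.
    svm_problem prob = new svm_problem();
    prob.l = vectors.Length;
    prob.y = labels;
    prob.x = new svm_node[prob.l][];
    for (int i = 0; i < prob.l; i++)
    {
        double[] v = vectors[i];
        prob.x[i] = new svm_node[v.Length + 1];
        for (int j = 0; j < v.Length; j++)
        {
            prob.x[i][j] = new svm_node();
            prob.x[i][j].index = j;                    // Example no. 14 uses 0-based feature indices
            prob.x[i][j].value_Renamed = v[j];
        }
        prob.x[i][v.Length] = new svm_node();          // terminating sentinel node
        prob.x[i][v.Length].index = -1;
    }

    svm_parameter param = new svm_parameter();
    param.svm_type    = svm_parameter.C_SVC;
    param.kernel_type = svm_parameter.RBF;
    param.C           = 1.0;                           // placeholder; tune via cross-validation
    param.gamma       = 1.0 / vectors[0].Length;       // common heuristic, not a recommendation
    param.eps         = 1e-3;
    param.cache_size  = 100;
    param.probability = 0;
    param.nr_weight   = 0;

    string err = svm.svm_check_parameter(prob, param);
    if (err != null)
        throw new Exception("invalid SVM parameters: " + err);
    svm_model trained = svm.svm_train(prob, param);
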
Example no. 9
        public static void svm_predict_values(svm_model model, svm_node[] x, double[] dec_values)
        {
            if (model.param.svm_type == svm_parameter.ONE_CLASS || model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR)
            {
                double[] sv_coef = model.sv_coef[0];
                double sum = 0;
                for (int i = 0; i < model.l; i++)
                    sum += sv_coef[i] * Kernel.k_function(x, model.SV[i], model.param);
                sum -= model.rho[0];
                dec_values[0] = sum;
            }
            else
            {
                int i;
                int nr_class = model.nr_class;
                int l = model.l;

                double[] kvalue = new double[l];
                for (i = 0; i < l; i++)
                    kvalue[i] = Kernel.k_function(x, model.SV[i], model.param);

                int[] start = new int[nr_class];
                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + model.nSV[i - 1];

                int p = 0;
                int pos = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        double sum = 0;
                        int si = start[i];
                        int sj = start[j];
                        int ci = model.nSV[i];
                        int cj = model.nSV[j];

                        int k;
                        double[] coef1 = model.sv_coef[j - 1];
                        double[] coef2 = model.sv_coef[i];
                        for (k = 0; k < ci; k++)
                            sum += coef1[si + k] * kvalue[si + k];
                        for (k = 0; k < cj; k++)
                            sum += coef2[sj + k] * kvalue[sj + k];
                        sum -= model.rho[p++];
                        dec_values[pos++] = sum;
                    }
            }
        }
Example no. 10
        public static double svm_predict(svm_model model, svm_node[] x)
        {
            if (model.param.svm_type == svm_parameter.ONE_CLASS || model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR)
            {
                double[] res = new double[1];
                svm_predict_values(model, x, res);

                if (model.param.svm_type == svm_parameter.ONE_CLASS)
                    return (res[0] > 0) ? 1 : -1;
                else
                    return res[0];
            }
            else
            {
                int i;
                int nr_class = model.nr_class;
                double[] dec_values = new double[nr_class * (nr_class - 1) / 2];
                svm_predict_values(model, x, dec_values);

                int[] vote = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                    vote[i] = 0;
                int pos = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        if (dec_values[pos++] > 0)
                            ++vote[i];
                        else
                            ++vote[j];
                    }

                int vote_max_idx = 0;
                for (i = 1; i < nr_class; i++)
                    if (vote[i] > vote[vote_max_idx])
                        vote_max_idx = i;
                return model.label[vote_max_idx];
            }
        }
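
On the prediction side, the input must be an svm_node[] built the same way as the training rows. A hedged sketch that reuses the trained model from the training sketch after Example no. 8; the feature values are placeholders.

    double[] features = new double[] { 0.5, 1.25, 0.0 };   // placeholder feature vector
    svm_node[] x = new svm_node[features.Length + 1];
    for (int j = 0; j < features.Length; j++)
    {
        x[j] = new svm_node();
        x[j].index = j;                                     // same indexing convention as at training time
        x[j].value_Renamed = features[j];
    }
    x[features.Length] = new svm_node();                    // terminating sentinel node
    x[features.Length].index = -1;

    double predictedLabel = svm.svm_predict(trained, x);    // class label (or regression value for SVR)
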
 public SvmModelBuilder()
 {
     this.model = null;
 }
Example no. 12
 /// <summary>
 /// Train the SVM and store the resulting model
 /// </summary>
 public void Train()
 {
     this.model = svm.svm_train(prob, param);
 }
Example no. 13
 /// <summary>
 /// Import the model from an xml file
 /// </summary>
 public void Import(string model_file_name)
 {
     try
     {
         XmlSerializer serializer = new XmlSerializer(typeof(svm_model));
         FileStream fs = new FileStream(model_file_name, FileMode.Open);
         this.model = (svm_model) serializer.Deserialize(fs);
         fs.Close();
     }
     catch (Exception ex)
     {
         throw new Exception("An error occurred when importing svm model: " + ex.Message);
     }
 }
Example no. 14
        public void TrainLibSVM(double[][] vektoren, double[] labels, double currentC, double currentG, out int errorCount)
        {
            int         nrdocs = vektoren.Length;
            svm_problem prob   = new svm_problem();

            prob.l = vektoren.Length - 1;
            prob.y = labels;
            svm_node[][] nodes = new svm_node[nrdocs][];

            for (int i = 0; i < vektoren.Length; i++)
            {
                int dim = vektoren[i].Length;

                nodes[i] = new svm_node[dim + 1];

                for (int j = 0; j < dim; j++)
                {
                    svm_node n = new svm_node();
                    n.index         = j;
                    n.value_Renamed = vektoren[i][j];

                    nodes[i][j] = n;
                }
                svm_node ln = new svm_node();
                ln.index         = -1;
                ln.value_Renamed = 0;
                nodes[i][dim]    = ln;
            }

            prob.x = nodes;

            svm_parameter param = new svm_parameter();

            param.cache_size = 256.0;
            param.C          = 1000.0;
            //param.weight = new double[] { 1.0, 1.0, 1.0, 1.0, 1.0 };
            //param.weight_label = new int[] { 1, 1, 1, 1, 1 };
            param.svm_type    = svm_parameter.C_SVC;
            param.kernel_type = svm_parameter.SIGMOID;
            param.gamma       = 0.00000001;
            param.eps         = 0.0001;
            //param.nr_weight = 0;
            param.probability = 1;

            //double[] cs;
            //double[] gs;

            double[] cergs     = new double[labels.Length];
            int      minfehler = labels.Length;
            int      fehler    = 0;
            double   c         = 0.0;
            double   g         = 0.0;

            #region Parameter tuning
            //cs = new double[] { Math.Pow(2.0, -15.0), Math.Pow(2.0, -11.0), Math.Pow(2.0, -9.0), Math.Pow(2.0, -7.0), Math.Pow(2.0, -5.0), Math.Pow(2.0, -3.0), Math.Pow(2.0, -1.0), Math.Pow(2.0, 1.0), Math.Pow(2.0, 3.0), Math.Pow(2.0, 5.0), Math.Pow(2.0, 7.0), Math.Pow(2.0, 12.0), Math.Pow(2.0, 15.0) };
            //gs = new double[] { Math.Pow(2.0, -15.0), Math.Pow(2.0, -12.0), Math.Pow(2.0, -9.0), Math.Pow(2.0, -7.0), Math.Pow(2.0, -5.0), Math.Pow(2.0, -3.0), Math.Pow(2.0, -1.0), Math.Pow(2.0, 1.0), Math.Pow(2.0, 3.0) };
            //cs = new double[] { Math.Pow(2.0, -3.0), Math.Pow(2.0, -1.0), Math.Pow(2.0, 1.0), Math.Pow(2.0, 3.0), Math.Pow(2.0, 5.0), Math.Pow(2.0, 7.0), Math.Pow(2.0, 12.0) };
            //gs = new double[] { Math.Pow(2.0, -7.0), Math.Pow(2.0, -5.0), Math.Pow(2.0, -3.0), Math.Pow(2.0, -1.0), Math.Pow(2.0, 1.0), Math.Pow(2.0, 3.0) };

            //for (int i = 0; i < cs.Length; i++)
            //{
            //    param.C = cs[i];

            //    for (int j = 0; j < gs.Length; j++)
            //    {
            //        fehler = 0;
            //        param.gamma = gs[j];
            //        string res = svm.svm_check_parameter(prob, param);
            //        if (res == null)
            //        {
            //            svm.svm_cross_validation(prob, param, vektoren.Length/4, cergs);

            //            for (int k = 0; k < labels.Length; k++)
            //            {
            //                if (cergs[k] != labels[k])
            //                    fehler++;
            //            }
            //            if (fehler < minfehler)
            //            {
            //                minfehler = fehler;
            //                c = param.C;
            //                g = param.gamma;
            //            }
            //        }
            //    }
            //}

            param.C     = currentC;
            fehler      = 0;
            param.gamma = currentG;
            string res = svm.svm_check_parameter(prob, param);
            if (res == null)
            {
                svm.svm_cross_validation(prob, param, vektoren.Length / 4, cergs);

                for (int k = 0; k < labels.Length; k++)
                {
                    if (cergs[k] != labels[k])
                    {
                        fehler++;
                    }
                }
                if (fehler < minfehler)
                {
                    minfehler = fehler;
                    c         = param.C;
                    g         = param.gamma;
                }
            }

            #endregion

            #region Fine tuning
            //cs = new double[] { c * 0.3, c * 0.4, c * 0.5, c * 0.6, c * 0.7, c * 0.8, c * 0.9, c, c * 2.0, c * 3.0, c * 4.0, c * 5.0, c * 6.0 };
            //gs = new double[] { g * 0.5, g * 0.6, g * 0.7, g * 0.8, g * 0.9, g, g * 2.0, g * 3.0, g * 4.0 };
            double[] csF = new double[] { c * 0.6, c * 0.7, c * 0.8, c * 0.9, c, c * 2.0, c * 3.0 };
            double[] gsF = new double[] { g * 0.7, g * 0.8, g * 0.9, g, g * 2.0, g * 3.0 };

            for (int i = 0; i < csF.Length; i++)
            {
                param.C = csF[i];

                for (int j = 0; j < gsF.Length; j++)
                {
                    fehler      = 0;
                    param.gamma = gsF[j];
                    res         = svm.svm_check_parameter(prob, param);
                    if (res == null)
                    {
                        svm.svm_cross_validation(prob, param, vektoren.Length / 4, cergs);

                        for (int k = 0; k < labels.Length; k++)
                        {
                            if (cergs[k] != labels[k])
                            {
                                fehler++;
                            }
                        }
                        if (fehler < minfehler)
                        {
                            minfehler = fehler;
                            c         = param.C;
                            g         = param.gamma;
                        }
                    }
                    //Thread.Sleep(1);
                }
                //Thread.Sleep(10);
            }
            #endregion

            #region Extra-fine tuning
            //cs = new double[] { c - 7.0, c - 6.0, c - 5.0, c - 4.0, c - 3.0, c - 2.0, c - 1.0, c, c + 1.0, c + 2.0, c + 3.0, c + 4.0, c + 5.0 };
            //gs = new double[] { g - 5.0, g - 4.0, g - 3.0, g - 2.0, g - 1.0, g, g + 1.0, g + 2.0, g + 3.0 };

            /*cs = new double[] { c - 1.0, c - 0.3, c - 0.1, c, c + 0.1, c + 0.3, c + 1.0, };
             * gs = new double[] { g - 1.0, g - 0.3, g - 0.1, g, g + 0.1, g + 0.3, g + 1.0 };
             * for (int i = 0; i < cs.Length; i++)
             * {
             *  param.C = cs[i];
             *
             *  for (int j = 0; j < gs.Length; j++)
             *  {
             *      fehler = 0;
             *      param.gamma = gs[j];
             *      string res = svm.svm_check_parameter(prob, param);
             *      if (res == null)
             *      {
             *          svm.svm_cross_validation(prob, param, 6, cergs);
             *
             *          for (int k = 0; k < labels.Length; k++)
             *          {
             *              if (cergs[k] != labels[k])
             *                  fehler++;
             *          }
             *          if (fehler < minfehler)
             *          {
             *              minfehler = fehler;
             *              c = param.C;
             *              g = param.gamma;
             *          }
             *      }
             *  }
             * }*/
            #endregion


            param.C     = c;
            param.gamma = g;

            this._model = new svm_model();
            this._model = svm.svm_train(prob, param);

            int      anzKlassen = svm.svm_get_nr_class(this._model);
            double[] probs      = new double[anzKlassen];

            double erg;
            erg = svm.svm_predict_probability(this._model, nodes[0], probs);
            //erg = svm.svm_predict_probability(this._model, nodes[11], probs);
            //klazzifiziere(this.testvektor);
            //klazzifiziere(vektoren[6]);

            errorCount = minfehler;
        }
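
A usage sketch for TrainLibSVM. "trainer" stands for whatever object owns the method, vektoren (double[][]) and labels (double[]) are the training data, and the seed values for C and gamma simply mirror the defaults hard-coded above; all of these are placeholders. The out parameter returns the cross-validation error count of the best (C, gamma) pair found during fine tuning.

    int errorCount;
    trainer.TrainLibSVM(vektoren, labels, 1000.0, 1e-8, out errorCount);
    System.Console.Out.Write("cross-validation errors of the best (C, gamma): " + errorCount + "\n");
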
Example no. 15
 public static double svm_get_svr_probability(svm_model model)
 {
     if ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) && model.probA != null)
         return model.probA[0];
     else
     {
         System.Console.Error.Write("Model doesn't contain information for SVR probability inference\n");
         return 0;
     }
 }
Example no. 16
        public static svm_model svm_load_model(System.String model_file_name)
        {
            //UPGRADE_TODO: The differences in the expected value  of parameters for constructor 'java.io.BufferedReader.BufferedReader'  may cause compilation errors.  'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1092_3"'
            //UPGRADE_WARNING: At least one expression was used more than once in the target code. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1181_3"'
            //UPGRADE_TODO: Constructor 'java.io.FileReader.FileReader' was converted to 'System.IO.StreamReader' which has a different behavior. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1073_3"'
            /*Original System.IO.StreamReader fp = new System.IO.StreamReader(new System.IO.StreamReader(model_file_name, System.Text.Encoding.Default).BaseStream, new System.IO.StreamReader(model_file_name, System.Text.Encoding.Default).CurrentEncoding);*/
            System.IO.StreamReader fp = new System.IO.StreamReader(new System.IO.FileStream(model_file_name, System.IO.FileMode.Open));

            // read parameters

            svm_model model = new svm_model();
            svm_parameter param = new svm_parameter();
            model.param = param;
            model.rho = null;
            model.probA = null;
            model.probB = null;
            model.label = null;
            model.nSV = null;

            while (true)
            {
                System.String cmd = fp.ReadLine();
                System.String arg = cmd.Substring(cmd.IndexOf((System.Char) ' ') + 1);

                if (cmd.StartsWith("svm_type"))
                {
                    int i;
                    for (i = 0; i < svm_type_table.Length; i++)
                    {
                        if (arg.IndexOf(svm_type_table[i]) != - 1)
                        {
                            param.svm_type = i;
                            break;
                        }
                    }
                    if (i == svm_type_table.Length)
                    {
                        System.Console.Error.Write("unknown svm type.\n");
                        return null;
                    }
                }
                else if (cmd.StartsWith("kernel_type"))
                {
                    int i;
                    for (i = 0; i < kernel_type_table.Length; i++)
                    {
                        if (arg.IndexOf(kernel_type_table[i]) != - 1)
                        {
                            param.kernel_type = i;
                            break;
                        }
                    }
                    if (i == kernel_type_table.Length)
                    {
                        System.Console.Error.Write("unknown kernel function.\n");
                        return null;
                    }
                }
                else if (cmd.StartsWith("degree"))
                    param.degree = atof(arg);
                else if (cmd.StartsWith("gamma"))
                    param.gamma = atof(arg);
                else if (cmd.StartsWith("coef0"))
                    param.coef0 = atof(arg);
                else if (cmd.StartsWith("nr_class"))
                    model.nr_class = atoi(arg);
                else if (cmd.StartsWith("total_sv"))
                    model.l = atoi(arg);
                else if (cmd.StartsWith("rho"))
                {
                    int n = model.nr_class * (model.nr_class - 1) / 2;
                    model.rho = new double[n];
                    SupportClass.Tokenizer st = new SupportClass.Tokenizer(arg);
                    for (int i = 0; i < n; i++)
                        model.rho[i] = atof(st.NextToken());
                }
                else if (cmd.StartsWith("label"))
                {
                    int n = model.nr_class;
                    model.label = new int[n];
                    SupportClass.Tokenizer st = new SupportClass.Tokenizer(arg);
                    for (int i = 0; i < n; i++)
                        model.label[i] = atoi(st.NextToken());
                }
                else if (cmd.StartsWith("probA"))
                {
                    int n = model.nr_class * (model.nr_class - 1) / 2;
                    model.probA = new double[n];
                    SupportClass.Tokenizer st = new SupportClass.Tokenizer(arg);
                    for (int i = 0; i < n; i++)
                        model.probA[i] = atof(st.NextToken());
                }
                else if (cmd.StartsWith("probB"))
                {
                    int n = model.nr_class * (model.nr_class - 1) / 2;
                    model.probB = new double[n];
                    SupportClass.Tokenizer st = new SupportClass.Tokenizer(arg);
                    for (int i = 0; i < n; i++)
                        model.probB[i] = atof(st.NextToken());
                }
                else if (cmd.StartsWith("nr_sv"))
                {
                    int n = model.nr_class;
                    model.nSV = new int[n];
                    SupportClass.Tokenizer st = new SupportClass.Tokenizer(arg);
                    for (int i = 0; i < n; i++)
                        model.nSV[i] = atoi(st.NextToken());
                }
                else if (cmd.StartsWith("SV"))
                {
                    break;
                }
                else
                {
                    System.Console.Error.Write("unknown text in model file\n");
                    return null;
                }
            }

            // read sv_coef and SV

            int m = model.nr_class - 1;
            int l = model.l;
            model.sv_coef = new double[m][];
            for (int i = 0; i < m; i++)
            {
                model.sv_coef[i] = new double[l];
            }
            model.SV = new svm_node[l][];

            for (int i = 0; i < l; i++)
            {
                System.String line = fp.ReadLine();
                SupportClass.Tokenizer st = new SupportClass.Tokenizer(line, " \t\n\r\f:");

                for (int k = 0; k < m; k++)
                    model.sv_coef[k][i] = atof(st.NextToken());
                int n = st.Count / 2;
                model.SV[i] = new svm_node[n];
                for (int j = 0; j < n; j++)
                {
                    model.SV[i][j] = new svm_node();
                    model.SV[i][j].index = atoi(st.NextToken());
                    model.SV[i][j].value = atof(st.NextToken());
                }
            }

            fp.Close();
            return model;
        }
        public void TrainModel(double[] labels, double[][] mlArray)
        {
            SvmProblemBuilder builder = new SvmProblemBuilder(labels, mlArray);
            svm_problem problem = builder.CreateProblem();

            svm_parameter param = new svm_parameter()
            {
                svm_type = 0,
                kernel_type = 0,
                cache_size = 512,
                eps = 0.1,
                C = 10,
                nr_weight = 0,
                weight_label = null,
                weight = null
            };

            this.model = svm.svm_train(problem, param);
        }
Example no. 18
        public static double svm_predict_probability(svm_model model, svm_node[] x, double[] prob_estimates)
        {
            if ((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) && model.probA != null && model.probB != null)
            {
                int i;
                int nr_class = model.nr_class;
                double[] dec_values = new double[nr_class * (nr_class - 1) / 2];
                svm_predict_values(model, x, dec_values);

                double min_prob = 1e-7;
                double[][] tmpArray = new double[nr_class][];
                for (int i2 = 0; i2 < nr_class; i2++)
                {
                    tmpArray[i2] = new double[nr_class];
                }
                double[][] pairwise_prob = tmpArray;

                int k = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        pairwise_prob[i][j] = System.Math.Min(System.Math.Max(sigmoid_predict(dec_values[k], model.probA[k], model.probB[k]), min_prob), 1 - min_prob);
                        pairwise_prob[j][i] = 1 - pairwise_prob[i][j];
                        k++;
                    }
                multiclass_probability(nr_class, pairwise_prob, prob_estimates);

                int prob_max_idx = 0;
                for (i = 1; i < nr_class; i++)
                    if (prob_estimates[i] > prob_estimates[prob_max_idx])
                        prob_max_idx = i;
                return model.label[prob_max_idx];
            }
            else
                return svm_predict(model, x);
        }
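
Callers must allocate prob_estimates themselves, one slot per class, and the model must carry probability information (param.probability = 1 at training time; Example no. 21 below shows the corresponding check). A hedged sketch, reusing trained and x from the earlier sketches:

    if (svm.svm_check_probability_model(trained) == 1)
    {
        double[] probEstimates = new double[svm.svm_get_nr_class(trained)];
        double bestLabel = svm.svm_predict_probability(trained, x, probEstimates);
        // probEstimates[i] is the estimated probability of the class model.label[i]
    }
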
Example no. 19
	private static void  predict(System.IO.StreamReader input, System.IO.BinaryWriter output, svm_model model, int predict_probability)
	{
		int correct = 0;
		int total = 0;
		double error = 0;
		double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
		
		int svm_type = svm.svm_get_svm_type(model);
		int nr_class = svm.svm_get_nr_class(model);
		int[] labels = new int[nr_class];
		double[] prob_estimates = null;
		
		if (predict_probability == 1)
		{
			if (svm_type == svm_parameter.EPSILON_SVR || svm_type == svm_parameter.NU_SVR)
			{
				System.Console.Out.Write("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + svm.svm_get_svr_probability(model) + "\n");
			}
			else
			{
				svm.svm_get_labels(model, labels);
				prob_estimates = new double[nr_class];
				//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
				output.Write("labels");
				for (int j = 0; j < nr_class; j++)
				{
					//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
					output.Write(" " + labels[j]);
				}
				//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
				output.Write("\n");
			}
		}
		while (true)
		{
			System.String line = input.ReadLine();
			if ((System.Object) line == null)
				break;
			
			SupportClass.Tokenizer st = new SupportClass.Tokenizer(line, " \t\n\r\f:");
			
			double target = atof(st.NextToken());
			int m = st.Count / 2;
			svm_node[] x = new svm_node[m];
			for (int j = 0; j < m; j++)
			{
				x[j] = new svm_node();
				x[j].index = atoi(st.NextToken());
				x[j].value_Renamed = atof(st.NextToken());
			}
			
			double v;
			if (predict_probability == 1 && (svm_type == svm_parameter.C_SVC || svm_type == svm_parameter.NU_SVC))
			{
				v = svm.svm_predict_probability(model, x, prob_estimates);
				//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
				output.Write(v + " ");
				for (int j = 0; j < nr_class; j++)
				{
					//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
					output.Write(prob_estimates[j] + " ");
				}
				//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
				output.Write("\n");
			}
			else
			{
				v = svm.svm_predict(model, x);
				//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
				output.Write(v + "\n");
			}
			
			if (v == target)
				++correct;
			error += (v - target) * (v - target);
			sumv += v;
			sumy += target;
			sumvv += v * v;
			sumyy += target * target;
			sumvy += v * target;
			++total;
		}
		System.Console.Out.Write("Accuracy = " + (double) correct / total * 100 + "% (" + correct + "/" + total + ") (classification)\n");
		System.Console.Out.Write("Mean squared error = " + error / total + " (regression)\n");
		System.Console.Out.Write("Squared correlation coefficient = " + ((total * sumvy - sumv * sumy) * (total * sumvy - sumv * sumy)) / ((total * sumvv - sumv * sumv) * (total * sumyy - sumy * sumy)) + " (regression)\n");
	}
Example no. 20
        public static void svm_save_model(System.String model_file_name, svm_model model)
        {
            //UPGRADE_TODO: Class 'java.io.DataOutputStream' was converted to 'System.IO.BinaryWriter' which has a different behavior. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1073_javaioDataOutputStream_3"'
            //UPGRADE_TODO: Constructor 'java.io.FileOutputStream.FileOutputStream' was converted to 'System.IO.FileStream.FileStream' which has a different behavior. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1073_javaioFileOutputStreamFileOutputStream_javalangString_3"'
            /* Original System.IO.BinaryWriter fp = new System.IO.BinaryWriter(new System.IO.FileStream(model_file_name, System.IO.FileMode.Create));*/
            System.IO.StreamWriter fp = new System.IO.StreamWriter(new System.IO.FileStream(model_file_name, System.IO.FileMode.Create));

            svm_parameter param = model.param;

            fp.Write("svm_type " + svm_type_table[param.svm_type] + "\n");
            fp.Write("kernel_type " + kernel_type_table[param.kernel_type] + "\n");

            if (param.kernel_type == svm_parameter.POLY)
                fp.Write("degree " + param.degree + "\n");

            if (param.kernel_type == svm_parameter.POLY || param.kernel_type == svm_parameter.RBF || param.kernel_type == svm_parameter.SIGMOID)
                fp.Write("gamma " + param.gamma + "\n");

            if (param.kernel_type == svm_parameter.POLY || param.kernel_type == svm_parameter.SIGMOID)
                fp.Write("coef0 " + param.coef0 + "\n");

            int nr_class = model.nr_class;
            int l = model.l;
            fp.Write("nr_class " + nr_class + "\n");
            fp.Write("total_sv " + l + "\n");

            {
                fp.Write("rho");
                for (int i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    fp.Write(" " + model.rho[i]);
                fp.Write("\n");
            }

            if (model.label != null)
            {
                fp.Write("label");
                for (int i = 0; i < nr_class; i++)
                    fp.Write(" " + model.label[i]);
                fp.Write("\n");
            }

            if (model.probA != null)
            // regression has probA only
            {
                fp.Write("probA");
                for (int i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    fp.Write(" " + model.probA[i]);
                fp.Write("\n");
            }
            if (model.probB != null)
            {
                fp.Write("probB");
                for (int i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    fp.Write(" " + model.probB[i]);
                fp.Write("\n");
            }

            if (model.nSV != null)
            {
                fp.Write("nr_sv");
                for (int i = 0; i < nr_class; i++)
                    fp.Write(" " + model.nSV[i]);
                fp.Write("\n");
            }

            fp.Write("SV\n");
            double[][] sv_coef = model.sv_coef;
            svm_node[][] SV = model.SV;

            for (int i = 0; i < l; i++)
            {
                for (int j = 0; j < nr_class - 1; j++)
                    fp.Write(sv_coef[j][i] + " ");

                svm_node[] p = SV[i];
                for (int j = 0; j < p.Length; j++)
                    fp.Write(p[j].index + ":" + p[j].value + " ");
                fp.Write("\n");
            }

            fp.Close();
        }
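
svm_save_model writes a plain-text format (keyword/value header lines followed by an SV section with one "coef index:value ..." row per support vector), and svm_load_model in Example no. 16 above parses the same format back. A hedged round-trip sketch with a placeholder file name:

    svm.svm_save_model("trained.model", trained);            // persist the trained model
    svm_model reloaded = svm.svm_load_model("trained.model");
    if (reloaded == null)
        System.Console.Error.Write("could not parse the saved model file\n");
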
Example no. 21
 public static int svm_check_probability_model(svm_model model)
 {
     if (((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) && model.probA != null && model.probB != null) || ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) && model.probA != null))
         return 1;
     else
         return 0;
 }
Example no. 22
    private static ArrayList[] predict(System.IO.StreamReader input, System.IO.StreamWriter output, svm_model model, int predict_probability)
    {
        //int correct = 0;
        //int total = 0;
        //double error = 0;
        //double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
        ArrayList[] arrResult = new ArrayList[2]; // the first array holds the actual target values, the second holds the predicted values
        arrResult[0] = new ArrayList();
        arrResult[1] = new ArrayList();

        int svm_type = svm.svm_get_svm_type(model);
        int nr_class = svm.svm_get_nr_class(model);
        int[] labels = new int[nr_class];
        double[] prob_estimates = null;

        if (predict_probability == 1)
        {
            if (svm_type == svm_parameter.EPSILON_SVR || svm_type == svm_parameter.NU_SVR)
            {
                System.Console.Out.Write("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + svm.svm_get_svr_probability(model) + "\n");
            }
            else
            {
                svm.svm_get_labels(model, labels);
                prob_estimates = new double[nr_class];
                //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                output.Write("labels");
                for (int j = 0; j < nr_class; j++)
                {
                    //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                    output.Write(" " + labels[j]);
                }
                //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                output.Write("\n");
            }
        }
        #region [Added] Read the preprocessing info from the first line of the test file
        System.String strPreprocess = input.ReadLine();
        #endregion

        while (true)
        {
            System.String line = input.ReadLine();
            if ((System.Object)line == null)
                break;

            SupportClass.Tokenizer st = new SupportClass.Tokenizer(line, " \t\n\r\f:");

            double target = atof(st.NextToken());
            int m = st.Count / 2;
            svm_node[] x = new svm_node[m];
            for (int j = 0; j < m; j++)
            {
                x[j] = new svm_node();
                x[j].index = atoi(st.NextToken());
                x[j].value_Renamed = atof(st.NextToken());
            }

            double v;
            if (predict_probability == 1 && (svm_type == svm_parameter.C_SVC || svm_type == svm_parameter.NU_SVC))
            {
                v = svm.svm_predict_probability(model, x, prob_estimates);
                //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                output.Write(v + " ");
                for (int j = 0; j < nr_class; j++)
                {
                    //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                    output.Write(prob_estimates[j] + " ");
                }
                //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                output.Write("\n");
            }
            else
            {
                v = svm.svm_predict(model, x);
                #region [Added] Convert back to the original data based on the preprocessing method
                string[] strItems = strPreprocess.Split(' ');
                double dblMin;
                double dblMax;
                double dblDiff;
                switch (strItems[0])
                {
                    case "ScaleByMinMax":
                        dblMin = Convert.ToDouble(strItems[1]);
                        dblMax = Convert.ToDouble(strItems[2]);
                        dblDiff = dblMax - dblMin;
                        v = v * dblDiff + dblMin;
                        target = target * dblDiff + dblMin;
                        break;
                    default:
                        break;
                }
                #endregion
                arrResult[0].Add(target);
                arrResult[1].Add(v);
                //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                output.Write(target + " " + v + "\n");
            }
            //#region [Added] Convert back to the original data based on the preprocessing method
            //string[] strItems = strPreprocess.Split(' ');
            //switch (strItems[0])
            //{
            //    case "Scale(0,1)":
            //        double dblMin = Convert.ToDouble(strItems[1]);
            //        double dblMax = Convert.ToDouble(strItems[2]);
            //        double dblDiff = dblMax - dblMin;
            //        v = (v - 0.15) * dblDiff / 0.7 + dblMin;
            //        target = (target - 0.15) * dblDiff / 0.7 + dblMin;
            //        break;
            //    default:
            //        break;
            //}
            //#endregion
            //arrResult[0].Add(target);
            //arrResult[1].Add(v);
            //if (v == target)
            //    ++correct;
            //error += (v - target) * (v - target);
            //sumv += v;
            //sumy += target;
            //sumvv += v * v;
            //sumyy += target * target;
            //sumvy += v * target;
            //++total;
        }
        return arrResult;
        //System.Console.Out.Write("Accuracy = " + (double) correct / total * 100 + "% (" + correct + "/" + total + ") (classification)\n");
        //System.Console.Out.Write("Mean squared error = " + error / total + " (regression)\n");
        //System.Console.Out.Write("Squared correlation coefficient = " + ((total * sumvy - sumv * sumy) * (total * sumvy - sumv * sumy)) / ((total * sumvv - sumv * sumv) * (total * sumyy - sumy * sumy)) + " (regression)\n");
    }
Example no. 23
 /// <summary>
 /// Train the SVM and store the resulting model
 /// </summary>
 public void Train()
 {
     this.model = svm.svm_train(prob, param);
 }