Code Example #1
File: SVC.cs  Project: heulvxunda/libsvm.net
        public Dictionary<int, double> PredictProbabilities(svm_node[] x)
        {
            var probabilities = new Dictionary<int, double>();
            int nr_class = model.nr_class;

            double[] prob_estimates = new double[nr_class];
            int[] labels = new int[nr_class];
            svm.svm_get_labels(model, labels);

            var v = svm.svm_predict_probability(this.model, x, prob_estimates);
            for (int i = 0; i < nr_class; i++)
                probabilities.Add(labels[i], prob_estimates[i]);

            return probabilities;
        }
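A minimal usage sketch for the method above (hypothetical: "classifier" stands for an already-trained SVC instance, and the feature indices and values are invented for illustration):

        // Hypothetical usage sketch: build a sparse feature vector and query per-class probabilities.
        var features = new svm_node[]
        {
            new svm_node() { index = 1, value = 0.4 },
            new svm_node() { index = 2, value = -1.3 },
        };
        Dictionary<int, double> probabilities = classifier.PredictProbabilities(features);
        foreach (var pair in probabilities)
            Console.WriteLine("label " + pair.Key + ": p = " + pair.Value);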
Code Example #2
File: SVC.cs  Project: Jungwon/libsvm.net
        public override double Predict(svm_node[] x)
        {
            if (model == null)
                throw new Exception("No trained svm model");

            return svm.svm_predict(model, x);
        }
Code Example #3
        public void klazzifiziere(double[] wortvektor)
        {
            int dim = wortvektor.Length;

            svm_node[] nodes = new svm_node[dim + 1];

            for (int i = 0; i < dim; i++)
            {
                svm_node n = new svm_node();
                n.index         = i;
                n.value_Renamed = wortvektor[i];

                nodes[i] = n;
            }
            svm_node ln = new svm_node();

            ln.index         = -1;
            ln.value_Renamed = 0;
            nodes[dim]       = ln;

            int anzKlassen = svm.svm_get_nr_class(this._model);

            labels = new int[anzKlassen];
            probs  = new double[anzKlassen];

            svm.svm_get_labels(this._model, labels);
            erg = svm.svm_predict_probability(this._model, nodes, probs);
        }
Code Example #4
File: C_SVC_Tests.cs  Project: Jungwon/libsvm.net
 private void checkXOR(SVM svm)
 {
     var predictions = new double[2, 2];
     for (int i = 0; i < 2; i++)
     {
         for (int j = 0; j < 2; j++)
         {
             var A = new svm_node() {index = 1, value = i == 0 ? -1 : 1};
             var B = new svm_node() {index = 2, value = j == 0 ? -1 : 1};
             predictions[i, j] = svm.Predict(new svm_node[] {A, B});
         }
     }
     Assert.AreEqual(predictions[0, 0], 0);
     Assert.AreEqual(predictions[0, 1], 1);
     Assert.AreEqual(predictions[1, 0], 1);
     Assert.AreEqual(predictions[1, 1], 0);
 }
Code Example #5
 public static void WriteProblem(string outputFileName, svm_problem prob)
 {
     using (var sw = new StreamWriter(outputFileName))
     {
         for (int i = 0; i < prob.l; i++)
         {
             var sb = new StringBuilder();
             sb.AppendFormat("{0} ", prob.y[i]);
             for (int j = 0; j < prob.x[i].Count(); j++)
             {
                 svm_node node = prob.x[i][j];
                 sb.AppendFormat("{0}:{1} ", node.index, node.value);
             }
             sw.WriteLine(sb.ToString().Trim());
         }
         sw.Close();
     }
 }
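For illustration, a hedged usage sketch of the writer above; the file name and data are hypothetical, and each emitted line has the svmlight form "<label> <index>:<value> ...":

     // Hypothetical usage sketch: write a two-sample problem in svmlight format.
     var prob = new svm_problem();
     prob.l = 2;
     prob.y = new double[] { 1, -1 };
     prob.x = new svm_node[][]
     {
         new[] { new svm_node() { index = 1, value = 0.5 }, new svm_node() { index = 3, value = 1.0 } },
         new[] { new svm_node() { index = 2, value = -0.7 } },
     };
     WriteProblem("problem.svmlight.txt", prob);  // hypothetical output path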
Code Example #6
File: svm.cs  Project: EE590-Spring2014/libsvm
        public static double svm_predict_probability(svm_model model, svm_node[] x, double[] prob_estimates)
        {
            if ((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) && model.probA != null && model.probB != null)
            {
                int i;
                int nr_class = model.nr_class;
                double[] dec_values = new double[nr_class * (nr_class - 1) / 2];
                svm_predict_values(model, x, dec_values);

                double min_prob = 1e-7;
                double[][] tmpArray = new double[nr_class][];
                for (int i2 = 0; i2 < nr_class; i2++)
                {
                    tmpArray[i2] = new double[nr_class];
                }
                double[][] pairwise_prob = tmpArray;

                int k = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        pairwise_prob[i][j] = System.Math.Min(System.Math.Max(sigmoid_predict(dec_values[k], model.probA[k], model.probB[k]), min_prob), 1 - min_prob);
                        pairwise_prob[j][i] = 1 - pairwise_prob[i][j];
                        k++;
                    }
                multiclass_probability(nr_class, pairwise_prob, prob_estimates);

                int prob_max_idx = 0;
                for (i = 1; i < nr_class; i++)
                    if (prob_estimates[i] > prob_estimates[prob_max_idx])
                        prob_max_idx = i;
                return model.label[prob_max_idx];
            }
            else
                return svm_predict(model, x);
        }
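For context: sigmoid_predict (not shown here) applies Platt scaling to each pairwise decision value before multiclass_probability couples the pairwise estimates. Assuming this port follows the reference LibSVM implementation, the mapping is, as a sketch,

$$
p_{ij} = \frac{1}{1 + \exp\left(A_{ij}\,\hat{f}_{ij} + B_{ij}\right)}
$$

where \hat{f}_{ij} is the decision value of the (i, j) classifier and A_{ij}, B_{ij} come from model.probA and model.probB; the code above then clips each p_{ij} into [min_prob, 1 - min_prob].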
Code Example #7
File: svm.cs  Project: EE590-Spring2014/libsvm
        public static void svm_predict_values(svm_model model, svm_node[] x, double[] dec_values)
        {
            if (model.param.svm_type == svm_parameter.ONE_CLASS || model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR)
            {
                double[] sv_coef = model.sv_coef[0];
                double sum = 0;
                for (int i = 0; i < model.l; i++)
                    sum += sv_coef[i] * Kernel.k_function(x, model.SV[i], model.param);
                sum -= model.rho[0];
                dec_values[0] = sum;
            }
            else
            {
                int i;
                int nr_class = model.nr_class;
                int l = model.l;

                double[] kvalue = new double[l];
                for (i = 0; i < l; i++)
                    kvalue[i] = Kernel.k_function(x, model.SV[i], model.param);

                int[] start = new int[nr_class];
                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + model.nSV[i - 1];

                int p = 0;
                int pos = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        double sum = 0;
                        int si = start[i];
                        int sj = start[j];
                        int ci = model.nSV[i];
                        int cj = model.nSV[j];

                        int k;
                        double[] coef1 = model.sv_coef[j - 1];
                        double[] coef2 = model.sv_coef[i];
                        for (k = 0; k < ci; k++)
                            sum += coef1[si + k] * kvalue[si + k];
                        for (k = 0; k < cj; k++)
                            sum += coef2[sj + k] * kvalue[sj + k];
                        sum -= model.rho[p++];
                        dec_values[pos++] = sum;
                    }
            }
        }
Code Example #8
File: svm.cs  Project: EE590-Spring2014/libsvm
        internal static double k_function(svm_node[] x, svm_node[] y, svm_parameter param)
        {
            switch (param.kernel_type)
            {

                case svm_parameter.LINEAR:
                    return dot(x, y);

                case svm_parameter.POLY:
                    return System.Math.Pow(param.gamma * dot(x, y) + param.coef0, param.degree);

                case svm_parameter.RBF:
                {
                    double sum = 0;
                    int xlen = x.Length;
                    int ylen = y.Length;
                    int i = 0;
                    int j = 0;
                    while (i < xlen && j < ylen)
                    {
                        if (x[i].index == y[j].index)
                        {
                            double d = x[i++].value - y[j++].value;
                            sum += d * d;
                        }
                        else if (x[i].index > y[j].index)
                        {
                            sum += y[j].value * y[j].value;
                            ++j;
                        }
                        else
                        {
                            sum += x[i].value * x[i].value;
                            ++i;
                        }
                    }

                    while (i < xlen)
                    {
                        sum += x[i].value * x[i].value;
                        ++i;
                    }

                    while (j < ylen)
                    {
                        sum += y[j].value * y[j].value;
                        ++j;
                    }

                    return System.Math.Exp((- param.gamma) * sum);
                }

                case svm_parameter.SIGMOID:
                    return tanh(param.gamma * dot(x, y) + param.coef0);

                default:
                    return 0; // java

            }
        }
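For reference, the four branches above compute the standard LibSVM kernels; writing gamma, c0 and d for param.gamma, param.coef0 and param.degree, the formulas are:

$$
\begin{aligned}
K_{\mathrm{linear}}(x, y) &= x^{\top} y \\
K_{\mathrm{poly}}(x, y) &= \left(\gamma\, x^{\top} y + c_0\right)^{d} \\
K_{\mathrm{RBF}}(x, y) &= \exp\!\left(-\gamma\, \lVert x - y \rVert^{2}\right) \\
K_{\mathrm{sigmoid}}(x, y) &= \tanh\!\left(\gamma\, x^{\top} y + c_0\right)
\end{aligned}
$$

The RBF branch accumulates the squared distance over the sparse representation by merging the two index-sorted node arrays, so an index present in only one vector still contributes its squared value.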
Code Example #9
File: svm.cs  Project: EE590-Spring2014/libsvm
        public static double svm_predict(svm_model model, svm_node[] x)
        {
            if (model.param.svm_type == svm_parameter.ONE_CLASS || model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR)
            {
                double[] res = new double[1];
                svm_predict_values(model, x, res);

                if (model.param.svm_type == svm_parameter.ONE_CLASS)
                    return (res[0] > 0) ? 1 : -1;
                else
                    return res[0];
            }
            else
            {
                int i;
                int nr_class = model.nr_class;
                double[] dec_values = new double[nr_class * (nr_class - 1) / 2];
                svm_predict_values(model, x, dec_values);

                int[] vote = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                    vote[i] = 0;
                int pos = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        if (dec_values[pos++] > 0)
                            ++vote[i];
                        else
                            ++vote[j];
                    }

                int vote_max_idx = 0;
                for (i = 1; i < nr_class; i++)
                    if (vote[i] > vote[vote_max_idx])
                        vote_max_idx = i;
                return model.label[vote_max_idx];
            }
        }
Code Example #10
File: svm.cs  Project: EE590-Spring2014/libsvm
        internal Kernel(int l, svm_node[][] x_, svm_parameter param)
        {
            this.kernel_type = param.kernel_type;
            this.degree = param.degree;
            this.gamma = param.gamma;
            this.coef0 = param.coef0;

            x = (svm_node[][]) x_.Clone();

            if (kernel_type == svm_parameter.RBF)
            {
                x_square = new double[l];
                for (int i = 0; i < l; i++)
                    x_square[i] = dot(x[i], x[i]);
            }
            else
                x_square = null;
        }
Code Example #11
File: svm.cs  Project: EE590-Spring2014/libsvm
 internal static double dot(svm_node[] x, svm_node[] y)
 {
     double sum = 0;
     int xlen = x.Length;
     int ylen = y.Length;
     int i = 0;
     int j = 0;
     while (i < xlen && j < ylen)
     {
         if (x[i].index == y[j].index)
             sum += x[i++].value * y[j++].value;
         else
         {
             if (x[i].index > y[j].index)
                 ++j;
             else
                 ++i;
         }
     }
     return sum;
 }
Code Example #12
	private static void  predict(System.IO.StreamReader input, System.IO.BinaryWriter output, svm_model model, int predict_probability)
	{
		int correct = 0;
		int total = 0;
		double error = 0;
		double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
		
		int svm_type = svm.svm_get_svm_type(model);
		int nr_class = svm.svm_get_nr_class(model);
		int[] labels = new int[nr_class];
		double[] prob_estimates = null;
		
		if (predict_probability == 1)
		{
			if (svm_type == svm_parameter.EPSILON_SVR || svm_type == svm_parameter.NU_SVR)
			{
				System.Console.Out.Write("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + svm.svm_get_svr_probability(model) + "\n");
			}
			else
			{
				svm.svm_get_labels(model, labels);
				prob_estimates = new double[nr_class];
				//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
				output.Write("labels");
				for (int j = 0; j < nr_class; j++)
				{
					//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
					output.Write(" " + labels[j]);
				}
				//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
				output.Write("\n");
			}
		}
		while (true)
		{
			System.String line = input.ReadLine();
			if ((System.Object) line == null)
				break;
			
			SupportClass.Tokenizer st = new SupportClass.Tokenizer(line, " \t\n\r\f:");
			
			double target = atof(st.NextToken());
			int m = st.Count / 2;
			svm_node[] x = new svm_node[m];
			for (int j = 0; j < m; j++)
			{
				x[j] = new svm_node();
				x[j].index = atoi(st.NextToken());
				x[j].value_Renamed = atof(st.NextToken());
			}
			
			double v;
			if (predict_probability == 1 && (svm_type == svm_parameter.C_SVC || svm_type == svm_parameter.NU_SVC))
			{
				v = svm.svm_predict_probability(model, x, prob_estimates);
				//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
				output.Write(v + " ");
				for (int j = 0; j < nr_class; j++)
				{
					//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
					output.Write(prob_estimates[j] + " ");
				}
				//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
				output.Write("\n");
			}
			else
			{
				v = svm.svm_predict(model, x);
				//UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
				output.Write(v + "\n");
			}
			
			if (v == target)
				++correct;
			error += (v - target) * (v - target);
			sumv += v;
			sumy += target;
			sumvv += v * v;
			sumyy += target * target;
			sumvy += v * target;
			++total;
		}
		System.Console.Out.Write("Accuracy = " + (double) correct / total * 100 + "% (" + correct + "/" + total + ") (classification)\n");
		System.Console.Out.Write("Mean squared error = " + error / total + " (regression)\n");
		System.Console.Out.Write("Squared correlation coefficient = " + ((total * sumvy - sumv * sumy) * (total * sumvy - sumv * sumy)) / ((total * sumvv - sumv * sumv) * (total * sumyy - sumy * sumy)) + " (regression)\n");
	}
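The three summary lines printed at the end are classification accuracy, mean squared error, and the squared correlation coefficient; with v_i the predicted values, y_i the targets and n the total count, the last quantity is:

$$
r^{2} = \frac{\left(n \sum_i v_i y_i - \sum_i v_i \sum_i y_i\right)^{2}}{\left(n \sum_i v_i^{2} - \left(\sum_i v_i\right)^{2}\right)\left(n \sum_i y_i^{2} - \left(\sum_i y_i\right)^{2}\right)}
$$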
Code Example #13
	// read in a problem (in svmlight format)
	
	private void  read_problem()
	{
		/* UPGRADE_TODO: Expected value of parameters of constructor
		 * 'java.io.BufferedReader.BufferedReader' are different in the equivalent in .NET.
		 * 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1092"'
		 */
		System.IO.StreamReader fp = new System.IO.StreamReader(input_file_name);
		System.Collections.ArrayList vy = new System.Collections.ArrayList(10);
		System.Collections.ArrayList vx = new System.Collections.ArrayList(10);
		int max_index = 0;
		
		while (true)
		{
			System.String line = fp.ReadLine();
			if ((System.Object) line == null)
				break;
			
			SupportClass.Tokenizer st = new SupportClass.Tokenizer(line, " \t\n\r\f:");
			
			vy.Add(st.NextToken());
			int m = st.Count / 2;
			svm_node[] x = new svm_node[m];
			for (int j = 0; j < m; j++)
			{
				x[j] = new svm_node();
				x[j].index = atoi(st.NextToken());
				x[j].value_Renamed = atof(st.NextToken());
			}
			if (m > 0)
				max_index = System.Math.Max(max_index, x[m - 1].index);
			vx.Add(x);
		}
		
		prob = new svm_problem();
		prob.l = vy.Count;
		prob.x = new svm_node[prob.l][];
		for (int i = 0; i < prob.l; i++)
			prob.x[i] = (svm_node[]) vx[i];
		prob.y = new double[prob.l];
		for (int i = 0; i < prob.l; i++)
			prob.y[i] = atof((System.String) vy[i]);
		
		if (param.gamma == 0)
			param.gamma = 1.0 / max_index;
		
		fp.Close();
	}
Code Example #14
File: svm.cs  Project: EE590-Spring2014/libsvm
        //
        // Interface functions
        //
        public static svm_model svm_train(svm_problem prob, svm_parameter param, TrainingProgressEvent progressEvent = null)
        {
            svm_model model = new svm_model();
            model.param = param;

            if (param.svm_type == svm_parameter.ONE_CLASS || param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR)
            {
                // regression or one-class-svm
                model.nr_class = 2;
                model.label = null;
                model.nSV = null;
                model.probA = null; model.probB = null;
                model.sv_coef = new double[1][];

                if (param.probability == 1 && (param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR))
                {
                    model.probA = new double[1];
                    model.probA[0] = svm_svr_probability(prob, param);
                }

                decision_function f = svm_train_one(prob, param, 0, 0);
                model.rho = new double[1];
                model.rho[0] = f.rho;

                int nSV = 0;
                int i;
                for (i = 0; i < prob.l; i++)
                    if (System.Math.Abs(f.alpha[i]) > 0)
                        ++nSV;
                model.l = nSV;
                model.SV = new svm_node[nSV][];
                model.sv_coef[0] = new double[nSV];
                int j = 0;
                for (i = 0; i < prob.l; i++)
                    if (System.Math.Abs(f.alpha[i]) > 0)
                    {
                        model.SV[j] = prob.x[i];
                        model.sv_coef[0][j] = f.alpha[i];
                        ++j;
                    }
            }
            else
            {
                // classification
                // find out the number of classes
                int l = prob.l;
                int max_nr_class = 16;
                int nr_class = 0;
                int[] label = new int[max_nr_class];
                int[] count = new int[max_nr_class];
                int[] index = new int[l];

                int i;
                for (i = 0; i < l; i++)
                {
                    //UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"'
                    int this_label = (int)prob.y[i];
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (this_label == label[j])
                        {
                            ++count[j];
                            break;
                        }
                    index[i] = j;
                    if (j == nr_class)
                    {
                        if (nr_class == max_nr_class)
                        {
                            max_nr_class *= 2;
                            int[] new_data = new int[max_nr_class];
                            Array.Copy(label, 0, new_data, 0, label.Length);
                            label = new_data;

                            new_data = new int[max_nr_class];
                            Array.Copy(count, 0, new_data, 0, count.Length);
                            count = new_data;
                        }
                        label[nr_class] = this_label;
                        count[nr_class] = 1;
                        ++nr_class;
                    }
                }

                // group training data of the same class

                int[] start = new int[nr_class];
                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + count[i - 1];

                svm_node[][] x = new svm_node[l][];

                for (i = 0; i < l; i++)
                {
                    x[start[index[i]]] = prob.x[i];
                    ++start[index[i]];
                }

                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + count[i - 1];

                // calculate weighted C

                double[] weighted_C = new double[nr_class];
                for (i = 0; i < nr_class; i++)
                    weighted_C[i] = param.C;
                for (i = 0; i < param.nr_weight; i++)
                {
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (param.weight_label[i] == label[j])
                            break;
                    if (j == nr_class)
                        System.Console.Error.Write("warning: class label " + param.weight_label[i] + " specified in weight is not found\n");
                    else
                        weighted_C[j] *= param.weight[i];
                }

                // train k*(k-1)/2 models

                bool[] nonzero = new bool[l];
                for (i = 0; i < l; i++)
                    nonzero[i] = false;
                decision_function[] f = new decision_function[nr_class * (nr_class - 1) / 2];

                double[] probA = null, probB = null;
                if (param.probability == 1)
                {
                    probA = new double[nr_class * (nr_class - 1) / 2];
                    probB = new double[nr_class * (nr_class - 1) / 2];
                }

                int p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        svm_problem sub_prob = new svm_problem();
                        int si = start[i], sj = start[j];
                        int ci = count[i], cj = count[j];
                        sub_prob.l = ci + cj;
                        sub_prob.x = new svm_node[sub_prob.l][];
                        sub_prob.y = new double[sub_prob.l];
                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            sub_prob.x[k] = x[si + k];
                            sub_prob.y[k] = +1;
                        }
                        for (k = 0; k < cj; k++)
                        {
                            sub_prob.x[ci + k] = x[sj + k];
                            sub_prob.y[ci + k] = -1;
                        }

                        if (param.probability == 1)
                        {
                            double[] probAB = new double[2];
                            svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                            probA[p] = probAB[0];
                            probB[p] = probAB[1];
                        }

                        f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);

                        for (k = 0; k < ci; k++)
                            if (!nonzero[si + k] && System.Math.Abs(f[p].alpha[k]) > 0)
                                nonzero[si + k] = true;
                        for (k = 0; k < cj; k++)
                            if (!nonzero[sj + k] && System.Math.Abs(f[p].alpha[ci + k]) > 0)
                                nonzero[sj + k] = true;
                        ++p;
                    }

                // build output

                model.nr_class = nr_class;

                model.label = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                    model.label[i] = label[i];

                model.rho = new double[nr_class * (nr_class - 1) / 2];
                for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    model.rho[i] = f[i].rho;

                if (param.probability == 1)
                {
                    model.probA = new double[nr_class * (nr_class - 1) / 2];
                    model.probB = new double[nr_class * (nr_class - 1) / 2];
                    for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    {
                        model.probA[i] = probA[i];
                        model.probB[i] = probB[i];
                    }
                }
                else
                {
                    model.probA = null;
                    model.probB = null;
                }

                int nnz = 0;
                int[] nz_count = new int[nr_class];
                model.nSV = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    int nSV = 0;
                    for (int j = 0; j < count[i]; j++)
                        if (nonzero[start[i] + j])
                        {
                            ++nSV;
                            ++nnz;
                        }
                    model.nSV[i] = nSV;
                    nz_count[i] = nSV;
                }

                //Debug.WriteLine("Total nSV = " + nnz + "\n");

                model.l = nnz;
                model.SV = new svm_node[nnz][];
                p = 0;
                for (i = 0; i < l; i++)
                    if (nonzero[i])
                        model.SV[p++] = x[i];

                int[] nz_start = new int[nr_class];
                nz_start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

                model.sv_coef = new double[nr_class - 1][];
                for (i = 0; i < nr_class - 1; i++)
                    model.sv_coef[i] = new double[nnz];

                p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        // classifier (i,j): coefficients with
                        // i are in sv_coef[j-1][nz_start[i]...],
                        // j are in sv_coef[i][nz_start[j]...]

                        int si = start[i];
                        int sj = start[j];
                        int ci = count[i];
                        int cj = count[j];

                        int q = nz_start[i];
                        int k;
                        for (k = 0; k < ci; k++)
                            if (nonzero[si + k])
                                model.sv_coef[j - 1][q++] = f[p].alpha[k];
                        q = nz_start[j];
                        for (k = 0; k < cj; k++)
                            if (nonzero[sj + k])
                                model.sv_coef[i][q++] = f[p].alpha[ci + k];
                        ++p;
                    }
            }
            return model;
        }
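A minimal, hedged training sketch against this API (the toy data and parameter values are hypothetical and untuned; field and constant names are assumed to match the port shown above):

        // Hypothetical usage sketch: train a linear C-SVC on two one-dimensional points, then predict.
        var param = new svm_parameter();
        param.svm_type = svm_parameter.C_SVC;
        param.kernel_type = svm_parameter.LINEAR;
        param.C = 1.0;            // illustrative, not tuned
        param.eps = 0.001;
        param.cache_size = 100.0;
        param.probability = 0;

        var prob = new svm_problem();
        prob.l = 2;
        prob.y = new double[] { +1, -1 };
        prob.x = new svm_node[][]
        {
            new[] { new svm_node() { index = 1, value = 1.0 } },
            new[] { new svm_node() { index = 1, value = -1.0 } },
        };

        string check = svm.svm_check_parameter(prob, param);   // null means the parameters are consistent
        if (check == null)
        {
            svm_model model = svm.svm_train(prob, param);
            double prediction = svm.svm_predict(model, new[] { new svm_node() { index = 1, value = 0.8 } });
            Console.WriteLine(prediction);
        }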
Code Example #15
        public void TrainLibSVM(double[][] vektoren, double[] labels, double currentC, double currentG, out int errorCount)
        {
            int         nrdocs = vektoren.Length;
            svm_problem prob   = new svm_problem();

            prob.l = vektoren.Length - 1;
            prob.y = labels;
            svm_node[][] nodes = new svm_node[nrdocs][];

            for (int i = 0; i < vektoren.Length; i++)
            {
                int dim = vektoren[i].Length;

                nodes[i] = new svm_node[dim + 1];

                for (int j = 0; j < dim; j++)
                {
                    svm_node n = new svm_node();
                    n.index         = j;
                    n.value_Renamed = vektoren[i][j];

                    nodes[i][j] = n;
                }
                svm_node ln = new svm_node();
                ln.index         = -1;
                ln.value_Renamed = 0;
                nodes[i][dim]    = ln;
            }

            prob.x = nodes;

            svm_parameter param = new svm_parameter();

            param.cache_size = 256.0;
            param.C          = 1000.0;
            //param.weight = new double[] { 1.0, 1.0, 1.0, 1.0, 1.0 };
            //param.weight_label = new int[] { 1, 1, 1, 1, 1 };
            param.svm_type    = svm_parameter.C_SVC;
            param.kernel_type = svm_parameter.SIGMOID;
            param.gamma       = 0.00000001;
            param.eps         = 0.0001;
            //param.nr_weight = 0;
            param.probability = 1;

            //double[] cs;
            //double[] gs;

            double[] cergs     = new double[labels.Length];
            int      minfehler = labels.Length;
            int      fehler    = 0;
            double   c         = 0.0;
            double   g         = 0.0;

            #region Parameter tuning
            //cs = new double[] { Math.Pow(2.0, -15.0), Math.Pow(2.0, -11.0), Math.Pow(2.0, -9.0), Math.Pow(2.0, -7.0), Math.Pow(2.0, -5.0), Math.Pow(2.0, -3.0), Math.Pow(2.0, -1.0), Math.Pow(2.0, 1.0), Math.Pow(2.0, 3.0), Math.Pow(2.0, 5.0), Math.Pow(2.0, 7.0), Math.Pow(2.0, 12.0), Math.Pow(2.0, 15.0) };
            //gs = new double[] { Math.Pow(2.0, -15.0), Math.Pow(2.0, -12.0), Math.Pow(2.0, -9.0), Math.Pow(2.0, -7.0), Math.Pow(2.0, -5.0), Math.Pow(2.0, -3.0), Math.Pow(2.0, -1.0), Math.Pow(2.0, 1.0), Math.Pow(2.0, 3.0) };
            //cs = new double[] { Math.Pow(2.0, -3.0), Math.Pow(2.0, -1.0), Math.Pow(2.0, 1.0), Math.Pow(2.0, 3.0), Math.Pow(2.0, 5.0), Math.Pow(2.0, 7.0), Math.Pow(2.0, 12.0) };
            //gs = new double[] { Math.Pow(2.0, -7.0), Math.Pow(2.0, -5.0), Math.Pow(2.0, -3.0), Math.Pow(2.0, -1.0), Math.Pow(2.0, 1.0), Math.Pow(2.0, 3.0) };

            //for (int i = 0; i < cs.Length; i++)
            //{
            //    param.C = cs[i];

            //    for (int j = 0; j < gs.Length; j++)
            //    {
            //        fehler = 0;
            //        param.gamma = gs[j];
            //        string res = svm.svm_check_parameter(prob, param);
            //        if (res == null)
            //        {
            //            svm.svm_cross_validation(prob, param, vektoren.Length/4, cergs);

            //            for (int k = 0; k < labels.Length; k++)
            //            {
            //                if (cergs[k] != labels[k])
            //                    fehler++;
            //            }
            //            if (fehler < minfehler)
            //            {
            //                minfehler = fehler;
            //                c = param.C;
            //                g = param.gamma;
            //            }
            //        }
            //    }
            //}

            param.C     = currentC;
            fehler      = 0;
            param.gamma = currentG;
            string res = svm.svm_check_parameter(prob, param);
            if (res == null)
            {
                svm.svm_cross_validation(prob, param, vektoren.Length / 4, cergs);

                for (int k = 0; k < labels.Length; k++)
                {
                    if (cergs[k] != labels[k])
                    {
                        fehler++;
                    }
                }
                if (fehler < minfehler)
                {
                    minfehler = fehler;
                    c         = param.C;
                    g         = param.gamma;
                }
            }

            #endregion

            #region Fine tuning
            //cs = new double[] { c * 0.3, c * 0.4, c * 0.5, c * 0.6, c * 0.7, c * 0.8, c * 0.9, c, c * 2.0, c * 3.0, c * 4.0, c * 5.0, c * 6.0 };
            //gs = new double[] { g * 0.5, g * 0.6, g * 0.7, g * 0.8, g * 0.9, g, g * 2.0, g * 3.0, g * 4.0 };
            double[] csF = new double[] { c * 0.6, c * 0.7, c * 0.8, c * 0.9, c, c * 2.0, c * 3.0 };
            double[] gsF = new double[] { g * 0.7, g * 0.8, g * 0.9, g, g * 2.0, g * 3.0 };

            for (int i = 0; i < csF.Length; i++)
            {
                param.C = csF[i];

                for (int j = 0; j < gsF.Length; j++)
                {
                    fehler      = 0;
                    param.gamma = gsF[j];
                    res         = svm.svm_check_parameter(prob, param);
                    if (res == null)
                    {
                        svm.svm_cross_validation(prob, param, vektoren.Length / 4, cergs);

                        for (int k = 0; k < labels.Length; k++)
                        {
                            if (cergs[k] != labels[k])
                            {
                                fehler++;
                            }
                        }
                        if (fehler < minfehler)
                        {
                            minfehler = fehler;
                            c         = param.C;
                            g         = param.gamma;
                        }
                    }
                    //Thread.Sleep(1);
                }
                //Thread.Sleep(10);
            }
            #endregion

            #region Very fine tuning
            //cs = new double[] { c - 7.0, c - 6.0, c - 5.0, c - 4.0, c - 3.0, c - 2.0, c - 1.0, c, c + 1.0, c + 2.0, c + 3.0, c + 4.0, c + 5.0 };
            //gs = new double[] { g - 5.0, g - 4.0, g - 3.0, g - 2.0, g - 1.0, g, g + 1.0, g + 2.0, g + 3.0 };

            /*cs = new double[] { c - 1.0, c - 0.3, c - 0.1, c, c + 0.1, c + 0.3, c + 1.0, };
             * gs = new double[] { g - 1.0, g - 0.3, g - 0.1, g, g + 0.1, g + 0.3, g + 1.0 };
             * for (int i = 0; i < cs.Length; i++)
             * {
             *  param.C = cs[i];
             *
             *  for (int j = 0; j < gs.Length; j++)
             *  {
             *      fehler = 0;
             *      param.gamma = gs[j];
             *      string res = svm.svm_check_parameter(prob, param);
             *      if (res == null)
             *      {
             *          svm.svm_cross_validation(prob, param, 6, cergs);
             *
             *          for (int k = 0; k < labels.Length; k++)
             *          {
             *              if (cergs[k] != labels[k])
             *                  fehler++;
             *          }
             *          if (fehler < minfehler)
             *          {
             *              minfehler = fehler;
             *              c = param.C;
             *              g = param.gamma;
             *          }
             *      }
             *  }
             * }*/
            #endregion


            param.C     = c;
            param.gamma = g;

            this._model = new svm_model();
            this._model = svm.svm_train(prob, param);

            int      anzKlassen = svm.svm_get_nr_class(this._model);
            double[] probs      = new double[anzKlassen];

            double erg;
            erg = svm.svm_predict_probability(this._model, nodes[0], probs);
            //erg = svm.svm_predict_probability(this._model, nodes[11], probs);
            //klazzifiziere(this.testvektor);
            //klazzifiziere(vektoren[6]);

            errorCount = minfehler;
        }
Code Example #16
 private void WriteArray(BinaryWriter writer, svm_node[][] array)
 {
     if (array == null)
     {
         writer.Write(false);
     }
     else
     {
         writer.Write(true);
         writer.Write(array.Length);
         for (int i = 0; i < array.Length; ++i)
         {
             writer.Write(array[i].Length);
             for (int j = 0; j < array[i].Length; ++j)
             {
                 writer.Write(array[i][j].index);
                 writer.Write(array[i][j].value_Renamed);
             }
         }
     }
 }
Code Example #17
        private svm_node[][] ReadSvmNodeArray(BinaryReader reader)
        {
            bool isNull = !reader.ReadBoolean();
            if (isNull)
            {
                return null;
            }
            else
            {
                int length = reader.ReadInt32();
                svm_node[][] array = new svm_node[length][];
                for (int i = 0; i < length; i++)
                {
                    int sub_length = reader.ReadInt32();
                    array[i] = new svm_node[sub_length];

                    for (int j = 0; j < sub_length; j++)
                    {
                        svm_node node = new svm_node();
                        node.index = reader.ReadInt32();
                        node.value_Renamed = reader.ReadDouble();
                        array[i][j] = node;
                    }
                }
                return array;
            }
        }
Code Example #18
File: SVR.cs  Project: heulvxunda/libsvm.net
 public override double Predict(svm_node[] x)
 {
     return svm.svm_predict(this.model, x);
 }
Code Example #19
File: SVM.cs  Project: heulvxunda/libsvm.net
 /// <summary>
 /// Provides the prediction
 /// </summary>
 public abstract double Predict(svm_node[] x);
Code Example #20
File: SVC.cs  Project: heulvxunda/libsvm.net
 public override double Predict(svm_node[] x)
 {
     var probabilities = PredictProbabilities(x);
     var max = probabilities.Aggregate((a, b)=> a.Value > b.Value ? a : b);
     return max.Key;
 }
Code Example #21
File: svm_predict.cs  Project: hksonngan/mytesgnikrow
    private static ArrayList[] predict(System.IO.StreamReader input, System.IO.StreamWriter output, svm_model model, int predict_probability)
    {
        //int correct = 0;
        //int total = 0;
        //double error = 0;
        //double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
        ArrayList[] arrResult = new ArrayList[2]; // arrResult[0] holds the actual target values, arrResult[1] holds the predicted values
        arrResult[0] = new ArrayList();
        arrResult[1] = new ArrayList();

        int svm_type = svm.svm_get_svm_type(model);
        int nr_class = svm.svm_get_nr_class(model);
        int[] labels = new int[nr_class];
        double[] prob_estimates = null;

        if (predict_probability == 1)
        {
            if (svm_type == svm_parameter.EPSILON_SVR || svm_type == svm_parameter.NU_SVR)
            {
                System.Console.Out.Write("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + svm.svm_get_svr_probability(model) + "\n");
            }
            else
            {
                svm.svm_get_labels(model, labels);
                prob_estimates = new double[nr_class];
                //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                output.Write("labels");
                for (int j = 0; j < nr_class; j++)
                {
                    //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                    output.Write(" " + labels[j]);
                }
                //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                output.Write("\n");
            }
        }
        #region [Added] Read the preprocessing info from the first line of the test file
        System.String strPreprocess = input.ReadLine();
        #endregion

        while (true)
        {
            System.String line = input.ReadLine();
            if ((System.Object)line == null)
                break;

            SupportClass.Tokenizer st = new SupportClass.Tokenizer(line, " \t\n\r\f:");

            double target = atof(st.NextToken());
            int m = st.Count / 2;
            svm_node[] x = new svm_node[m];
            for (int j = 0; j < m; j++)
            {
                x[j] = new svm_node();
                x[j].index = atoi(st.NextToken());
                x[j].value_Renamed = atof(st.NextToken());
            }

            double v;
            if (predict_probability == 1 && (svm_type == svm_parameter.C_SVC || svm_type == svm_parameter.NU_SVC))
            {
                v = svm.svm_predict_probability(model, x, prob_estimates);
                //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                output.Write(v + " ");
                for (int j = 0; j < nr_class; j++)
                {
                    //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                    output.Write(prob_estimates[j] + " ");
                }
                //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                output.Write("\n");
            }
            else
            {
                v = svm.svm_predict(model, x);
                #region [Added] Convert back to the original scale based on the preprocessing method
                string[] strItems = strPreprocess.Split(' ');
                double dblMin;
                double dblMax;
                double dblDiff;
                switch (strItems[0])
                {
                    case "ScaleByMinMax":
                        dblMin = Convert.ToDouble(strItems[1]);
                        dblMax = Convert.ToDouble(strItems[2]);
                        dblDiff = dblMax - dblMin;
                        v = v * dblDiff + dblMin;
                        target = target * dblDiff + dblMin;
                        break;
                    default:
                        break;
                }
                #endregion
                arrResult[0].Add(target);
                arrResult[1].Add(v);
                //UPGRADE_ISSUE: Method 'java.io.DataOutputStream.Write' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaioDataOutputStreamWrite_javalangString"'
                output.Write(target + " " + v + "\n");
            }
            //#region [Added] Convert back to the original scale based on the preprocessing method
            //string[] strItems = strPreprocess.Split(' ');
            //switch (strItems[0])
            //{
            //    case "Scale(0,1)":
            //        double dblMin = Convert.ToDouble(strItems[1]);
            //        double dblMax = Convert.ToDouble(strItems[2]);
            //        double dblDiff = dblMax - dblMin;
            //        v = (v - 0.15) * dblDiff / 0.7 + dblMin;
            //        target = (target - 0.15) * dblDiff / 0.7 + dblMin;
            //        break;
            //    default:
            //        break;
            //}
            //#endregion
            //arrResult[0].Add(target);
            //arrResult[1].Add(v);
            //if (v == target)
            //    ++correct;
            //error += (v - target) * (v - target);
            //sumv += v;
            //sumy += target;
            //sumvv += v * v;
            //sumyy += target * target;
            //sumvy += v * target;
            //++total;
        }
        return arrResult;
        //System.Console.Out.Write("Accuracy = " + (double) correct / total * 100 + "% (" + correct + "/" + total + ") (classification)\n");
        //System.Console.Out.Write("Mean squared error = " + error / total + " (regression)\n");
        //System.Console.Out.Write("Squared correlation coefficient = " + ((total * sumvy - sumv * sumy) * (total * sumvy - sumv * sumy)) / ((total * sumvv - sumv * sumv) * (total * sumyy - sumy * sumy)) + " (regression)\n");
    }
Code Example #22
File: HW7.cs  Project: robinlan/R-Study-Lists
        static Tuple<double, double> RunPLAvsSVM(int experiments, int points)
        {
            const int TEST_POINTS = 10000;
            Random rnd = new Random();

            long svmWins = 0, svCount = 0;
            for (int i = 1; i <= experiments; i++)
            {
                //pick a random line y = a * x + b
                double x1 = rnd.NextDouble(), y1 = rnd.NextDouble(), x2 = rnd.NextDouble(), y2 = rnd.NextDouble();
                var Wf = new DenseVector(3);
                Wf[0] = 1;
                Wf[1] = (y1 - y2) / (x1 * y2 - y1 * x2);
                Wf[2] = (x2 - x1) / (x1 * y2 - y1 * x2);
                Func<MathNet.Numerics.LinearAlgebra.Generic.Vector<double>, int> f = x => Wf.DotProduct(x) >= 0 ? 1 : -1;

                //generate training set of N random points
                var X = new DenseMatrix(points, 3);
                do
                    for (int j = 0; j < points; j++)
                    {
                        X[j, 0] = 1;
                        X[j, 1] = rnd.NextDouble() * 2 - 1;
                        X[j, 2] = rnd.NextDouble() * 2 - 1;
                    }
                while (Enumerable.Range(0, X.RowCount).All(j => f(X.Row(0)) == f(X.Row(j))));

                var W = new DenseVector(3);
                Func<MathNet.Numerics.LinearAlgebra.Generic.Vector<double>, int> h = x => W.DotProduct(x) >= 0 ? 1 : -1;

                //run Perceptron
                int k = 1;
                while (Enumerable.Range(0, points).Any(j => h(X.Row(j)) != f(X.Row(j))))
                {
                    //find all misclassified points
                    int[] M = Enumerable.Range(0, points).Where(j => h(X.Row(j)) != f(X.Row(j))).ToArray();
                    int m = M[rnd.Next(0, M.Length)];

                    int sign = f(X.Row(m));
                    W[0] += sign;
                    W[1] += sign * X[m, 1];
                    W[2] += sign * X[m, 2];
                    k++;
                }

                //calculate P[f(Xtest) != h(Xtest)]
                DenseVector Xtest = new DenseVector(3);
                Xtest[0] = 1;
                int matches = 0;
                for (int j = 0; j < TEST_POINTS; j++)
                {
                    Xtest[1] = rnd.NextDouble() * 2 - 1;
                    Xtest[2] = rnd.NextDouble() * 2 - 1;
                    if (f(Xtest) == h(Xtest)) matches++;
                }
                double Ppla = (matches + 0.0) / TEST_POINTS;

                //Run SVM
                var prob = new svm_problem()
                {
                    x = Enumerable.Range(0, points).Select(j =>
                        new svm_node[] {
                            new svm_node() { index = 0, value = X[j, 1] },
                            new svm_node() { index = 1, value = X[j, 2] } }).ToArray(),
                    y = Enumerable.Range(0, points).Select(j => (double)f(X.Row(j))).ToArray(),
                    l = points
                };

                var model = svm.svm_train(prob, new svm_parameter()
                {
                    svm_type = (int)SvmType.C_SVC,
                    kernel_type = (int)KernelType.LINEAR,
                    C = 1000000,
                    eps = 0.001,
                    shrinking = 0
                });

                //calculate P[f(Xtest) != h_svm(Xtest)]
                svm_node[] Xsvm = new svm_node[] {
                    new svm_node() { index = 0, value = 1.0 },
                    new svm_node() { index = 1, value = 1.0 } };
                matches = 0;

                for (int j = 0; j < TEST_POINTS; j++)
                {
                    Xtest[1] = rnd.NextDouble() * 2 - 1;
                    Xsvm[0].value = Xtest[1];
                    Xtest[2] = rnd.NextDouble() * 2 - 1;
                    Xsvm[1].value = Xtest[2];
                    if (f(Xtest) == (svm.svm_predict(model, Xsvm) > 0 ? 1 : -1)) matches++;
                }
                double Psvm = (matches + 0.0) / TEST_POINTS;

                svCount += model.l;
                if (Psvm >= Ppla) svmWins++;
            }

            return Tuple.Create((svmWins + 0.0) / experiments, (svCount + 0.0) / experiments);
        }