Inheritance: System.ICloneable
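Because svm_parameter implements System.ICloneable, a configured parameter set can be duplicated and then adjusted without disturbing the original; this is the pattern used further down by svm_svr_probability and svm_binary_svc_probability. The short sketch below only illustrates that pattern; the concrete field values are made up for the example.

        // illustrative settings; the Clone() call is the point of the sketch
        var param = new svm_parameter();
        param.svm_type = svm_parameter.C_SVC;
        param.kernel_type = svm_parameter.RBF;
        param.C = 1;
        param.eps = 1e-3;

        // copy the configuration and tweak the copy, leaving the original untouched
        var newparam = (svm_parameter) param.Clone();
        newparam.probability = 0;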
Example #1
 internal xa68541a68dd6f460(svm_problem prob, svm_parameter param, sbyte[] y_)
     : base(prob.l, prob.x, param)
 {
     // copy the class labels
     this.x1e218ceaee1bb583 = new sbyte[y_.Length];
     y_.CopyTo(this.x1e218ceaee1bb583, 0);
     // kernel cache sized from cache_size megabytes (1048576 = 2^20 bytes)
     this.x1f31bf6ca58166a1 = new xb730a77005d16cc1(prob.l, (int) (param.cache_size * 1048576.0));
 }
Example #2
 internal xfbfe48e5ee40f893(svm_problem prob, svm_parameter param)
     : base(prob.l, prob.x, param)
 {
     this.x9fc3ee03a439f6f0 = prob.l;
     this.x1f31bf6ca58166a1 = new xb730a77005d16cc1(this.x9fc3ee03a439f6f0, (int) (param.cache_size * 1048576.0));
     this.x32dce50116aa0f1e = new sbyte[2 * this.x9fc3ee03a439f6f0];
     this.xc0c4c459c6ccbd00 = new int[2 * this.x9fc3ee03a439f6f0];
     for (int num = 0; num < this.x9fc3ee03a439f6f0; num++)
     {
         this.x32dce50116aa0f1e[num] = 1;
         this.x32dce50116aa0f1e[num + this.x9fc3ee03a439f6f0] = -1;
         this.xc0c4c459c6ccbd00[num] = num;
         this.xc0c4c459c6ccbd00[num + this.x9fc3ee03a439f6f0] = num;
     }
     this.x5cafa8d49ea71ea1 = new float[2][];
     for (int num2 = 0; num2 < 2; num2++)
     {
         this.x5cafa8d49ea71ea1[num2] = new float[2 * this.x9fc3ee03a439f6f0];
     }
     this.xafb0a999075e2e6a = 0;
 }
Example #3
 public SupportVectorMachine(svm_model theModel)
 {
     this._model = theModel;
     this._paras = this._model.param;
     this._inputCount = 0;

     // determine the input count from the support vectors
     svm_node[][] sV = this._model.SV;
     for (int num = 0; num < sV.Length; num++)
     {
         svm_node[] _nodeArray3 = sV[num];
         for (int num2 = 0; num2 < _nodeArray3.Length; num2++)
         {
             svm_node _node = _nodeArray3[num2];
             this._inputCount = Math.Max(_node.index, this._inputCount);
         }
     }
 }
Example #4
 internal xf094e3229d63c9be(int l, svm_node[][] x_, svm_parameter param)
 {
     this.xbcd9612004129925 = param.kernel_type;
     this.x83735b10b6a3d76d = param.degree;
     this.xc7c4e9c099884228 = param.gamma;
     this.x987b3666f8a37f90 = param.coef0;
     this.x08db3aeabb253cb1 = (svm_node[][]) x_.Clone();

     if (this.xbcd9612004129925 != 2)
     {
         this.x4f9e236643454e9f = null;
         return;
     }

     this.x4f9e236643454e9f = new double[l];
     for (int num = 0; num < l; num++)
     {
         this.x4f9e236643454e9f[num] = x99240096a9e3842c(this.x08db3aeabb253cb1[num], this.x08db3aeabb253cb1[num]);
     }
 }
Example #5
        public static String svm_check_parameter(svm_problem prob, svm_parameter param)
        {
            // svm_type

            int svm_type = param.svm_type;
            if (svm_type != svm_parameter.C_SVC && svm_type != svm_parameter.NU_SVC &&
                svm_type != svm_parameter.ONE_CLASS && svm_type != svm_parameter.EPSILON_SVR &&
                svm_type != svm_parameter.NU_SVR)
                return "unknown svm type";

            // kernel_type

            int kernel_type = param.kernel_type;
            if (kernel_type != svm_parameter.LINEAR && kernel_type != svm_parameter.POLY &&
                kernel_type != svm_parameter.RBF && kernel_type != svm_parameter.SIGMOID)
                return "unknown kernel type";

            // cache_size,eps,C,nu,p,shrinking

            if (param.cache_size <= 0)
                return "cache_size <= 0";

            if (param.eps <= 0)
                return "eps <= 0";

            if (svm_type == svm_parameter.C_SVC || svm_type == svm_parameter.EPSILON_SVR ||
                svm_type == svm_parameter.NU_SVR)
                if (param.C <= 0)
                    return "C <= 0";

            if (svm_type == svm_parameter.NU_SVC || svm_type == svm_parameter.ONE_CLASS ||
                svm_type == svm_parameter.NU_SVR)
                if (param.nu < 0 || param.nu > 1)
                    return "nu < 0 or nu > 1";

            if (svm_type == svm_parameter.EPSILON_SVR)
                if (param.p < 0)
                    return "p < 0";

            if (param.shrinking != 0 && param.shrinking != 1)
                return "shrinking != 0 and shrinking != 1";

            if (param.probability != 0 && param.probability != 1)
                return "probability != 0 and probability != 1";

            if (param.probability == 1 && svm_type == svm_parameter.ONE_CLASS)
                return "one-class SVM probability output not supported yet";

            // check whether nu-svc is feasible

            if (svm_type == svm_parameter.NU_SVC)
            {
                int l = prob.l;
                int max_nr_class = 16;
                int nr_class = 0;
                var label = new int[max_nr_class];
                var count = new int[max_nr_class];

                int i;
                for (i = 0; i < l; i++)
                {
                    //UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"'
                    var this_label = (int) prob.y[i];
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (this_label == label[j])
                        {
                            ++count[j];
                            break;
                        }

                    if (j == nr_class)
                    {
                        if (nr_class == max_nr_class)
                        {
                            max_nr_class *= 2;
                            var new_data = new int[max_nr_class];
                            Array.Copy(label, 0, new_data, 0, label.Length);
                            label = new_data;

                            new_data = new int[max_nr_class];
                            Array.Copy(count, 0, new_data, 0, count.Length);
                            count = new_data;
                        }
                        label[nr_class] = this_label;
                        count[nr_class] = 1;
                        ++nr_class;
                    }
                }

                for (i = 0; i < nr_class; i++)
                {
                    int n1 = count[i];
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        int n2 = count[j];
                        if (param.nu*(n1 + n2)/2 > Math.Min(n1, n2))
                            return "specified nu is infeasible";
                    }
                }
            }

            return null;
        }
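A typical call pattern, sketched here under the assumption that an svm_problem named prob and an svm_parameter named param have already been filled in and that the call is made from the class declaring these static methods: svm_check_parameter returns null when the settings are consistent and an error string otherwise, so it is usually consulted right before svm_train.

            // sketch only: prob and param are assumed to be populated elsewhere
            String error = svm_check_parameter(prob, param);
            if (error != null)
            {
                Console.Error.Write("parameter error: " + error + "\n");
            }
            else
            {
                svm_model model = svm_train(prob, param);
            }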
Example #6
        public static svm_model svm_load_model(StringReader fp)
        {
            // read parameters

            var model = new svm_model();
            var param = new svm_parameter();
            model.param = param;
            model.rho = null;
            model.probA = null;
            model.probB = null;
            model.label = null;
            model.nSV = null;

            while (true)
            {
                String cmd = fp.ReadLine();
                String arg = cmd.Substring(cmd.IndexOf(' ') + 1);

                if (cmd.StartsWith("svm_type"))
                {
                    int i;
                    for (i = 0; i < svm_type_table.Length; i++)
                    {
                        if (arg.IndexOf(svm_type_table[i]) != -1)
                        {
                            param.svm_type = i;
                            break;
                        }
                    }
                    if (i == svm_type_table.Length)
                    {
                        Console.Error.Write("unknown svm type.\n");
                        return null;
                    }
                }
                else if (cmd.StartsWith("kernel_type"))
                {
                    int i;
                    for (i = 0; i < kernel_type_table.Length; i++)
                    {
                        if (arg.IndexOf(kernel_type_table[i]) != -1)
                        {
                            param.kernel_type = i;
                            break;
                        }
                    }
                    if (i == kernel_type_table.Length)
                    {
                        Console.Error.Write("unknown kernel function.\n");
                        return null;
                    }
                }
                else if (cmd.StartsWith("degree"))
                    param.degree = atof(arg);
                else if (cmd.StartsWith("gamma"))
                    param.gamma = atof(arg);
                else if (cmd.StartsWith("coef0"))
                    param.coef0 = atof(arg);
                else if (cmd.StartsWith("nr_class"))
                    model.nr_class = atoi(arg);
                else if (cmd.StartsWith("total_sv"))
                    model.l = atoi(arg);
                else if (cmd.StartsWith("rho"))
                {
                    int n = model.nr_class*(model.nr_class - 1)/2;
                    model.rho = new double[n];
                    var st = new SupportClass.Tokenizer(arg);
                    for (int i = 0; i < n; i++)
                        model.rho[i] = atof(st.NextToken());
                }
                else if (cmd.StartsWith("label"))
                {
                    int n = model.nr_class;
                    model.label = new int[n];
                    var st = new SupportClass.Tokenizer(arg);
                    for (int i = 0; i < n; i++)
                        model.label[i] = atoi(st.NextToken());
                }
                else if (cmd.StartsWith("probA"))
                {
                    int n = model.nr_class*(model.nr_class - 1)/2;
                    model.probA = new double[n];
                    var st = new SupportClass.Tokenizer(arg);
                    for (int i = 0; i < n; i++)
                        model.probA[i] = atof(st.NextToken());
                }
                else if (cmd.StartsWith("probB"))
                {
                    int n = model.nr_class*(model.nr_class - 1)/2;
                    model.probB = new double[n];
                    var st = new SupportClass.Tokenizer(arg);
                    for (int i = 0; i < n; i++)
                        model.probB[i] = atof(st.NextToken());
                }
                else if (cmd.StartsWith("nr_sv"))
                {
                    int n = model.nr_class;
                    model.nSV = new int[n];
                    var st = new SupportClass.Tokenizer(arg);
                    for (int i = 0; i < n; i++)
                        model.nSV[i] = atoi(st.NextToken());
                }
                else if (cmd.StartsWith("SV"))
                {
                    break;
                }
                else
                {
                    Console.Error.Write("unknown text in model file\n");
                    return null;
                }
            }

            // read sv_coef and SV

            int m = model.nr_class - 1;
            int l = model.l;
            model.sv_coef = new double[m][];
            for (int i = 0; i < m; i++)
            {
                model.sv_coef[i] = new double[l];
            }
            model.SV = new svm_node[l][];

            for (int i = 0; i < l; i++)
            {
                String line = fp.ReadLine();
                var st = new SupportClass.Tokenizer(line, " \t\n\r\f:");

                for (int k = 0; k < m; k++)
                    model.sv_coef[k][i] = atof(st.NextToken());
                int n = st.Count/2;
                model.SV[i] = new svm_node[n];
                for (int j = 0; j < n; j++)
                {
                    model.SV[i][j] = new svm_node();
                    model.SV[i][j].index = atoi(st.NextToken());
                    model.SV[i][j].value_Renamed = atof(st.NextToken());
                }
            }

            return model;
        }
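Because svm_load_model reads from a StringReader rather than a file path, loading a saved model just means wrapping the model text in a StringReader. The sketch below assumes the model was previously written to a file named svm.model (a hypothetical path) and that System.IO is available for File and StringReader.

            // hypothetical path; any source of the serialized model text will do
            string modelText = File.ReadAllText("svm.model");
            svm_model model = svm_load_model(new StringReader(modelText));
            if (model == null)
            {
                Console.Error.Write("could not parse model file\n");
            }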
Example #7
        //
        // Interface functions
        //
        public static svm_model svm_train(svm_problem prob, svm_parameter param)
        {
            var model = new svm_model();
            model.param = param;

            if (param.svm_type == svm_parameter.ONE_CLASS || param.svm_type == svm_parameter.EPSILON_SVR ||
                param.svm_type == svm_parameter.NU_SVR)
            {
                // regression or one-class-svm
                model.nr_class = 2;
                model.label = null;
                model.nSV = null;
                model.probA = null;
                model.probB = null;
                model.sv_coef = new double[1][];

                if (param.probability == 1 &&
                    (param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR))
                {
                    model.probA = new double[1];
                    model.probA[0] = svm_svr_probability(prob, param);
                }

                decision_function f = svm_train_one(prob, param, 0, 0);
                model.rho = new double[1];
                model.rho[0] = f.rho;

                int nSV = 0;
                int i;
                for (i = 0; i < prob.l; i++)
                    if (Math.Abs(f.alpha[i]) > 0)
                        ++nSV;
                model.l = nSV;
                model.SV = new svm_node[nSV][];
                model.sv_coef[0] = new double[nSV];
                int j = 0;
                for (i = 0; i < prob.l; i++)
                    if (Math.Abs(f.alpha[i]) > 0)
                    {
                        model.SV[j] = prob.x[i];
                        model.sv_coef[0][j] = f.alpha[i];
                        ++j;
                    }
            }
            else
            {
                // classification
                // find out the number of classes
                int l = prob.l;
                int max_nr_class = 16;
                int nr_class = 0;
                var label = new int[max_nr_class];
                var count = new int[max_nr_class];
                var index = new int[l];

                int i;
                for (i = 0; i < l; i++)
                {
                    //UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"'
                    var this_label = (int) prob.y[i];
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (this_label == label[j])
                        {
                            ++count[j];
                            break;
                        }
                    index[i] = j;
                    if (j == nr_class)
                    {
                        if (nr_class == max_nr_class)
                        {
                            max_nr_class *= 2;
                            var new_data = new int[max_nr_class];
                            Array.Copy(label, 0, new_data, 0, label.Length);
                            label = new_data;

                            new_data = new int[max_nr_class];
                            Array.Copy(count, 0, new_data, 0, count.Length);
                            count = new_data;
                        }
                        label[nr_class] = this_label;
                        count[nr_class] = 1;
                        ++nr_class;
                    }
                }

                // group training data of the same class

                var start = new int[nr_class];
                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + count[i - 1];

                var x = new svm_node[l][];

                for (i = 0; i < l; i++)
                {
                    x[start[index[i]]] = prob.x[i];
                    ++start[index[i]];
                }

                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + count[i - 1];

                // calculate weighted C

                var weighted_C = new double[nr_class];
                for (i = 0; i < nr_class; i++)
                    weighted_C[i] = param.C;
                for (i = 0; i < param.nr_weight; i++)
                {
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (param.weight_label[i] == label[j])
                            break;
                    if (j == nr_class)
                        Console.Error.Write("warning: class label " + param.weight_label[i] +
                                            " specified in weight is not found\n");
                    else
                        weighted_C[j] *= param.weight[i];
                }

                // train k*(k-1)/2 models

                var nonzero = new bool[l];
                for (i = 0; i < l; i++)
                    nonzero[i] = false;
                var f = new decision_function[nr_class*(nr_class - 1)/2];

                double[] probA = null, probB = null;
                if (param.probability == 1)
                {
                    probA = new double[nr_class*(nr_class - 1)/2];
                    probB = new double[nr_class*(nr_class - 1)/2];
                }

                int p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        var sub_prob = new svm_problem();
                        int si = start[i], sj = start[j];
                        int ci = count[i], cj = count[j];
                        sub_prob.l = ci + cj;
                        sub_prob.x = new svm_node[sub_prob.l][];
                        sub_prob.y = new double[sub_prob.l];
                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            sub_prob.x[k] = x[si + k];
                            sub_prob.y[k] = + 1;
                        }
                        for (k = 0; k < cj; k++)
                        {
                            sub_prob.x[ci + k] = x[sj + k];
                            sub_prob.y[ci + k] = - 1;
                        }

                        if (param.probability == 1)
                        {
                            var probAB = new double[2];
                            svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                            probA[p] = probAB[0];
                            probB[p] = probAB[1];
                        }

                        f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
                        for (k = 0; k < ci; k++)
                            if (!nonzero[si + k] && Math.Abs(f[p].alpha[k]) > 0)
                                nonzero[si + k] = true;
                        for (k = 0; k < cj; k++)
                            if (!nonzero[sj + k] && Math.Abs(f[p].alpha[ci + k]) > 0)
                                nonzero[sj + k] = true;
                        ++p;
                    }

                // build output

                model.nr_class = nr_class;

                model.label = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                    model.label[i] = label[i];

                model.rho = new double[nr_class*(nr_class - 1)/2];
                for (i = 0; i < nr_class*(nr_class - 1)/2; i++)
                    model.rho[i] = f[i].rho;

                if (param.probability == 1)
                {
                    model.probA = new double[nr_class*(nr_class - 1)/2];
                    model.probB = new double[nr_class*(nr_class - 1)/2];
                    for (i = 0; i < nr_class*(nr_class - 1)/2; i++)
                    {
                        model.probA[i] = probA[i];
                        model.probB[i] = probB[i];
                    }
                }
                else
                {
                    model.probA = null;
                    model.probB = null;
                }

                int nnz = 0;
                var nz_count = new int[nr_class];
                model.nSV = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    int nSV = 0;
                    for (int j = 0; j < count[i]; j++)
                        if (nonzero[start[i] + j])
                        {
                            ++nSV;
                            ++nnz;
                        }
                    model.nSV[i] = nSV;
                    nz_count[i] = nSV;
                }

                Console.Out.Write("Total nSV = " + nnz + "\n");

                model.l = nnz;
                model.SV = new svm_node[nnz][];
                p = 0;
                for (i = 0; i < l; i++)
                    if (nonzero[i])
                        model.SV[p++] = x[i];

                var nz_start = new int[nr_class];
                nz_start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

                model.sv_coef = new double[nr_class - 1][];
                for (i = 0; i < nr_class - 1; i++)
                    model.sv_coef[i] = new double[nnz];

                p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        // classifier (i,j): coefficients with
                        // i are in sv_coef[j-1][nz_start[i]...],
                        // j are in sv_coef[i][nz_start[j]...]

                        int si = start[i];
                        int sj = start[j];
                        int ci = count[i];
                        int cj = count[j];

                        int q = nz_start[i];
                        int k;
                        for (k = 0; k < ci; k++)
                            if (nonzero[si + k])
                                model.sv_coef[j - 1][q++] = f[p].alpha[k];
                        q = nz_start[j];
                        for (k = 0; k < cj; k++)
                            if (nonzero[sj + k])
                                model.sv_coef[i][q++] = f[p].alpha[ci + k];
                        ++p;
                    }
            }
            return model;
        }
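As a rough end-to-end sketch (the data values and sizes are invented for illustration, and the unqualified calls assume the declaring class is in scope): training amounts to filling an svm_problem with sparse svm_node rows plus matching labels and handing it to svm_train together with a parameter set like the ones configured in the constructors shown elsewhere on this page.

            // a tiny two-example problem; real data would come from the caller
            var prob = new svm_problem();
            prob.l = 2;
            prob.y = new double[] { +1, -1 };
            prob.x = new svm_node[2][];
            for (int i = 0; i < prob.l; i++)
            {
                var node = new svm_node();
                node.index = 1;
                node.value_Renamed = (i == 0) ? 1.0 : -1.0;
                prob.x[i] = new[] { node };
            }

            var param = new svm_parameter();
            param.svm_type = svm_parameter.C_SVC;
            param.kernel_type = svm_parameter.LINEAR;
            param.C = 1;
            param.eps = 1e-3;
            param.cache_size = 100;
            param.shrinking = 1;
            param.probability = 0;
            param.nr_weight = 0;
            param.weight_label = new int[0];
            param.weight = new double[0];

            svm_model model = svm_train(prob, param);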
Example #8
        // Cross-validation decision values for probability estimates
        private static void svm_binary_svc_probability(svm_problem prob, svm_parameter param, double Cp, double Cn,
                                                       double[] probAB)
        {
            int i;
            int nr_fold = 5;
            var perm = new int[prob.l];
            var dec_values = new double[prob.l];

            // random shuffle
            for (i = 0; i < prob.l; i++)
                perm[i] = i;
            for (i = 0; i < prob.l; i++)
            {
                //UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"'
                int j = i + (int) (SupportClass.Random.NextDouble()*(prob.l - i));
                do
                {
                    int _ = perm[i];
                    perm[i] = perm[j];
                    perm[j] = _;
                } while (false);
            }
            for (i = 0; i < nr_fold; i++)
            {
                int begin = i*prob.l/nr_fold;
                int end = (i + 1)*prob.l/nr_fold;
                int j, k;
                var subprob = new svm_problem();

                subprob.l = prob.l - (end - begin);
                subprob.x = new svm_node[subprob.l][];
                subprob.y = new double[subprob.l];

                k = 0;
                for (j = 0; j < begin; j++)
                {
                    subprob.x[k] = prob.x[perm[j]];
                    subprob.y[k] = prob.y[perm[j]];
                    ++k;
                }
                for (j = end; j < prob.l; j++)
                {
                    subprob.x[k] = prob.x[perm[j]];
                    subprob.y[k] = prob.y[perm[j]];
                    ++k;
                }
                int p_count = 0, n_count = 0;
                for (j = 0; j < k; j++)
                    if (subprob.y[j] > 0)
                        p_count++;
                    else
                        n_count++;

                if (p_count == 0 && n_count == 0)
                    for (j = begin; j < end; j++)
                        dec_values[perm[j]] = 0;
                else if (p_count > 0 && n_count == 0)
                    for (j = begin; j < end; j++)
                        dec_values[perm[j]] = 1;
                else if (p_count == 0 && n_count > 0)
                    for (j = begin; j < end; j++)
                        dec_values[perm[j]] = - 1;
                else
                {
                    var subparam = (svm_parameter) param.Clone();
                    subparam.probability = 0;
                    subparam.C = 1.0;
                    subparam.nr_weight = 2;
                    subparam.weight_label = new int[2];
                    subparam.weight = new double[2];
                    subparam.weight_label[0] = + 1;
                    subparam.weight_label[1] = - 1;
                    subparam.weight[0] = Cp;
                    subparam.weight[1] = Cn;
                    svm_model submodel = svm_train(subprob, subparam);
                    for (j = begin; j < end; j++)
                    {
                        var dec_value = new double[1];
                        svm_predict_values(submodel, prob.x[perm[j]], dec_value);
                        dec_values[perm[j]] = dec_value[0];
                        // ensure +1 -1 order; reason not using CV subroutine
                        dec_values[perm[j]] *= submodel.label[0];
                    }
                }
            }
            sigmoid_train(prob.l, dec_values, prob.y, probAB);
        }
Example #9
        private static void solve_nu_svr(svm_problem prob, svm_parameter param, double[] alpha, Solver.SolutionInfo si)
        {
            int l = prob.l;
            double C = param.C;
            var alpha2 = new double[2*l];
            var linear_term = new double[2*l];
            var y = new sbyte[2*l];
            int i;

            double sum = C*param.nu*l/2;
            for (i = 0; i < l; i++)
            {
                alpha2[i] = alpha2[i + l] = Math.Min(sum, C);
                sum -= alpha2[i];

                linear_term[i] = - prob.y[i];
                y[i] = 1;

                linear_term[i + l] = prob.y[i];
                y[i + l] = - 1;
            }

            var s = new Solver_NU();
            s.Solve(2*l, new SVR_Q(prob, param), linear_term, y, alpha2, C, C, param.eps, si, param.shrinking);

            Console.Out.Write("epsilon = " + (- si.r) + "\n");

            for (i = 0; i < l; i++)
                alpha[i] = alpha2[i] - alpha2[i + l];
        }
Example #10
        private static void solve_one_class(svm_problem prob, svm_parameter param, double[] alpha,
                                            Solver.SolutionInfo si)
        {
            int l = prob.l;
            var zeros = new double[l];
            var ones = new sbyte[l];
            int i;

            //UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"'
            var n = (int) (param.nu*prob.l); // # of alpha's at upper bound

            for (i = 0; i < n; i++)
                alpha[i] = 1;
            alpha[n] = param.nu*prob.l - n;
            for (i = n + 1; i < l; i++)
                alpha[i] = 0;

            for (i = 0; i < l; i++)
            {
                zeros[i] = 0;
                ones[i] = 1;
            }

            var s = new Solver();
            s.Solve(l, new ONE_CLASS_Q(prob, param), zeros, ones, alpha, 1.0, 1.0, param.eps, si, param.shrinking);
        }
        /// <summary>
        /// Load the models. 
        /// </summary>
        /// <param name="xmlin">Where to read the models from.</param>
        /// <param name="network">Where the models are read into.</param>
        private void HandleModels(ReadXML xmlin, SVMNetwork network)
        {

            int index = 0;
            while (xmlin.ReadToTag())
            {
                if (xmlin.IsIt(SVMNetworkPersistor.TAG_MODEL, true))
                {
                    svm_parameter param = new svm_parameter();
                    svm_model model = new svm_model();
                    model.param = param;
                    network.Models[index] = model;
                    HandleModel(xmlin, network.Models[index]);
                    index++;
                }
                else if (xmlin.IsIt(SVMNetworkPersistor.TAG_MODELS, false))
                {
                    break;
                }
            }

        }
        /// <summary>
        /// Evaluate the error for the specified model.
        /// </summary>
        ///
        /// <param name="param">The parameters for the SVM.</param>
        /// <param name="prob">The problem to evaluate.</param>
        /// <param name="target">The output values from the SVM.</param>
        /// <returns>The calculated error.</returns>
        private static double Evaluate(svm_parameter param, svm_problem prob,
                                double[] target)
        {
            int totalCorrect = 0;

            var error = new ErrorCalculation();

            if ((param.svm_type == svm_parameter.EPSILON_SVR)
                || (param.svm_type == svm_parameter.NU_SVR))
            {
                for (int i = 0; i < prob.l; i++)
                {
                    double ideal = prob.y[i];
                    double actual = target[i];
                    error.UpdateError(actual, ideal);
                }
                return error.Calculate();
            }
            for (int i = 0; i < prob.l; i++)
            {
                if (target[i] == prob.y[i])
                {
                    ++totalCorrect;
                }
            }

            return Format.HundredPercent*totalCorrect/prob.l;
        }
Example #13
 private static double x308cb2f3483de2a6(svm_parameter x0d173b5435b4d6ad, svm_problem xdee3898b83df48b4, double[] x11d58b056c032b03)
 {
     int num = 0;
     var calculation = new ErrorCalculation();

     if ((x0d173b5435b4d6ad.svm_type != 3) && (x0d173b5435b4d6ad.svm_type != 4))
     {
         // classification: percentage of predictions that match the expected label
         for (int num5 = 0; num5 < xdee3898b83df48b4.l; num5++)
         {
             if (x11d58b056c032b03[num5] == xdee3898b83df48b4.y[num5])
             {
                 num++;
             }
         }
         return (100.0 * num) / ((double) xdee3898b83df48b4.l);
     }

     // regression: accumulate the error between actual and ideal outputs
     for (int num2 = 0; num2 < xdee3898b83df48b4.l; num2++)
     {
         double num3 = xdee3898b83df48b4.y[num2];
         double actual = x11d58b056c032b03[num2];
         calculation.UpdateError(actual, num3);
     }
     return calculation.Calculate();
 }
Example #14
        /// <summary>
        /// Construct an SVM network.
        /// </summary>
        /// <param name="inputCount">The input count.</param>
        /// <param name="outputCount">The output count.</param>
        /// <param name="svmType">The type of SVM.</param>
        /// <param name="kernelType">The SVM kernal type.</param>
        public SVMNetwork(int inputCount, int outputCount, SVMType svmType,
                KernelType kernelType)
        {
            this.inputCount = inputCount;
            this.outputCount = outputCount;
            this.kernelType = kernelType;
            this.svmType = svmType;

            models = new svm_model[outputCount];
            parameters = new svm_parameter[outputCount];

            for (int i = 0; i < outputCount; i++)
            {
                parameters[i] = new svm_parameter();

                switch (svmType)
                {
                    case SVMType.SupportVectorClassification:
                        parameters[i].svm_type = svm_parameter.C_SVC;
                        break;
                    case SVMType.NewSupportVectorClassification:
                        parameters[i].svm_type = svm_parameter.NU_SVC;
                        break;
                    case SVMType.SupportVectorOneClass:
                        parameters[i].svm_type = svm_parameter.ONE_CLASS;
                        break;
                    case SVMType.EpsilonSupportVectorRegression:
                        parameters[i].svm_type = svm_parameter.EPSILON_SVR;
                        break;
                    case SVMType.NewSupportVectorRegression:
                        parameters[i].svm_type = svm_parameter.NU_SVR;
                        break;
                }

                switch (kernelType)
                {
                    case KernelType.Linear:
                        parameters[i].kernel_type = svm_parameter.LINEAR;
                        break;
                    case KernelType.Poly:
                        parameters[i].kernel_type = svm_parameter.POLY;
                        break;
                    case KernelType.RadialBasisFunction:
                        parameters[i].kernel_type = svm_parameter.RBF;
                        break;
                    case KernelType.Sigmoid:
                        parameters[i].kernel_type = svm_parameter.SIGMOID;
                        break;
                }

                // note: this line forces the RBF kernel, overriding whatever the kernelType switch above selected
                parameters[i].kernel_type = svm_parameter.RBF;
                parameters[i].degree = 3;
                parameters[i].coef0 = 0;
                parameters[i].nu = 0.5;
                parameters[i].cache_size = 100;
                parameters[i].C = 1;
                parameters[i].eps = 1e-3;
                parameters[i].p = 0.1;
                parameters[i].shrinking = 1;
                parameters[i].probability = 0;
                parameters[i].nr_weight = 0;
                parameters[i].weight_label = new int[0];
                parameters[i].weight = new double[0];
                parameters[i].gamma = 1.0 / inputCount;
            }
        }
Example #15
        public SupportVectorMachine(int theInputCount, Encog.ML.SVM.SVMType svmType, Encog.ML.SVM.KernelType kernelType)
        {
            this._inputCount = theInputCount;
            this._paras = new svm_parameter();

            switch (svmType)
            {
                case Encog.ML.SVM.SVMType.SupportVectorClassification:
                    this._paras.svm_type = 0;
                    break;

                case Encog.ML.SVM.SVMType.NewSupportVectorClassification:
                    this._paras.svm_type = 1;
                    break;

                case Encog.ML.SVM.SVMType.SupportVectorOneClass:
                    this._paras.svm_type = 2;
                    break;

                case Encog.ML.SVM.SVMType.EpsilonSupportVectorRegression:
                    this._paras.svm_type = 3;
                    break;

                case Encog.ML.SVM.SVMType.NewSupportVectorRegression:
                    this._paras.svm_type = 4;
                    break;

                default:
                    throw new NeuralNetworkError("Invalid svm type");
            }

            switch (kernelType)
            {
                case Encog.ML.SVM.KernelType.Linear:
                    this._paras.kernel_type = 0;
                    break;

                case Encog.ML.SVM.KernelType.Poly:
                    this._paras.kernel_type = 1;
                    break;

                case Encog.ML.SVM.KernelType.RadialBasisFunction:
                    this._paras.kernel_type = 2;
                    break;

                case Encog.ML.SVM.KernelType.Sigmoid:
                    this._paras.kernel_type = 3;
                    break;

                default:
                    throw new NeuralNetworkError("Invalid kernel type");
            }

            this._paras.degree = 3.0;
            this._paras.coef0 = 0.0;
            this._paras.nu = 0.5;
            this._paras.cache_size = 100.0;
            this._paras.C = 1.0;
            this._paras.eps = 0.001;
            this._paras.p = 0.1;
            this._paras.shrinking = 1;
            this._paras.probability = 0;
            this._paras.nr_weight = 0;
            this._paras.weight_label = new int[0];
            this._paras.weight = new double[0];
            this._paras.gamma = 1.0 / ((double) this._inputCount);
        }
Example #16
        //
        // construct and solve various formulations
        //
        private static void solve_c_svc(svm_problem prob, svm_parameter param, double[] alpha, Solver.SolutionInfo si,
                                        double Cp, double Cn)
        {
            int l = prob.l;
            var minus_ones = new double[l];
            var y = new sbyte[l];

            int i;

            for (i = 0; i < l; i++)
            {
                alpha[i] = 0;
                minus_ones[i] = - 1;
                if (prob.y[i] > 0)
                    y[i] = (+ 1);
                else
                    y[i] = - 1;
            }

            var s = new Solver();
            s.Solve(l, new SVC_Q(prob, param, y), minus_ones, y, alpha, Cp, Cn, param.eps, si, param.shrinking);

            double sum_alpha = 0;
            for (i = 0; i < l; i++)
                sum_alpha += alpha[i];

            if (Cp == Cn)
                Console.Out.Write("nu = " + sum_alpha/(Cp*prob.l) + "\n");

            for (i = 0; i < l; i++)
                alpha[i] *= y[i];
        }
Example #17
        private static void solve_nu_svc(svm_problem prob, svm_parameter param, double[] alpha, Solver.SolutionInfo si)
        {
            int i;
            int l = prob.l;
            double nu = param.nu;

            var y = new sbyte[l];

            for (i = 0; i < l; i++)
                if (prob.y[i] > 0)
                    y[i] = (+ 1);
                else
                    y[i] = - 1;

            double sum_pos = nu*l/2;
            double sum_neg = nu*l/2;

            for (i = 0; i < l; i++)
                if (y[i] == + 1)
                {
                    alpha[i] = Math.Min(1.0, sum_pos);
                    sum_pos -= alpha[i];
                }
                else
                {
                    alpha[i] = Math.Min(1.0, sum_neg);
                    sum_neg -= alpha[i];
                }

            var zeros = new double[l];

            for (i = 0; i < l; i++)
                zeros[i] = 0;

            var s = new Solver_NU();
            s.Solve(l, new SVC_Q(prob, param, y), zeros, y, alpha, 1.0, 1.0, param.eps, si, param.shrinking);
            double r = si.r;

            Console.Out.Write("C = " + 1/r + "\n");

            for (i = 0; i < l; i++)
                alpha[i] *= y[i]/r;

            si.rho /= r;
            si.obj /= (r*r);
            si.upper_bound_p = 1/r;
            si.upper_bound_n = 1/r;
        }
 /// <summary>
 /// Construct the SVM.
 /// </summary>
 ///
 public SupportVectorMachine()
 {
     _paras = new svm_parameter();
 }
Example #19
        private static void solve_epsilon_svr(svm_problem prob, svm_parameter param, double[] alpha,
                                              Solver.SolutionInfo si)
        {
            int l = prob.l;
            var alpha2 = new double[2*l];
            var linear_term = new double[2*l];
            var y = new sbyte[2*l];
            int i;

            for (i = 0; i < l; i++)
            {
                alpha2[i] = 0;
                linear_term[i] = param.p - prob.y[i];
                y[i] = 1;

                alpha2[i + l] = 0;
                linear_term[i + l] = param.p + prob.y[i];
                y[i + l] = - 1;
            }

            var s = new Solver();
            s.Solve(2*l, new SVR_Q(prob, param), linear_term, y, alpha2, param.C, param.C, param.eps, si,
                    param.shrinking);

            double sum_alpha = 0;
            for (i = 0; i < l; i++)
            {
                alpha[i] = alpha2[i] - alpha2[i + l];
                sum_alpha += Math.Abs(alpha[i]);
            }
            Console.Out.Write("nu = " + sum_alpha/(param.C*l) + "\n");
        }
        /// <summary>
        /// Construct an SVM network.
        /// </summary>
        ///
        /// <param name="theInputCount">The input count.</param>
        /// <param name="svmType">The type of SVM.</param>
        /// <param name="kernelType">The SVM kernal type.</param>
        public SupportVectorMachine(int theInputCount, SVMType svmType,
            KernelType kernelType)
        {
            _inputCount = theInputCount;

            _paras = new svm_parameter();

            switch (svmType)
            {
                case SVMType.SupportVectorClassification:
                    _paras.svm_type = svm_parameter.C_SVC;
                    break;
                case SVMType.NewSupportVectorClassification:
                    _paras.svm_type = svm_parameter.NU_SVC;
                    break;
                case SVMType.SupportVectorOneClass:
                    _paras.svm_type = svm_parameter.ONE_CLASS;
                    break;
                case SVMType.EpsilonSupportVectorRegression:
                    _paras.svm_type = svm_parameter.EPSILON_SVR;
                    break;
                case SVMType.NewSupportVectorRegression:
                    _paras.svm_type = svm_parameter.NU_SVR;
                    break;
                default:
                    throw new NeuralNetworkError("Invalid svm type");
            }

            switch (kernelType)
            {
                case KernelType.Linear:
                    _paras.kernel_type = svm_parameter.LINEAR;
                    break;
                case KernelType.Poly:
                    _paras.kernel_type = svm_parameter.POLY;
                    break;
                case KernelType.RadialBasisFunction:
                    _paras.kernel_type = svm_parameter.RBF;
                    break;
                case KernelType.Sigmoid:
                    _paras.kernel_type = svm_parameter.SIGMOID;
                    break;
                    /*case Encog.ML.SVM.KernelType.Precomputed:
                this.paras.kernel_type = Encog.MathUtil.LIBSVM.svm_parameter.PRECOMPUTED;
                break;*/
                default:
                    throw new NeuralNetworkError("Invalid kernel type");
            }

            // params[i].kernel_type = svm_parameter.RBF;
            _paras.degree = DefaultDegree;
            _paras.coef0 = 0;
            _paras.nu = DefaultNu;
            _paras.cache_size = DefaultCacheSize;
            _paras.C = 1;
            _paras.eps = DefaultEps;
            _paras.p = DefaultP;
            _paras.shrinking = 1;
            _paras.probability = 0;
            _paras.nr_weight = 0;
            _paras.weight_label = new int[0];
            _paras.weight = new double[0];
            _paras.gamma = 1.0d/_inputCount;
        }
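For comparison with the low-level svm_parameter setup above, the Encog wrapper is constructed in one line; the argument values below are purely illustrative.

            // two inputs, classic C-SVC with an RBF kernel (illustrative values)
            var machine = new SupportVectorMachine(2,
                SVMType.SupportVectorClassification,
                KernelType.RadialBasisFunction);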
Example #21
        internal static decision_function svm_train_one(svm_problem prob, svm_parameter param, double Cp, double Cn)
        {
            var alpha = new double[prob.l];
            var si = new Solver.SolutionInfo();
            switch (param.svm_type)
            {
                case svm_parameter.C_SVC:
                    solve_c_svc(prob, param, alpha, si, Cp, Cn);
                    break;

                case svm_parameter.NU_SVC:
                    solve_nu_svc(prob, param, alpha, si);
                    break;

                case svm_parameter.ONE_CLASS:
                    solve_one_class(prob, param, alpha, si);
                    break;

                case svm_parameter.EPSILON_SVR:
                    solve_epsilon_svr(prob, param, alpha, si);
                    break;

                case svm_parameter.NU_SVR:
                    solve_nu_svr(prob, param, alpha, si);
                    break;
            }

            Console.Out.Write("obj = " + si.obj + ", rho = " + si.rho + "\n");

            // output SVs

            int nSV = 0;
            int nBSV = 0;
            for (int i = 0; i < prob.l; i++)
            {
                if (Math.Abs(alpha[i]) > 0)
                {
                    ++nSV;
                    if (prob.y[i] > 0)
                    {
                        if (Math.Abs(alpha[i]) >= si.upper_bound_p)
                            ++nBSV;
                    }
                    else
                    {
                        if (Math.Abs(alpha[i]) >= si.upper_bound_n)
                            ++nBSV;
                    }
                }
            }

            Console.Out.Write("nSV = " + nSV + ", nBSV = " + nBSV + "\n");

            var f = new decision_function();
            f.alpha = alpha;
            f.rho = si.rho;
            return f;
        }
        /// <summary>
        /// Construct an SVM from a model.
        /// </summary>
        ///
        /// <param name="theModel">The model.</param>
        public SupportVectorMachine(svm_model theModel)
        {
            _model = theModel;
            _paras = _model.param;
            _inputCount = 0;

            // determine the input count
            foreach (var element  in  _model.SV)
            {
                foreach (svm_node t in element)
                {
                    _inputCount = Math.Max(t.index, _inputCount);
                }
            }

            //
        }
Example #23
        // Return parameter of a Laplace distribution 
        private static double svm_svr_probability(svm_problem prob, svm_parameter param)
        {
            int i;
            int nr_fold = 5;
            var ymv = new double[prob.l];
            double mae = 0;

            var newparam = (svm_parameter) param.Clone();
            newparam.probability = 0;
            svm_cross_validation(prob, newparam, nr_fold, ymv);
            for (i = 0; i < prob.l; i++)
            {
                ymv[i] = prob.y[i] - ymv[i];
                mae += Math.Abs(ymv[i]);
            }
            mae /= prob.l;
            double std = Math.Sqrt(2*mae*mae);
            int count = 0;
            mae = 0;
            for (i = 0; i < prob.l; i++)
                if (Math.Abs(ymv[i]) > 5*std)
                    count = count + 1;
                else
                    mae += Math.Abs(ymv[i]);
            mae /= (prob.l - count);
            Console.Error.Write(
                "Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" +
                mae + "\n");
            return mae;
        }
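For reference, the sigma printed above is the outlier-trimmed mean absolute cross-validation residual, which is the usual scale estimate for the Laplace density quoted in the message. A sketch of the relationship, using the quantities the code computes (residuals $z_i = y_i - \hat{y}_i$ from cross-validation):

$$ p(z) = \frac{1}{2\sigma}\,e^{-|z|/\sigma}, \qquad \hat{\sigma} = \frac{1}{l - |O|}\sum_{i \notin O} |z_i|, \qquad O = \{\, i : |z_i| > 5\,\mathrm{std} \,\}, \quad \mathrm{std} = \sqrt{2}\,\overline{|z|} $$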
Example #24
 internal SVC_Q(svm_problem prob, svm_parameter param, sbyte[] y_) : base(prob.l, prob.x, param)
 {
     y = new sbyte[y_.Length];
     y_.CopyTo(y, 0);
     //UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"'
     cache = new Cache(prob.l, (int) (param.cache_size*(1 << 20)));
 }
Example #25
        public static void svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target)
        {
            int i;
            var perm = new int[prob.l];

            // random shuffle
            for (i = 0; i < prob.l; i++)
                perm[i] = i;
            for (i = 0; i < prob.l; i++)
            {
                //UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"'
                int j = i + (int) (SupportClass.Random.NextDouble()*(prob.l - i));
                do
                {
                    int _ = perm[i];
                    perm[i] = perm[j];
                    perm[j] = _;
                } while (false);
            }
            for (i = 0; i < nr_fold; i++)
            {
                int begin = i*prob.l/nr_fold;
                int end = (i + 1)*prob.l/nr_fold;
                int j, k;
                var subprob = new svm_problem();

                subprob.l = prob.l - (end - begin);
                subprob.x = new svm_node[subprob.l][];
                subprob.y = new double[subprob.l];

                k = 0;
                for (j = 0; j < begin; j++)
                {
                    subprob.x[k] = prob.x[perm[j]];
                    subprob.y[k] = prob.y[perm[j]];
                    ++k;
                }
                for (j = end; j < prob.l; j++)
                {
                    subprob.x[k] = prob.x[perm[j]];
                    subprob.y[k] = prob.y[perm[j]];
                    ++k;
                }
                svm_model submodel = svm_train(subprob, param);
                if (param.probability == 1 &&
                    (param.svm_type == svm_parameter.C_SVC || param.svm_type == svm_parameter.NU_SVC))
                {
                    var prob_estimates = new double[svm_get_nr_class(submodel)];
                    for (j = begin; j < end; j++)
                        target[perm[j]] = svm_predict_probability(submodel, prob.x[perm[j]], prob_estimates);
                }
                else
                    for (j = begin; j < end; j++)
                        target[perm[j]] = svm_predict(submodel, prob.x[perm[j]]);
            }
        }
Example #26
 internal ONE_CLASS_Q(svm_problem prob, svm_parameter param) : base(prob.l, prob.x, param)
 {
     //UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"'
     cache = new Cache(prob.l, (int) (param.cache_size*(1 << 20)));
 }
Example #27
        internal Kernel(int l, svm_node[][] x_, svm_parameter param)
        {
            kernel_type = param.kernel_type;
            degree = param.degree;
            gamma = param.gamma;
            coef0 = param.coef0;

            x = (svm_node[][]) x_.Clone();

            if (kernel_type == svm_parameter.RBF)
            {
                x_square = new double[l];
                for (int i = 0; i < l; i++)
                    x_square[i] = dot(x[i], x[i]);
            }
            else
                x_square = null;
        }
Example #28
 internal SVR_Q(svm_problem prob, svm_parameter param) : base(prob.l, prob.x, param)
 {
     l = prob.l;
     //UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"'
     cache = new Cache(l, (int) (param.cache_size*(1 << 20)));
     sign = new sbyte[2*l];
     index = new int[2*l];
     for (int k = 0; k < l; k++)
     {
         sign[k] = 1;
         sign[k + l] = - 1;
         index[k] = k;
         index[k + l] = k;
     }
     buffer = new float[2][];
     for (int i = 0; i < 2; i++)
     {
         buffer[i] = new float[2*l];
     }
     next_buffer = 0;
 }
Example #29
        internal static double k_function(svm_node[] x, svm_node[] y, svm_parameter param)
        {
            switch (param.kernel_type)
            {
                case svm_parameter.LINEAR:
                    return dot(x, y);

                case svm_parameter.POLY:
                    return Math.Pow(param.gamma*dot(x, y) + param.coef0, param.degree);

                case svm_parameter.RBF:
                    {
                        double sum = 0;
                        int xlen = x.Length;
                        int ylen = y.Length;
                        int i = 0;
                        int j = 0;
                        while (i < xlen && j < ylen)
                        {
                            if (x[i].index == y[j].index)
                            {
                                double d = x[i++].value_Renamed - y[j++].value_Renamed;
                                sum += d*d;
                            }
                            else if (x[i].index > y[j].index)
                            {
                                sum += y[j].value_Renamed*y[j].value_Renamed;
                                ++j;
                            }
                            else
                            {
                                sum += x[i].value_Renamed*x[i].value_Renamed;
                                ++i;
                            }
                        }

                        while (i < xlen)
                        {
                            sum += x[i].value_Renamed*x[i].value_Renamed;
                            ++i;
                        }

                        while (j < ylen)
                        {
                            sum += y[j].value_Renamed*y[j].value_Renamed;
                            ++j;
                        }

                        return Math.Exp((- param.gamma)*sum);
                    }

                case svm_parameter.SIGMOID:
                    return tanh(param.gamma*dot(x, y) + param.coef0);

                default:
                    return 0; // java
            }
        }
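The four kernel_type cases of k_function implement the usual libsvm kernels, written out here for reference (dot is the sparse dot product used above; gamma, coef0 and degree come from svm_parameter):

$$ K_{\mathrm{linear}}(x,y) = x \cdot y, \quad K_{\mathrm{poly}}(x,y) = (\gamma\, x \cdot y + c_0)^{d}, \quad K_{\mathrm{RBF}}(x,y) = e^{-\gamma \lVert x-y \rVert^2}, \quad K_{\mathrm{sigmoid}}(x,y) = \tanh(\gamma\, x \cdot y + c_0) $$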
Example #30
        internal static double xc60aa42cfbd0c1ed(svm_node[] x08db3aeabb253cb1, svm_node[] x1e218ceaee1bb583, svm_parameter x0d173b5435b4d6ad)
        {
            switch (x0d173b5435b4d6ad.kernel_type)
            {
                case 0:
                    return x99240096a9e3842c(x08db3aeabb253cb1, x1e218ceaee1bb583);

                case 1:
                    return Math.Pow((x0d173b5435b4d6ad.gamma * x99240096a9e3842c(x08db3aeabb253cb1, x1e218ceaee1bb583)) + x0d173b5435b4d6ad.coef0, x0d173b5435b4d6ad.degree);

                case 2:
                    {
                        double num = 0.0;
                        int length = x08db3aeabb253cb1.Length;
                        int num3 = x1e218ceaee1bb583.Length;
                        int num4 = 0;
                        int num5 = 0;
                        while ((num4 < length) && (num5 < num3))
                        {
                            if (x08db3aeabb253cb1[num4].index == x1e218ceaee1bb583[num5].index)
                            {
                                double num6 = x08db3aeabb253cb1[num4++].value_Renamed - x1e218ceaee1bb583[num5++].value_Renamed;
                                num += num6 * num6;
                            }
                            else if (x08db3aeabb253cb1[num4].index > x1e218ceaee1bb583[num5].index)
                            {
                                num += x1e218ceaee1bb583[num5].value_Renamed * x1e218ceaee1bb583[num5].value_Renamed;
                                num5++;
                            }
                            else
                            {
                                num += x08db3aeabb253cb1[num4].value_Renamed * x08db3aeabb253cb1[num4].value_Renamed;
                                num4++;
                            }
                        }

                        while (num4 < length)
                        {
                            num += x08db3aeabb253cb1[num4].value_Renamed * x08db3aeabb253cb1[num4].value_Renamed;
                            num4++;
                        }

                        while (num5 < num3)
                        {
                            num += x1e218ceaee1bb583[num5].value_Renamed * x1e218ceaee1bb583[num5].value_Renamed;
                            num5++;
                        }

                        return Math.Exp(-x0d173b5435b4d6ad.gamma * num);
                    }

                case 3:
                    return xcb156489dc62ed23((x0d173b5435b4d6ad.gamma * x99240096a9e3842c(x08db3aeabb253cb1, x1e218ceaee1bb583)) + x0d173b5435b4d6ad.coef0);

                default:
                    return 0.0;
            }
        }