Example #1
 internal xf094e3229d63c9be(int l, svm_node[][] x_, svm_parameter param)
 {
     this.xbcd9612004129925 = param.kernel_type;
     this.x83735b10b6a3d76d = param.degree;
     this.xc7c4e9c099884228 = param.gamma;
     this.x987b3666f8a37f90 = param.coef0;
     this.x08db3aeabb253cb1 = (svm_node[][]) x_.Clone();

     if (this.xbcd9612004129925 != 2) // kernel_type 2 is the RBF kernel
     {
         this.x4f9e236643454e9f = null;
         return;
     }

     // RBF kernel: precompute dot(x[i], x[i]) for every training vector
     this.x4f9e236643454e9f = new double[l];
     for (int num = 0; num < l; num++)
     {
         this.x4f9e236643454e9f[num] = x99240096a9e3842c(this.x08db3aeabb253cb1[num], this.x08db3aeabb253cb1[num]);
     }
 }
Example #2
        internal static double k_function(svm_node[] x, svm_node[] y, svm_parameter param)
        {
            switch (param.kernel_type)
            {
                case svm_parameter.LINEAR:
                    return dot(x, y);

                case svm_parameter.POLY:
                    return Math.Pow(param.gamma*dot(x, y) + param.coef0, param.degree);

                case svm_parameter.RBF:
                    {
                        double sum = 0;
                        int xlen = x.Length;
                        int ylen = y.Length;
                        int i = 0;
                        int j = 0;
                        while (i < xlen && j < ylen)
                        {
                            if (x[i].index == y[j].index)
                            {
                                double d = x[i++].value_Renamed - y[j++].value_Renamed;
                                sum += d*d;
                            }
                            else if (x[i].index > y[j].index)
                            {
                                sum += y[j].value_Renamed*y[j].value_Renamed;
                                ++j;
                            }
                            else
                            {
                                sum += x[i].value_Renamed*x[i].value_Renamed;
                                ++i;
                            }
                        }

                        while (i < xlen)
                        {
                            sum += x[i].value_Renamed*x[i].value_Renamed;
                            ++i;
                        }

                        while (j < ylen)
                        {
                            sum += y[j].value_Renamed*y[j].value_Renamed;
                            ++j;
                        }

                        return Math.Exp((- param.gamma)*sum);
                    }

                case svm_parameter.SIGMOID:
                    return tanh(param.gamma*dot(x, y) + param.coef0);

                default:
                    return 0; // java
            }
        }
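A minimal usage sketch for k_function, assuming an RBF-configured svm_parameter and two hand-built sparse vectors; the feature values are made up for illustration. Since k_function is internal, the call is only valid from inside the library's assembly, and it is qualified with Kernel as in Example #7.

    var param = new svm_parameter();
    param.kernel_type = svm_parameter.RBF;
    param.gamma = 0.5;

    var x = new[]
    {
        new svm_node { index = 1, value_Renamed = 0.2 },
        new svm_node { index = 3, value_Renamed = -1.0 }
    };
    var y = new[]
    {
        new svm_node { index = 1, value_Renamed = 0.1 },
        new svm_node { index = 2, value_Renamed = 0.7 }
    };

    // RBF: exp(-gamma * ||x - y||^2); indices missing from one vector count as zeros
    double k = Kernel.k_function(x, y, param);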
Example #3
 internal static double dot(svm_node[] x, svm_node[] y)
 {
     double sum = 0;
     int xlen = x.Length;
     int ylen = y.Length;
     int i = 0;
     int j = 0;
     while (i < xlen && j < ylen)
     {
         if (x[i].index == y[j].index)
             sum += x[i++].value_Renamed*y[j++].value_Renamed;
         else
         {
             if (x[i].index > y[j].index)
                 ++j;
             else
                 ++i;
         }
     }
     return sum;
 }
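A small sketch of the sparse dot product: only indices present in both vectors contribute, and the values are made up. dot is internal, so this is only callable from inside the assembly; it is written as Kernel.dot on the assumption that it is declared in the Kernel class, which the constructor in Example #4 suggests.

    var a = new[]
    {
        new svm_node { index = 1, value_Renamed = 2.0 },
        new svm_node { index = 4, value_Renamed = 3.0 }
    };
    var b = new[]
    {
        new svm_node { index = 1, value_Renamed = 0.5 },
        new svm_node { index = 2, value_Renamed = 9.0 }
    };

    // Only index 1 appears in both vectors, so the result is 2.0 * 0.5 = 1.0.
    double d = Kernel.dot(a, b);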
Example #4
        internal Kernel(int l, svm_node[][] x_, svm_parameter param)
        {
            kernel_type = param.kernel_type;
            degree = param.degree;
            gamma = param.gamma;
            coef0 = param.coef0;

            x = (svm_node[][]) x_.Clone();

            if (kernel_type == svm_parameter.RBF)
            {
                x_square = new double[l];
                for (int i = 0; i < l; i++)
                    x_square[i] = dot(x[i], x[i]);
            }
            else
                x_square = null;
        }
Example #5
        public static double svm_predict_probability(svm_model model, svm_node[] x, double[] prob_estimates)
        {
            if ((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
                model.probA != null && model.probB != null)
            {
                int i;
                int nr_class = model.nr_class;
                var dec_values = new double[nr_class*(nr_class - 1)/2];
                svm_predict_values(model, x, dec_values);

                double min_prob = 1e-7;
                var tmpArray = new double[nr_class][];
                for (int i2 = 0; i2 < nr_class; i2++)
                {
                    tmpArray[i2] = new double[nr_class];
                }
                double[][] pairwise_prob = tmpArray;

                int k = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        pairwise_prob[i][j] =
                            Math.Min(
                                Math.Max(sigmoid_predict(dec_values[k], model.probA[k], model.probB[k]), min_prob),
                                1 - min_prob);
                        pairwise_prob[j][i] = 1 - pairwise_prob[i][j];
                        k++;
                    }
                multiclass_probability(nr_class, pairwise_prob, prob_estimates);

                int prob_max_idx = 0;
                for (i = 1; i < nr_class; i++)
                    if (prob_estimates[i] > prob_estimates[prob_max_idx])
                        prob_max_idx = i;
                return model.label[prob_max_idx];
            }
            else
                return svm_predict(model, x);
        }
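A hedged usage sketch for svm_predict_probability: model and sample are placeholders for a trained C_SVC/NU_SVC model (trained with probability == 1) and a sparse input vector; prob_estimates must hold nr_class entries. The call is written unqualified, as it appears inside the library.

    var prob_estimates = new double[model.nr_class];
    double predictedLabel = svm_predict_probability(model, sample, prob_estimates);

    // prob_estimates[i] is the estimated probability of class model.label[i]
    for (int c = 0; c < model.nr_class; c++)
        Console.Out.Write("P(" + model.label[c] + ") = " + prob_estimates[c] + "\n");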
Example #6
        public static double svm_predict(svm_model model, svm_node[] x)
        {
            if (model.param.svm_type == svm_parameter.ONE_CLASS || model.param.svm_type == svm_parameter.EPSILON_SVR ||
                model.param.svm_type == svm_parameter.NU_SVR)
            {
                var res = new double[1];
                svm_predict_values(model, x, res);

                if (model.param.svm_type == svm_parameter.ONE_CLASS)
                    return (res[0] > 0) ? 1 : - 1;
                else
                    return res[0];
            }
            else
            {
                int i;
                int nr_class = model.nr_class;
                var dec_values = new double[nr_class*(nr_class - 1)/2];
                svm_predict_values(model, x, dec_values);

                var vote = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                    vote[i] = 0;
                int pos = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        if (dec_values[pos++] > 0)
                            ++vote[i];
                        else
                            ++vote[j];
                    }

                int vote_max_idx = 0;
                for (i = 1; i < nr_class; i++)
                    if (vote[i] > vote[vote_max_idx])
                        vote_max_idx = i;
                return model.label[vote_max_idx];
            }
        }
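A minimal sketch of calling svm_predict, assuming model came from svm_train and using a hand-built sparse sample with placeholder values.

    var sample = new[]
    {
        new svm_node { index = 1, value_Renamed = 0.25 },
        new svm_node { index = 2, value_Renamed = -0.5 }
    };

    // ONE_CLASS returns +1/-1, EPSILON_SVR/NU_SVR return the regression value,
    // otherwise the winning class label from model.label is returned.
    double output = svm_predict(model, sample);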
Example #7
        public static void svm_predict_values(svm_model model, svm_node[] x, double[] dec_values)
        {
            if (model.param.svm_type == svm_parameter.ONE_CLASS || model.param.svm_type == svm_parameter.EPSILON_SVR ||
                model.param.svm_type == svm_parameter.NU_SVR)
            {
                double[] sv_coef = model.sv_coef[0];
                double sum = 0;
                for (int i = 0; i < model.l; i++)
                    sum += sv_coef[i]*Kernel.k_function(x, model.SV[i], model.param);
                sum -= model.rho[0];
                dec_values[0] = sum;
            }
            else
            {
                int i;
                int nr_class = model.nr_class;
                int l = model.l;

                var kvalue = new double[l];
                for (i = 0; i < l; i++)
                    kvalue[i] = Kernel.k_function(x, model.SV[i], model.param);

                var start = new int[nr_class];
                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + model.nSV[i - 1];

                int p = 0;
                int pos = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        double sum = 0;
                        int si = start[i];
                        int sj = start[j];
                        int ci = model.nSV[i];
                        int cj = model.nSV[j];

                        int k;
                        double[] coef1 = model.sv_coef[j - 1];
                        double[] coef2 = model.sv_coef[i];
                        for (k = 0; k < ci; k++)
                            sum += coef1[si + k]*kvalue[si + k];
                        for (k = 0; k < cj; k++)
                            sum += coef2[sj + k]*kvalue[sj + k];
                        sum -= model.rho[p++];
                        dec_values[pos++] = sum;
                    }
            }
        }
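A sketch of obtaining the raw pairwise decision values, assuming a classification model; model and sample are placeholders. For ONE_CLASS and the SVR types a single decision value is produced instead.

    int nr_class = model.nr_class;
    var dec_values = new double[nr_class * (nr_class - 1) / 2];
    svm_predict_values(model, sample, dec_values);
    // dec_values[k] is the decision value of the (i, j) pairwise classifier,
    // with k enumerated in the same order as the nested loops above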
Example #8
        //
        // Interface functions
        //
        public static svm_model svm_train(svm_problem prob, svm_parameter param)
        {
            var model = new svm_model();
            model.param = param;

            if (param.svm_type == svm_parameter.ONE_CLASS || param.svm_type == svm_parameter.EPSILON_SVR ||
                param.svm_type == svm_parameter.NU_SVR)
            {
                // regression or one-class-svm
                model.nr_class = 2;
                model.label = null;
                model.nSV = null;
                model.probA = null;
                model.probB = null;
                model.sv_coef = new double[1][];

                if (param.probability == 1 &&
                    (param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR))
                {
                    model.probA = new double[1];
                    model.probA[0] = svm_svr_probability(prob, param);
                }

                decision_function f = svm_train_one(prob, param, 0, 0);
                model.rho = new double[1];
                model.rho[0] = f.rho;

                int nSV = 0;
                int i;
                for (i = 0; i < prob.l; i++)
                    if (Math.Abs(f.alpha[i]) > 0)
                        ++nSV;
                model.l = nSV;
                model.SV = new svm_node[nSV][];
                model.sv_coef[0] = new double[nSV];
                int j = 0;
                for (i = 0; i < prob.l; i++)
                    if (Math.Abs(f.alpha[i]) > 0)
                    {
                        model.SV[j] = prob.x[i];
                        model.sv_coef[0][j] = f.alpha[i];
                        ++j;
                    }
            }
            else
            {
                // classification
                // find out the number of classes
                int l = prob.l;
                int max_nr_class = 16;
                int nr_class = 0;
                var label = new int[max_nr_class];
                var count = new int[max_nr_class];
                var index = new int[l];

                int i;
                for (i = 0; i < l; i++)
                {
                    //UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"'
                    var this_label = (int) prob.y[i];
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (this_label == label[j])
                        {
                            ++count[j];
                            break;
                        }
                    index[i] = j;
                    if (j == nr_class)
                    {
                        if (nr_class == max_nr_class)
                        {
                            max_nr_class *= 2;
                            var new_data = new int[max_nr_class];
                            Array.Copy(label, 0, new_data, 0, label.Length);
                            label = new_data;

                            new_data = new int[max_nr_class];
                            Array.Copy(count, 0, new_data, 0, count.Length);
                            count = new_data;
                        }
                        label[nr_class] = this_label;
                        count[nr_class] = 1;
                        ++nr_class;
                    }
                }

                // group training data of the same class

                var start = new int[nr_class];
                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + count[i - 1];

                var x = new svm_node[l][];

                for (i = 0; i < l; i++)
                {
                    x[start[index[i]]] = prob.x[i];
                    ++start[index[i]];
                }

                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + count[i - 1];

                // calculate weighted C

                var weighted_C = new double[nr_class];
                for (i = 0; i < nr_class; i++)
                    weighted_C[i] = param.C;
                for (i = 0; i < param.nr_weight; i++)
                {
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (param.weight_label[i] == label[j])
                            break;
                    if (j == nr_class)
                        Console.Error.Write("warning: class label " + param.weight_label[i] +
                                            " specified in weight is not found\n");
                    else
                        weighted_C[j] *= param.weight[i];
                }

                // train k*(k-1)/2 models

                var nonzero = new bool[l];
                for (i = 0; i < l; i++)
                    nonzero[i] = false;
                var f = new decision_function[nr_class*(nr_class - 1)/2];

                double[] probA = null, probB = null;
                if (param.probability == 1)
                {
                    probA = new double[nr_class*(nr_class - 1)/2];
                    probB = new double[nr_class*(nr_class - 1)/2];
                }

                int p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        var sub_prob = new svm_problem();
                        int si = start[i], sj = start[j];
                        int ci = count[i], cj = count[j];
                        sub_prob.l = ci + cj;
                        sub_prob.x = new svm_node[sub_prob.l][];
                        sub_prob.y = new double[sub_prob.l];
                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            sub_prob.x[k] = x[si + k];
                            sub_prob.y[k] = + 1;
                        }
                        for (k = 0; k < cj; k++)
                        {
                            sub_prob.x[ci + k] = x[sj + k];
                            sub_prob.y[ci + k] = - 1;
                        }

                        if (param.probability == 1)
                        {
                            var probAB = new double[2];
                            svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                            probA[p] = probAB[0];
                            probB[p] = probAB[1];
                        }

                        f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
                        for (k = 0; k < ci; k++)
                            if (!nonzero[si + k] && Math.Abs(f[p].alpha[k]) > 0)
                                nonzero[si + k] = true;
                        for (k = 0; k < cj; k++)
                            if (!nonzero[sj + k] && Math.Abs(f[p].alpha[ci + k]) > 0)
                                nonzero[sj + k] = true;
                        ++p;
                    }

                // build output

                model.nr_class = nr_class;

                model.label = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                    model.label[i] = label[i];

                model.rho = new double[nr_class*(nr_class - 1)/2];
                for (i = 0; i < nr_class*(nr_class - 1)/2; i++)
                    model.rho[i] = f[i].rho;

                if (param.probability == 1)
                {
                    model.probA = new double[nr_class*(nr_class - 1)/2];
                    model.probB = new double[nr_class*(nr_class - 1)/2];
                    for (i = 0; i < nr_class*(nr_class - 1)/2; i++)
                    {
                        model.probA[i] = probA[i];
                        model.probB[i] = probB[i];
                    }
                }
                else
                {
                    model.probA = null;
                    model.probB = null;
                }

                int nnz = 0;
                var nz_count = new int[nr_class];
                model.nSV = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    int nSV = 0;
                    for (int j = 0; j < count[i]; j++)
                        if (nonzero[start[i] + j])
                        {
                            ++nSV;
                            ++nnz;
                        }
                    model.nSV[i] = nSV;
                    nz_count[i] = nSV;
                }

                Console.Out.Write("Total nSV = " + nnz + "\n");

                model.l = nnz;
                model.SV = new svm_node[nnz][];
                p = 0;
                for (i = 0; i < l; i++)
                    if (nonzero[i])
                        model.SV[p++] = x[i];

                var nz_start = new int[nr_class];
                nz_start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

                model.sv_coef = new double[nr_class - 1][];
                for (i = 0; i < nr_class - 1; i++)
                    model.sv_coef[i] = new double[nnz];

                p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        // classifier (i,j): coefficients with
                        // i are in sv_coef[j-1][nz_start[i]...],
                        // j are in sv_coef[i][nz_start[j]...]

                        int si = start[i];
                        int sj = start[j];
                        int ci = count[i];
                        int cj = count[j];

                        int q = nz_start[i];
                        int k;
                        for (k = 0; k < ci; k++)
                            if (nonzero[si + k])
                                model.sv_coef[j - 1][q++] = f[p].alpha[k];
                        q = nz_start[j];
                        for (k = 0; k < cj; k++)
                            if (nonzero[sj + k])
                                model.sv_coef[i][q++] = f[p].alpha[ci + k];
                        ++p;
                    }
            }
            return model;
        }
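A hedged sketch of driving svm_train: prob is assumed to be an svm_problem filled in elsewhere (for example by Encode in Example #14), only parameter fields that appear in these examples are set, and the remaining solver settings are assumed to already have sensible values.

    var param = new svm_parameter();
    param.svm_type = svm_parameter.C_SVC;
    param.kernel_type = svm_parameter.RBF;
    param.gamma = 0.1;        // placeholder value; often 1 / (number of features)
    param.C = 1.0;
    param.probability = 0;    // set to 1 to also estimate probA/probB
    param.nr_weight = 0;      // no per-class C weighting

    svm_model model = svm_train(prob, param);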
Example #9
        /// <summary>
        /// Convert regular Encog MLData into the "sparse" data needed by an SVM.
        /// </summary>
        ///
        /// <param name="data">The data to convert.</param>
        /// <returns>The SVM sparse data.</returns>
        public svm_node[] MakeSparse(IMLData data)
        {
            var result = new svm_node[data.Count];
            for (int i = 0; i < data.Count; i++)
            {
                result[i] = new svm_node {index = i + 1, value_Renamed = data[i]};
            }

            return result;
        }
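A sketch combining MakeSparse with prediction: svmMethod stands for whatever object exposes MakeSparse, and mlData and model are a data row and a trained svm_model (all placeholder names); svm_predict is written unqualified as in Example #6.

    svm_node[] sparse = svmMethod.MakeSparse(mlData);   // mlData is an IMLData row (placeholder)
    double output = svm_predict(model, sparse);         // model is a previously trained svm_model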
Example #10
        /// <summary>
        /// Convert regular Encog NeuralData into the "sparse" data needed by an SVM. 
        /// </summary>
        /// <param name="data">The data to convert.</param>
        /// <returns>The SVM sparse data.</returns>
        public svm_node[] MakeSparse(INeuralData data)
        {
            svm_node[] result = new svm_node[data.Count];
            for (int i = 0; i < data.Count; i++)
            {
                result[i] = new svm_node();
                result[i].index = i + 1;
                result[i].value_Renamed = data[i];
            }

            return result;
        }
Example #11
 public svm_node[] MakeSparse(IMLData data)
 {
     svm_node[] _nodeArray = new svm_node[data.Count];
     for (int i = 0; i < data.Count; i++)
     {
         _nodeArray[i] = new svm_node { index = i + 1, value_Renamed = data[i] };
     }
     return _nodeArray;
 }
Example #12
 internal static double x99240096a9e3842c(svm_node[] x08db3aeabb253cb1, svm_node[] x1e218ceaee1bb583)
 {
     // Sparse dot product: only indices present in both vectors contribute.
     double num = 0.0;
     int length = x08db3aeabb253cb1.Length;
     int num3 = x1e218ceaee1bb583.Length;
     int num4 = 0;
     int num5 = 0;
     while (num4 < length && num5 < num3)
     {
         if (x08db3aeabb253cb1[num4].index == x1e218ceaee1bb583[num5].index)
         {
             num += x08db3aeabb253cb1[num4++].value_Renamed * x1e218ceaee1bb583[num5++].value_Renamed;
         }
         else if (x08db3aeabb253cb1[num4].index > x1e218ceaee1bb583[num5].index)
         {
             num5++;
         }
         else
         {
             num4++;
         }
     }
     return num;
 }
Example #13
        internal static double xc60aa42cfbd0c1ed(svm_node[] x08db3aeabb253cb1, svm_node[] x1e218ceaee1bb583, svm_parameter x0d173b5435b4d6ad)
        {
            switch (x0d173b5435b4d6ad.kernel_type)
            {
                case 0: // LINEAR
                    return x99240096a9e3842c(x08db3aeabb253cb1, x1e218ceaee1bb583);

                case 1: // POLY
                    return Math.Pow((x0d173b5435b4d6ad.gamma * x99240096a9e3842c(x08db3aeabb253cb1, x1e218ceaee1bb583)) + x0d173b5435b4d6ad.coef0, x0d173b5435b4d6ad.degree);

                case 2: // RBF: exp(-gamma * ||x - y||^2) over the sparse representations
                {
                    double num = 0.0;
                    int length = x08db3aeabb253cb1.Length;
                    int num3 = x1e218ceaee1bb583.Length;
                    int num4 = 0;
                    int num5 = 0;
                    while (num4 < length && num5 < num3)
                    {
                        if (x08db3aeabb253cb1[num4].index == x1e218ceaee1bb583[num5].index)
                        {
                            double num6 = x08db3aeabb253cb1[num4++].value_Renamed - x1e218ceaee1bb583[num5++].value_Renamed;
                            num += num6 * num6;
                        }
                        else if (x08db3aeabb253cb1[num4].index > x1e218ceaee1bb583[num5].index)
                        {
                            num += x1e218ceaee1bb583[num5].value_Renamed * x1e218ceaee1bb583[num5].value_Renamed;
                            num5++;
                        }
                        else
                        {
                            num += x08db3aeabb253cb1[num4].value_Renamed * x08db3aeabb253cb1[num4].value_Renamed;
                            num4++;
                        }
                    }

                    while (num4 < length)
                    {
                        num += x08db3aeabb253cb1[num4].value_Renamed * x08db3aeabb253cb1[num4].value_Renamed;
                        num4++;
                    }

                    while (num5 < num3)
                    {
                        num += x1e218ceaee1bb583[num5].value_Renamed * x1e218ceaee1bb583[num5].value_Renamed;
                        num5++;
                    }

                    return Math.Exp(-x0d173b5435b4d6ad.gamma * num);
                }

                case 3: // SIGMOID
                    return xcb156489dc62ed23((x0d173b5435b4d6ad.gamma * x99240096a9e3842c(x08db3aeabb253cb1, x1e218ceaee1bb583)) + x0d173b5435b4d6ad.coef0);

                default:
                    return 0.0;
            }
        }
Example #14
 public static svm_problem Encode(IMLDataSet training, int outputIndex)
 {
     try
     {
         var _problem = new svm_problem();
         _problem.l = (int) training.Count;
         _problem.y = new double[_problem.l];
         _problem.x = new svm_node[_problem.l][];

         // Pre-allocate one row per training element; each row is re-allocated
         // below to match the actual input size of its pair.
         for (int num = 0; num < _problem.l; num++)
         {
             _problem.x[num] = new svm_node[training.InputSize];
         }

         int num2 = 0;
         foreach (IMLDataPair pair in training)
         {
             IMLData input = pair.Input;
             IMLData data2 = pair.Ideal;

             _problem.x[num2] = new svm_node[input.Count];
             for (int num3 = 0; num3 < input.Count; num3++)
             {
                 var _node = new svm_node();
                 _node.index = num3 + 1;
                 _node.value_Renamed = input[num3];
                 _problem.x[num2][num3] = _node;
             }

             // The requested column of the ideal data becomes the target value.
             _problem.y[num2] = data2[outputIndex];
             num2++;
         }

         return _problem;
     }
     catch (OutOfMemoryException)
     {
         throw new EncogError("SVM Model - Out of Memory");
     }
 }
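A sketch tying Encode to training: trainingSet is a placeholder IMLDataSet whose ideal data has the desired output at index 0, param is configured as in the sketch after Example #8, and Encode/svm_train are written unqualified as they appear in these examples.

    svm_problem prob = Encode(trainingSet, 0);   // column 0 of the ideal data becomes the SVM target
    svm_model model = svm_train(prob, param);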