Example #1
        //
        // Interface functions
        //
        public static svm_model svm_train(svm_problem prob, svm_parameter param)
        {
            var model = new svm_model();
            model.param = param;

            if (param.svm_type == svm_parameter.ONE_CLASS || param.svm_type == svm_parameter.EPSILON_SVR ||
                param.svm_type == svm_parameter.NU_SVR)
            {
                // regression or one-class-svm
                model.nr_class = 2;
                model.label = null;
                model.nSV = null;
                model.probA = null;
                model.probB = null;
                model.sv_coef = new double[1][];

                if (param.probability == 1 &&
                    (param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR))
                {
                    model.probA = new double[1];
                    model.probA[0] = svm_svr_probability(prob, param);
                }

                decision_function f = svm_train_one(prob, param, 0, 0);
                model.rho = new double[1];
                model.rho[0] = f.rho;

                int nSV = 0;
                int i;
                for (i = 0; i < prob.l; i++)
                    if (Math.Abs(f.alpha[i]) > 0)
                        ++nSV;
                model.l = nSV;
                model.SV = new svm_node[nSV][];
                model.sv_coef[0] = new double[nSV];
                int j = 0;
                for (i = 0; i < prob.l; i++)
                    if (Math.Abs(f.alpha[i]) > 0)
                    {
                        model.SV[j] = prob.x[i];
                        model.sv_coef[0][j] = f.alpha[i];
                        ++j;
                    }
            }
            else
            {
                // classification
                // find out the number of classes
                int l = prob.l;
                int max_nr_class = 16;
                int nr_class = 0;
                var label = new int[max_nr_class];
                var count = new int[max_nr_class];
                var index = new int[l];

                int i;
                for (i = 0; i < l; i++)
                {
                    // narrowing conversion: class labels are stored as double but compared as int
                    var this_label = (int) prob.y[i];
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (this_label == label[j])
                        {
                            ++count[j];
                            break;
                        }
                    index[i] = j;
                    if (j == nr_class)
                    {
                        if (nr_class == max_nr_class)
                        {
                            max_nr_class *= 2;
                            var new_data = new int[max_nr_class];
                            Array.Copy(label, 0, new_data, 0, label.Length);
                            label = new_data;

                            new_data = new int[max_nr_class];
                            Array.Copy(count, 0, new_data, 0, count.Length);
                            count = new_data;
                        }
                        label[nr_class] = this_label;
                        count[nr_class] = 1;
                        ++nr_class;
                    }
                }

                // group training data of the same class

                var start = new int[nr_class];
                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + count[i - 1];

                var x = new svm_node[l][];

                for (i = 0; i < l; i++)
                {
                    x[start[index[i]]] = prob.x[i];
                    ++start[index[i]];
                }

                start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    start[i] = start[i - 1] + count[i - 1];

                // calculate weighted C

                var weighted_C = new double[nr_class];
                for (i = 0; i < nr_class; i++)
                    weighted_C[i] = param.C;
                for (i = 0; i < param.nr_weight; i++)
                {
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (param.weight_label[i] == label[j])
                            break;
                    if (j == nr_class)
                        Console.Error.Write("warning: class label " + param.weight_label[i] +
                                            " specified in weight is not found\n");
                    else
                        weighted_C[j] *= param.weight[i];
                }

                // train k*(k-1)/2 models

                var nonzero = new bool[l];
                for (i = 0; i < l; i++)
                    nonzero[i] = false;
                var f = new decision_function[nr_class*(nr_class - 1)/2];

                double[] probA = null, probB = null;
                if (param.probability == 1)
                {
                    probA = new double[nr_class*(nr_class - 1)/2];
                    probB = new double[nr_class*(nr_class - 1)/2];
                }

                int p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        var sub_prob = new svm_problem();
                        int si = start[i], sj = start[j];
                        int ci = count[i], cj = count[j];
                        sub_prob.l = ci + cj;
                        sub_prob.x = new svm_node[sub_prob.l][];
                        sub_prob.y = new double[sub_prob.l];
                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            sub_prob.x[k] = x[si + k];
                            sub_prob.y[k] = +1;
                        }
                        for (k = 0; k < cj; k++)
                        {
                            sub_prob.x[ci + k] = x[sj + k];
                            sub_prob.y[ci + k] = -1;
                        }

                        if (param.probability == 1)
                        {
                            var probAB = new double[2];
                            svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                            probA[p] = probAB[0];
                            probB[p] = probAB[1];
                        }

                        f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
                        for (k = 0; k < ci; k++)
                            if (!nonzero[si + k] && Math.Abs(f[p].alpha[k]) > 0)
                                nonzero[si + k] = true;
                        for (k = 0; k < cj; k++)
                            if (!nonzero[sj + k] && Math.Abs(f[p].alpha[ci + k]) > 0)
                                nonzero[sj + k] = true;
                        ++p;
                    }

                // build output

                model.nr_class = nr_class;

                model.label = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                    model.label[i] = label[i];

                model.rho = new double[nr_class*(nr_class - 1)/2];
                for (i = 0; i < nr_class*(nr_class - 1)/2; i++)
                    model.rho[i] = f[i].rho;

                if (param.probability == 1)
                {
                    model.probA = new double[nr_class*(nr_class - 1)/2];
                    model.probB = new double[nr_class*(nr_class - 1)/2];
                    for (i = 0; i < nr_class*(nr_class - 1)/2; i++)
                    {
                        model.probA[i] = probA[i];
                        model.probB[i] = probB[i];
                    }
                }
                else
                {
                    model.probA = null;
                    model.probB = null;
                }

                int nnz = 0;
                var nz_count = new int[nr_class];
                model.nSV = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    int nSV = 0;
                    for (int j = 0; j < count[i]; j++)
                        if (nonzero[start[i] + j])
                        {
                            ++nSV;
                            ++nnz;
                        }
                    model.nSV[i] = nSV;
                    nz_count[i] = nSV;
                }

                Console.Out.Write("Total nSV = " + nnz + "\n");

                model.l = nnz;
                model.SV = new svm_node[nnz][];
                p = 0;
                for (i = 0; i < l; i++)
                    if (nonzero[i])
                        model.SV[p++] = x[i];

                var nz_start = new int[nr_class];
                nz_start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

                model.sv_coef = new double[nr_class - 1][];
                for (i = 0; i < nr_class - 1; i++)
                    model.sv_coef[i] = new double[nnz];

                p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        // classifier (i,j): coefficients with
                        // i are in sv_coef[j-1][nz_start[i]...],
                        // j are in sv_coef[i][nz_start[j]...]

                        int si = start[i];
                        int sj = start[j];
                        int ci = count[i];
                        int cj = count[j];

                        int q = nz_start[i];
                        int k;
                        for (k = 0; k < ci; k++)
                            if (nonzero[si + k])
                                model.sv_coef[j - 1][q++] = f[p].alpha[k];
                        q = nz_start[j];
                        for (k = 0; k < cj; k++)
                            if (nonzero[sj + k])
                                model.sv_coef[i][q++] = f[p].alpha[ci + k];
                        ++p;
                    }
            }
            return model;
        }
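
A hedged usage sketch for Example #1: the members prob.l, prob.x, prob.y, param.svm_type, param.C, param.probability and param.nr_weight are taken from the code above, while the svm_node members (index, value), the end-of-vector terminator and the remaining parameter setup are assumptions made only for illustration.

        // Hypothetical usage sketch (not part of the library): build a tiny two-class
        // problem and train a C-SVC model with the svm_train shown in Example #1.
        var prob = new svm_problem();
        prob.l = 4;                                 // four training points
        prob.y = new double[] { +1, +1, -1, -1 };   // two labels -> classification branch
        prob.x = new svm_node[prob.l][];
        for (int i = 0; i < prob.l; i++)
        {
            prob.x[i] = new[]
            {
                new svm_node { index = 1, value = (i < 2) ? 1.0 : -1.0 }, // assumed members
                new svm_node { index = -1 }                               // assumed terminator
            };
        }

        var param = new svm_parameter();
        param.svm_type = svm_parameter.C_SVC;
        param.C = 1.0;
        param.probability = 0;
        param.nr_weight = 0;
        // kernel_type, gamma, eps, cache_size, ... would also need to be set before training.

        svm_model model = svm_train(prob, param);   // assumed to run inside the same class
        Console.Out.Write("nr_class = " + model.nr_class + ", total SV = " + model.l + "\n");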
Example #2
        internal static decision_function svm_train_one(svm_problem prob, svm_parameter param, double Cp, double Cn)
        {
            var alpha = new double[prob.l];
            var si = new Solver.SolutionInfo();
            switch (param.svm_type)
            {
                case svm_parameter.C_SVC:
                    solve_c_svc(prob, param, alpha, si, Cp, Cn);
                    break;

                case svm_parameter.NU_SVC:
                    solve_nu_svc(prob, param, alpha, si);
                    break;

                case svm_parameter.ONE_CLASS:
                    solve_one_class(prob, param, alpha, si);
                    break;

                case svm_parameter.EPSILON_SVR:
                    solve_epsilon_svr(prob, param, alpha, si);
                    break;

                case svm_parameter.NU_SVR:
                    solve_nu_svr(prob, param, alpha, si);
                    break;
            }

            Console.Out.Write("obj = " + si.obj + ", rho = " + si.rho + "\n");

            // output SVs

            int nSV = 0;
            int nBSV = 0;
            for (int i = 0; i < prob.l; i++)
            {
                if (Math.Abs(alpha[i]) > 0)
                {
                    ++nSV;
                    if (prob.y[i] > 0)
                    {
                        if (Math.Abs(alpha[i]) >= si.upper_bound_p)
                            ++nBSV;
                    }
                    else
                    {
                        if (Math.Abs(alpha[i]) >= si.upper_bound_n)
                            ++nBSV;
                    }
                }
            }

            Console.Out.Write("nSV = " + nSV + ", nBSV = " + nBSV + "\n");

            var f = new decision_function();
            f.alpha = alpha;
            f.rho = si.rho;
            return f;
        }
Example #3
        static decision_function svm_train_one(Problem prob, Parameter param, double Cp, double Cn)
        {
            double[] alpha = new double[prob.Count];
            Solver.SolutionInfo si = new Solver.SolutionInfo();
            switch (param.SvmType)
            {
                case SvmType.C_SVC:
                    solve_c_svc(prob, param, alpha, si, Cp, Cn);
                    break;
                case SvmType.NU_SVC:
                    solve_nu_svc(prob, param, alpha, si);
                    break;
                case SvmType.ONE_CLASS:
                    solve_one_class(prob, param, alpha, si);
                    break;
                case SvmType.EPSILON_SVR:
                    solve_epsilon_svr(prob, param, alpha, si);
                    break;
                case SvmType.NU_SVR:
                    solve_nu_svr(prob, param, alpha, si);
                    break;
            }

            Procedures.info("obj = " + si.obj + ", rho = " + si.rho + "\n");

            // output SVs

            int nSV = 0;
            int nBSV = 0;
            for (int i = 0; i < prob.Count; i++)
            {
                if (Math.Abs(alpha[i]) > 0)
                {
                    ++nSV;
                    if (prob.Y[i] > 0)
                    {
                        if (Math.Abs(alpha[i]) >= si.upper_bound_p)
                            ++nBSV;
                    }
                    else
                    {
                        if (Math.Abs(alpha[i]) >= si.upper_bound_n)
                            ++nBSV;
                    }
                }
            }

            Procedures.info("nSV = " + nSV + ", nBSV = " + nBSV + "\n");

            decision_function f = new decision_function();
            f.alpha = alpha;
            f.rho = si.rho;
            return f;
        }
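
Both svm_train_one variants above package the solver output into a decision_function. A minimal sketch of that container, inferred only from the fields used here (the library's actual class may declare more members):

        // Inferred from usage in Examples #2 and #3; field names only, not the
        // library's full definition.
        internal class decision_function
        {
            public double[] alpha; // one dual coefficient per training point (zero for non-SVs)
            public double rho;     // offset (bias) of the trained decision function
        }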
Example #4
        //
        // Interface functions
        //
        public static Model svm_train(Problem prob, Parameter param)
        {
            Model model = new Model();
            model.Parameter = param;

            if (param.SvmType == SvmType.ONE_CLASS ||
               param.SvmType == SvmType.EPSILON_SVR ||
               param.SvmType == SvmType.NU_SVR)
            {
                // regression or one-class-svm
                model.NumberOfClasses = 2;
                model.ClassLabels = null;
                model.NumberOfSVPerClass = null;
                model.PairwiseProbabilityA = null; model.PairwiseProbabilityB = null;
                model.SupportVectorCoefficients = new double[1][];

                if (param.Probability &&
                   (param.SvmType == SvmType.EPSILON_SVR ||
                    param.SvmType == SvmType.NU_SVR))
                {
                    model.PairwiseProbabilityA = new double[1];
                    model.PairwiseProbabilityA[0] = svm_svr_probability(prob, param);
                }

                decision_function f = svm_train_one(prob, param, 0, 0);
                model.Rho = new double[1];
                model.Rho[0] = f.rho;

                int nSV = 0;
                int i;
                for (i = 0; i < prob.Count; i++)
                    if (Math.Abs(f.alpha[i]) > 0) ++nSV;
                model.SupportVectorCount = nSV;
                model.SupportVectors = new Node[nSV][];
                model.SupportVectorCoefficients[0] = new double[nSV];
                int j = 0;
                for (i = 0; i < prob.Count; i++)
                    if (Math.Abs(f.alpha[i]) > 0)
                    {
                        model.SupportVectors[j] = prob.X[i];
                        model.SupportVectorCoefficients[0][j] = f.alpha[i];
                        ++j;
                    }
            }
            else
            {
                // classification
                int l = prob.Count;
                int[] tmp_nr_class = new int[1];
                int[][] tmp_label = new int[1][];
                int[][] tmp_start = new int[1][];
                int[][] tmp_count = new int[1][];
                int[] perm = new int[l];

                // group training data of the same class
                svm_group_classes(prob, tmp_nr_class, tmp_label, tmp_start, tmp_count, perm);
                int nr_class = tmp_nr_class[0];
                int[] label = tmp_label[0];
                int[] start = tmp_start[0];
                int[] count = tmp_count[0];
                Node[][] x = new Node[l][];
                int i;
                for (i = 0; i < l; i++)
                    x[i] = prob.X[perm[i]];

                // calculate weighted C

                double[] weighted_C = new double[nr_class];
                for (i = 0; i < nr_class; i++)
                    weighted_C[i] = param.C;
                foreach (int weightedLabel in param.Weights.Keys)
                {
                    int index = Array.IndexOf<int>(label, weightedLabel);
                    if (index < 0)
                        Console.Error.WriteLine("warning: class label " + weightedLabel + " specified in weight is not found");
                    else weighted_C[index] *= param.Weights[weightedLabel];
                }

                // train k*(k-1)/2 models

                bool[] nonzero = new bool[l];
                for (i = 0; i < l; i++)
                    nonzero[i] = false;
                decision_function[] f = new decision_function[nr_class * (nr_class - 1) / 2];

                double[] probA = null, probB = null;
                if (param.Probability)
                {
                    probA = new double[nr_class * (nr_class - 1) / 2];
                    probB = new double[nr_class * (nr_class - 1) / 2];
                }

                int p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        Problem sub_prob = new Problem();
                        int si = start[i], sj = start[j];
                        int ci = count[i], cj = count[j];
                        sub_prob.Count = ci + cj;
                        sub_prob.X = new Node[sub_prob.Count][];
                        sub_prob.Y = new double[sub_prob.Count];
                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            sub_prob.X[k] = x[si + k];
                            sub_prob.Y[k] = +1;
                        }
                        for (k = 0; k < cj; k++)
                        {
                            sub_prob.X[ci + k] = x[sj + k];
                            sub_prob.Y[ci + k] = -1;
                        }

                        if (param.Probability)
                        {
                            double[] probAB = new double[2];
                            svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                            probA[p] = probAB[0];
                            probB[p] = probAB[1];
                        }

                        f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
                        for (k = 0; k < ci; k++)
                            if (!nonzero[si + k] && Math.Abs(f[p].alpha[k]) > 0)
                                nonzero[si + k] = true;
                        for (k = 0; k < cj; k++)
                            if (!nonzero[sj + k] && Math.Abs(f[p].alpha[ci + k]) > 0)
                                nonzero[sj + k] = true;
                        ++p;
                    }

                // build output

                model.NumberOfClasses = nr_class;

                model.ClassLabels = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                    model.ClassLabels[i] = label[i];

                model.Rho = new double[nr_class * (nr_class - 1) / 2];
                for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    model.Rho[i] = f[i].rho;

                if (param.Probability)
                {
                    model.PairwiseProbabilityA = new double[nr_class * (nr_class - 1) / 2];
                    model.PairwiseProbabilityB = new double[nr_class * (nr_class - 1) / 2];
                    for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    {
                        model.PairwiseProbabilityA[i] = probA[i];
                        model.PairwiseProbabilityB[i] = probB[i];
                    }
                }
                else
                {
                    model.PairwiseProbabilityA = null;
                    model.PairwiseProbabilityB = null;
                }

                int nnz = 0;
                int[] nz_count = new int[nr_class];
                model.NumberOfSVPerClass = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    int nSV = 0;
                    for (int j = 0; j < count[i]; j++)
                        if (nonzero[start[i] + j])
                        {
                            ++nSV;
                            ++nnz;
                        }
                    model.NumberOfSVPerClass[i] = nSV;
                    nz_count[i] = nSV;
                }

                Procedures.info("Total nSV = " + nnz + "\n");

                model.SupportVectorCount = nnz;
                model.SupportVectors = new Node[nnz][];
                p = 0;
                for (i = 0; i < l; i++)
                    if (nonzero[i]) model.SupportVectors[p++] = x[i];

                int[] nz_start = new int[nr_class];
                nz_start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

                model.SupportVectorCoefficients = new double[nr_class - 1][];
                for (i = 0; i < nr_class - 1; i++)
                    model.SupportVectorCoefficients[i] = new double[nnz];

                p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        // classifier (i,j): coefficients with
                        // i are in sv_coef[j-1][nz_start[i]...],
                        // j are in sv_coef[i][nz_start[j]...]

                        int si = start[i];
                        int sj = start[j];
                        int ci = count[i];
                        int cj = count[j];

                        int q = nz_start[i];
                        int k;
                        for (k = 0; k < ci; k++)
                            if (nonzero[si + k])
                                model.SupportVectorCoefficients[j - 1][q++] = f[p].alpha[k];
                        q = nz_start[j];
                        for (k = 0; k < cj; k++)
                            if (nonzero[sj + k])
                                model.SupportVectorCoefficients[i][q++] = f[p].alpha[ci + k];
                        ++p;
                    }
            }
            return model;
        }
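
A hedged usage sketch for the Problem/Parameter/Model variant of Example #4. The members Count, X, Y, SvmType, C, Probability and Weights, and the Model properties printed at the end, appear in the code above; the Node layout and the way Weights is populated are assumptions for illustration only.

        // Hypothetical usage sketch (not part of the library).
        Problem prob = new Problem();
        prob.Count = 4;
        prob.Y = new double[] { +1, +1, -1, -1 };
        prob.X = new Node[prob.Count][];
        for (int i = 0; i < prob.Count; i++)
        {
            // Only the jagged Node[][] shape is visible above; the Node members used
            // here (Index, Value) are assumptions.
            prob.X[i] = new[] { new Node { Index = 1, Value = (i < 2) ? 1.0 : -1.0 } };
        }

        Parameter param = new Parameter();
        param.SvmType = SvmType.C_SVC;
        param.C = 1.0;
        param.Probability = false;
        // param.Weights is iterated via Keys and indexed by class label above; it is
        // assumed to default to an empty collection. To reweight C for one class:
        // param.Weights[someLabel] = 2.0;

        Model model = svm_train(prob, param);       // assumed to run inside the same class
        Console.WriteLine("classes: " + model.NumberOfClasses + ", SVs: " + model.SupportVectorCount);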
Example #5
    //
    // Interface functions
    //
    public static svm_model svm_train(svm_problem prob, svm_parameter param) {
      svm_model model = new svm_model();
      model.param = param;

      if (param.svm_type == svm_parameter.ONE_CLASS ||
          param.svm_type == svm_parameter.EPSILON_SVR ||
          param.svm_type == svm_parameter.NU_SVR) {
        // regression or one-class-svm
        model.nr_class = 2;
        model.label = null;
        model.nSV = null;
        model.probA = null;
        model.probB = null;
        model.sv_coef = new double[1][];

        if (param.probability == 1 &&
            (param.svm_type == svm_parameter.EPSILON_SVR ||
             param.svm_type == svm_parameter.NU_SVR)) {
          model.probA = new double[1];
          model.probA[0] = svm_svr_probability(prob, param);
        }

        decision_function f = svm_train_one(prob, param, 0, 0);
        model.rho = new double[1];
        model.rho[0] = f.rho;

        int nSV = 0;
        int i;
        for (i = 0; i < prob.l; i++)
          if (Math.Abs(f.alpha[i]) > 0) ++nSV;
        model.l = nSV;
        model.SV = new svm_node[nSV][];
        model.sv_coef[0] = new double[nSV];
        int j = 0;
        for (i = 0; i < prob.l; i++)
          if (Math.Abs(f.alpha[i]) > 0) {
            model.SV[j] = prob.x[i];
            model.sv_coef[0][j] = f.alpha[i];
            ++j;
          }
      } else {
        // classification
        int l = prob.l;
        int[] tmp_nr_class = new int[1];
        int[][] tmp_label = new int[1][];
        int[][] tmp_start = new int[1][];
        int[][] tmp_count = new int[1][];
        int[] perm = new int[l];

        // group training data of the same class
        svm_group_classes(prob, tmp_nr_class, tmp_label, tmp_start, tmp_count, perm);
        int nr_class = tmp_nr_class[0];
        int[] label = tmp_label[0];
        int[] start = tmp_start[0];
        int[] count = tmp_count[0];

        if (nr_class == 1)
          svm.info("WARNING: training data in only one class. See README for details." + Environment.NewLine);

        svm_node[][] x = new svm_node[l][];
        int i;
        for (i = 0; i < l; i++)
          x[i] = prob.x[perm[i]];

        // calculate weighted C

        double[] weighted_C = new double[nr_class];
        for (i = 0; i < nr_class; i++)
          weighted_C[i] = param.C;
        for (i = 0; i < param.nr_weight; i++) {
          int j;
          for (j = 0; j < nr_class; j++)
            if (param.weight_label[i] == label[j])
              break;
          if (j == nr_class)
            Console.Error.WriteLine("WARNING: class label " + param.weight_label[i] +
                                    " specified in weight is not found");
          else
            weighted_C[j] *= param.weight[i];
        }

        // train k*(k-1)/2 models

        bool[] nonzero = new bool[l];
        for (i = 0; i < l; i++)
          nonzero[i] = false;
        decision_function[] f = new decision_function[nr_class * (nr_class - 1) / 2];

        double[] probA = null, probB = null;
        if (param.probability == 1) {
          probA = new double[nr_class * (nr_class - 1) / 2];
          probB = new double[nr_class * (nr_class - 1) / 2];
        }

        int p = 0;
        for (i = 0; i < nr_class; i++)
          for (int j = i + 1; j < nr_class; j++) {
            svm_problem sub_prob = new svm_problem();
            int si = start[i], sj = start[j];
            int ci = count[i], cj = count[j];
            sub_prob.l = ci + cj;
            sub_prob.x = new svm_node[sub_prob.l][];
            sub_prob.y = new double[sub_prob.l];
            int k;
            for (k = 0; k < ci; k++) {
              sub_prob.x[k] = x[si + k];
              sub_prob.y[k] = +1;
            }
            for (k = 0; k < cj; k++) {
              sub_prob.x[ci + k] = x[sj + k];
              sub_prob.y[ci + k] = -1;
            }

            if (param.probability == 1) {
              double[] probAB = new double[2];
              svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
              probA[p] = probAB[0];
              probB[p] = probAB[1];
            }

            f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
            for (k = 0; k < ci; k++)
              if (!nonzero[si + k] && Math.Abs(f[p].alpha[k]) > 0)
                nonzero[si + k] = true;
            for (k = 0; k < cj; k++)
              if (!nonzero[sj + k] && Math.Abs(f[p].alpha[ci + k]) > 0)
                nonzero[sj + k] = true;
            ++p;
          }

        // build output

        model.nr_class = nr_class;

        model.label = new int[nr_class];
        for (i = 0; i < nr_class; i++)
          model.label[i] = label[i];

        model.rho = new double[nr_class * (nr_class - 1) / 2];
        for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
          model.rho[i] = f[i].rho;

        if (param.probability == 1) {
          model.probA = new double[nr_class * (nr_class - 1) / 2];
          model.probB = new double[nr_class * (nr_class - 1) / 2];
          for (i = 0; i < nr_class * (nr_class - 1) / 2; i++) {
            model.probA[i] = probA[i];
            model.probB[i] = probB[i];
          }
        } else {
          model.probA = null;
          model.probB = null;
        }

        int nnz = 0;
        int[] nz_count = new int[nr_class];
        model.nSV = new int[nr_class];
        for (i = 0; i < nr_class; i++) {
          int nSV = 0;
          for (int j = 0; j < count[i]; j++)
            if (nonzero[start[i] + j]) {
              ++nSV;
              ++nnz;
            }
          model.nSV[i] = nSV;
          nz_count[i] = nSV;
        }

        svm.info("Total nSV = " + nnz + Environment.NewLine);

        model.l = nnz;
        model.SV = new svm_node[nnz][];
        p = 0;
        for (i = 0; i < l; i++)
          if (nonzero[i]) model.SV[p++] = x[i];

        int[] nz_start = new int[nr_class];
        nz_start[0] = 0;
        for (i = 1; i < nr_class; i++)
          nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

        model.sv_coef = new double[nr_class - 1][];
        for (i = 0; i < nr_class - 1; i++)
          model.sv_coef[i] = new double[nnz];

        p = 0;
        for (i = 0; i < nr_class; i++)
          for (int j = i + 1; j < nr_class; j++) {
            // classifier (i,j): coefficients with
            // i are in sv_coef[j-1][nz_start[i]...],
            // j are in sv_coef[i][nz_start[j]...]

            int si = start[i];
            int sj = start[j];
            int ci = count[i];
            int cj = count[j];

            int q = nz_start[i];
            int k;
            for (k = 0; k < ci; k++)
              if (nonzero[si + k])
                model.sv_coef[j - 1][q++] = f[p].alpha[k];
            q = nz_start[j];
            for (k = 0; k < cj; k++)
              if (nonzero[sj + k])
                model.sv_coef[i][q++] = f[p].alpha[ci + k];
            ++p;
          }
      }
      return model;
    }