//
// Interface functions
//
/// <summary>
/// Trains an SVM model on <paramref name="prob"/> according to <paramref name="param"/>.
/// For SVR/one-class types a single decision function is trained; for classification,
/// one-vs-one decision functions are trained for every pair of classes
/// (k*(k-1)/2 sub-problems) and assembled into the model.
/// </summary>
/// <param name="prob">The training problem (inputs <c>X</c> and targets <c>Y</c>).</param>
/// <param name="param">Training parameters (SVM type, C, weights, probability flag, ...).</param>
/// <returns>The trained <see cref="SvmModel"/>.</returns>
public static SvmModel Train(SvmProblem prob, SvmParameter param)
{
    var model = new SvmModel();
    model.Param = param;

    if (param.SvmType.IsSVROrOneClass())
    {
        // regression or one-class-svm: a single decision function, no class labels
        model.NrClass = 2;
        model.Label = null;
        model.SupportVectorsNumbers = null;
        model.ProbA = null;
        model.ProbB = null;
        model.SupportVectorsCoefficients = new double[1][];

        if (param.Probability && param.SvmType.IsSVR())
        {
            model.ProbA = new double[1];
            model.ProbA[0] = svm_svr_probability(prob, param);
        }

        DecisionFunction f = svm_train_one(prob, param, 0, 0);
        model.Rho = new double[1];
        model.Rho[0] = f.Rho;

        // Count support vectors (training points with non-zero alpha).
        int nSV = 0;
        int i;
        for (i = 0; i < prob.Lenght; i++)
        {
            if (Math.Abs(f.Alpha[i]) > 0)
            {
                ++nSV;
            }
        }

        model.TotalSupportVectorsNumber = nSV;
        model.SupportVectors = new SvmNode[nSV][];
        model.SupportVectorsCoefficients[0] = new double[nSV];
        int j = 0;
        for (i = 0; i < prob.Lenght; i++)
        {
            if (Math.Abs(f.Alpha[i]) > 0)
            {
                model.SupportVectors[j] = prob.X[i];
                model.SupportVectorsCoefficients[0][j] = f.Alpha[i];
                ++j;
            }
        }
    }
    else
    {
        // classification
        int l = prob.Lenght;
        int[] perm = new int[l];
        int nr_class;
        int[] label;
        int[] start;
        int[] count;

        // group training data of the same class
        svm_group_classes(prob, out nr_class, out label, out start, out count, perm);

        // FIX: warn when the training data contains a single class only; the
        // resulting model is degenerate (no pairwise classifiers are trained).
        // Mirrors upstream libsvm and the sibling Train implementation.
        if (nr_class == 1)
        {
            Svm.info("WARNING: training data in only one class. See README for details.\n");
        }

        SvmNode[][] x = new SvmNode[l][];
        int i;
        for (i = 0; i < l; i++)
        {
            x[i] = prob.X[perm[i]];
        }

        // calculate weighted C
        double[] weighted_C = new double[nr_class];
        for (i = 0; i < nr_class; i++)
        {
            weighted_C[i] = param.C;
        }
        for (i = 0; i < param.WeightsCount; i++)
        {
            int j;
            for (j = 0; j < nr_class; j++)
            {
                if (param.WeightLabel[i] == label[j])
                {
                    break;
                }
            }
            if (j == nr_class)
            {
                Console.Error.WriteLine("warning: class label " + param.WeightLabel[i] + " specified in weight is not found\n");
            }
            else
            {
                weighted_C[j] *= param.Weight[i];
            }
        }

        // train k*(k-1)/2 models
        var nonzero = new bool[l];
        for (i = 0; i < l; i++)
        {
            nonzero[i] = false;
        }
        var f = new DecisionFunction[nr_class * (nr_class - 1) / 2];

        double[] probA = null, probB = null;
        if (param.Probability)
        {
            probA = new double[nr_class * (nr_class - 1) / 2];
            probB = new double[nr_class * (nr_class - 1) / 2];
        }

        int p = 0;
        for (i = 0; i < nr_class; i++)
        {
            for (int j = i + 1; j < nr_class; j++)
            {
                // Build the two-class sub-problem (class i -> +1, class j -> -1).
                int si = start[i], sj = start[j];
                int ci = count[i], cj = count[j];
                var subprobLenght = ci + cj;
                var sub_prob = new SvmProblem
                {
                    X = new SvmNode[subprobLenght][],
                    Y = new double[subprobLenght]
                };
                int k;
                for (k = 0; k < ci; k++)
                {
                    sub_prob.X[k] = x[si + k];
                    sub_prob.Y[k] = +1;
                }
                for (k = 0; k < cj; k++)
                {
                    sub_prob.X[ci + k] = x[sj + k];
                    sub_prob.Y[ci + k] = -1;
                }

                if (param.Probability)
                {
                    double[] probAB = new double[2];
                    svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                    probA[p] = probAB[0];
                    probB[p] = probAB[1];
                }

                f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);

                // Mark training points that are support vectors in any sub-problem.
                for (k = 0; k < ci; k++)
                {
                    if (!nonzero[si + k] && Math.Abs(f[p].Alpha[k]) > 0)
                    {
                        nonzero[si + k] = true;
                    }
                }
                for (k = 0; k < cj; k++)
                {
                    if (!nonzero[sj + k] && Math.Abs(f[p].Alpha[ci + k]) > 0)
                    {
                        nonzero[sj + k] = true;
                    }
                }
                ++p;
            }
        }

        // build output
        model.NrClass = nr_class;
        model.Label = new int[nr_class];
        for (i = 0; i < nr_class; i++)
        {
            model.Label[i] = label[i];
        }

        model.Rho = new double[nr_class * (nr_class - 1) / 2];
        for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
        {
            model.Rho[i] = f[i].Rho;
        }

        if (param.Probability)
        {
            model.ProbA = new double[nr_class * (nr_class - 1) / 2];
            model.ProbB = new double[nr_class * (nr_class - 1) / 2];
            for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
            {
                model.ProbA[i] = probA[i];
                model.ProbB[i] = probB[i];
            }
        }
        else
        {
            model.ProbA = null;
            model.ProbB = null;
        }

        // Count support vectors per class and in total.
        int nnz = 0;
        int[] nz_count = new int[nr_class];
        model.SupportVectorsNumbers = new int[nr_class];
        for (i = 0; i < nr_class; i++)
        {
            int nSV = 0;
            for (int j = 0; j < count[i]; j++)
            {
                if (nonzero[start[i] + j])
                {
                    ++nSV;
                    ++nnz;
                }
            }
            model.SupportVectorsNumbers[i] = nSV;
            nz_count[i] = nSV;
        }

        Svm.info("Total nSV = " + nnz + "\n");

        model.TotalSupportVectorsNumber = nnz;
        model.SupportVectors = new SvmNode[nnz][];
        p = 0;
        for (i = 0; i < l; i++)
        {
            if (nonzero[i])
            {
                model.SupportVectors[p++] = x[i];
            }
        }

        // nz_start[i] = index of class i's first support vector in the packed SV array.
        int[] nz_start = new int[nr_class];
        nz_start[0] = 0;
        for (i = 1; i < nr_class; i++)
        {
            nz_start[i] = nz_start[i - 1] + nz_count[i - 1];
        }

        model.SupportVectorsCoefficients = new double[nr_class - 1][];
        for (i = 0; i < nr_class - 1; i++)
        {
            model.SupportVectorsCoefficients[i] = new double[nnz];
        }

        p = 0;
        for (i = 0; i < nr_class; i++)
        {
            for (int j = i + 1; j < nr_class; j++)
            {
                // classifier (i,j): coefficients with
                // i are in sv_coef[j-1][nz_start[i]...],
                // j are in sv_coef[i][nz_start[j]...]
                int si = start[i];
                int sj = start[j];
                int ci = count[i];
                int cj = count[j];

                int q = nz_start[i];
                int k;
                for (k = 0; k < ci; k++)
                {
                    if (nonzero[si + k])
                    {
                        model.SupportVectorsCoefficients[j - 1][q++] = f[p].Alpha[k];
                    }
                }
                q = nz_start[j];
                for (k = 0; k < cj; k++)
                {
                    if (nonzero[sj + k])
                    {
                        model.SupportVectorsCoefficients[i][q++] = f[p].Alpha[ci + k];
                    }
                }
                ++p;
            }
        }
    }
    return model;
}
//
// Interface functions
//
/// <summary>
/// Trains an SVM model on <paramref name="prob"/> according to <paramref name="param"/>.
/// SVR/one-class types produce a single decision function; classification trains
/// one-vs-one decision functions for every class pair (k*(k-1)/2 sub-problems).
/// </summary>
/// <param name="prob">The training problem (inputs <c>X</c> and targets <c>Y</c>).</param>
/// <param name="param">Training parameters (SVM type, C, per-class weights, probability flag, ...).</param>
/// <returns>The trained <see cref="SvmModel"/>.</returns>
public static SvmModel Train(SvmProblem prob, SvmParameter param)
{
    var model = new SvmModel();
    model.Param = param;
    if (param.SvmType.IsSVROrOneClass())
    {
        // regression or one-class-svm: single decision function, no class labels
        model.NrClass = 2;
        model.Label = null;
        model.SupportVectorsNumbers = null;
        model.ProbA = null;
        model.ProbB = null;
        model.SupportVectorsCoefficients = new double[1][];
        if (param.Probability && param.SvmType.IsSVR())
        {
            model.ProbA = new double[1];
            model.ProbA[0] = svm_svr_probability(prob, param);
        }
        DecisionFunction f = svm_train_one(prob, param, 0, 0);
        model.Rho = new double[1];
        model.Rho[0] = f.Rho;
        // Count support vectors: points with non-zero alpha.
        int nSV = 0;
        int i;
        for (i = 0; i < prob.Lenght; i++)
        {
            if (Math.Abs(f.Alpha[i]) > 0)
            {
                ++nSV;
            }
        }
        model.TotalSupportVectorsNumber = nSV;
        model.SupportVectors = new SvmNode[nSV][];
        model.SupportVectorsCoefficients[0] = new double[nSV];
        int j = 0;
        for (i = 0; i < prob.Lenght; i++)
        {
            if (Math.Abs(f.Alpha[i]) > 0)
            {
                model.SupportVectors[j] = prob.X[i];
                model.SupportVectorsCoefficients[0][j] = f.Alpha[i];
                ++j;
            }
        }
    }
    else
    {
        // classification
        int l = prob.Lenght;
        int[] perm = new int[l];
        int nr_class;
        int[] label;
        int[] start;
        int[] count;
        // group training data of the same class
        svm_group_classes(prob, out nr_class, out label, out start, out count, perm);
        if (nr_class == 1)
        {
            Svm.info("WARNING: training data in only one class. See README for details.\n");
        }
        // x = training inputs reordered so each class's points are contiguous.
        SvmNode[][] x = new SvmNode[l][];
        int i;
        for (i = 0; i < l; i++)
        {
            x[i] = prob.X[perm[i]];
        }
        // calculate weighted C
        double[] weighted_C = new double[nr_class];
        for (i = 0; i < nr_class; i++)
        {
            weighted_C[i] = param.C;
        }
        for (i = 0; i < param.WeightsCount; i++)
        {
            int j;
            for (j = 0; j < nr_class; j++)
            {
                if (param.WeightLabel[i] == label[j])
                {
                    break;
                }
            }
            if (j == nr_class)
            {
                System.Diagnostics.Debug.WriteLine("WARNING: class label " + param.WeightLabel[i] + " specified in weight is not found\n");
            }
            else
            {
                weighted_C[j] *= param.Weight[i];
            }
        }
        // train k*(k-1)/2 models
        var nonzero = new bool[l];
        for (i = 0; i < l; i++)
        {
            nonzero[i] = false;
        }
        var f = new DecisionFunction[nr_class * (nr_class - 1) / 2];
        double[] probA = null, probB = null;
        if (param.Probability)
        {
            probA = new double[nr_class * (nr_class - 1) / 2];
            probB = new double[nr_class * (nr_class - 1) / 2];
        }
        // p indexes the current (i,j) pair across all loops below.
        int p = 0;
        for (i = 0; i < nr_class; i++)
        {
            for (int j = i + 1; j < nr_class; j++)
            {
                // Build the two-class sub-problem: class i labelled +1, class j labelled -1.
                int si = start[i], sj = start[j];
                int ci = count[i], cj = count[j];
                var subprobLenght = ci + cj;
                var sub_prob = new SvmProblem { X = new SvmNode[subprobLenght][], Y = new double[subprobLenght] };
                int k;
                for (k = 0; k < ci; k++)
                {
                    sub_prob.X[k] = x[si + k];
                    sub_prob.Y[k] = +1;
                }
                for (k = 0; k < cj; k++)
                {
                    sub_prob.X[ci + k] = x[sj + k];
                    sub_prob.Y[ci + k] = -1;
                }
                if (param.Probability)
                {
                    double[] probAB = new double[2];
                    svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                    probA[p] = probAB[0];
                    probB[p] = probAB[1];
                }
                f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
                // Mark any point with a non-zero alpha in this sub-problem as a support vector.
                for (k = 0; k < ci; k++)
                {
                    if (!nonzero[si + k] && Math.Abs(f[p].Alpha[k]) > 0)
                    {
                        nonzero[si + k] = true;
                    }
                }
                for (k = 0; k < cj; k++)
                {
                    if (!nonzero[sj + k] && Math.Abs(f[p].Alpha[ci + k]) > 0)
                    {
                        nonzero[sj + k] = true;
                    }
                }
                ++p;
            }
        }
        // build output
        model.NrClass = nr_class;
        model.Label = new int[nr_class];
        for (i = 0; i < nr_class; i++)
        {
            model.Label[i] = label[i];
        }
        model.Rho = new double[nr_class * (nr_class - 1) / 2];
        for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
        {
            model.Rho[i] = f[i].Rho;
        }
        if (param.Probability)
        {
            model.ProbA = new double[nr_class * (nr_class - 1) / 2];
            model.ProbB = new double[nr_class * (nr_class - 1) / 2];
            for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
            {
                model.ProbA[i] = probA[i];
                model.ProbB[i] = probB[i];
            }
        }
        else
        {
            model.ProbA = null;
            model.ProbB = null;
        }
        // Count support vectors per class (nz_count) and overall (nnz).
        int nnz = 0;
        int[] nz_count = new int[nr_class];
        model.SupportVectorsNumbers = new int[nr_class];
        for (i = 0; i < nr_class; i++)
        {
            int nSV = 0;
            for (int j = 0; j < count[i]; j++)
            {
                if (nonzero[start[i] + j])
                {
                    ++nSV;
                    ++nnz;
                }
            }
            model.SupportVectorsNumbers[i] = nSV;
            nz_count[i] = nSV;
        }
        Svm.info("Total nSV = " + nnz + "\n");
        model.TotalSupportVectorsNumber = nnz;
        // Pack only the support vectors into the model, preserving class order.
        model.SupportVectors = new SvmNode[nnz][];
        p = 0;
        for (i = 0; i < l; i++)
        {
            if (nonzero[i])
            {
                model.SupportVectors[p++] = x[i];
            }
        }
        // nz_start[i] = offset of class i's first support vector in the packed array.
        int[] nz_start = new int[nr_class];
        nz_start[0] = 0;
        for (i = 1; i < nr_class; i++)
        {
            nz_start[i] = nz_start[i - 1] + nz_count[i - 1];
        }
        model.SupportVectorsCoefficients = new double[nr_class - 1][];
        for (i = 0; i < nr_class - 1; i++)
        {
            model.SupportVectorsCoefficients[i] = new double[nnz];
        }
        p = 0;
        for (i = 0; i < nr_class; i++)
        {
            for (int j = i + 1; j < nr_class; j++)
            {
                // classifier (i,j): coefficients with
                // i are in sv_coef[j-1][nz_start[i]...],
                // j are in sv_coef[i][nz_start[j]...]
                int si = start[i];
                int sj = start[j];
                int ci = count[i];
                int cj = count[j];
                int q = nz_start[i];
                int k;
                for (k = 0; k < ci; k++)
                {
                    if (nonzero[si + k])
                    {
                        model.SupportVectorsCoefficients[j - 1][q++] = f[p].Alpha[k];
                    }
                }
                q = nz_start[j];
                for (k = 0; k < cj; k++)
                {
                    if (nonzero[sj + k])
                    {
                        model.SupportVectorsCoefficients[i][q++] = f[p].Alpha[ci + k];
                    }
                }
                ++p;
            }
        }
    }
    return(model);
}
/// <summary>
/// Trains a single sub-problem by dispatching to the solver for the configured
/// SVM type, logs the solution statistics, and wraps the resulting alpha vector
/// and offset into a <see cref="DecisionFunction"/>.
/// </summary>
/// <param name="prob">The (sub-)problem to solve.</param>
/// <param name="param">Training parameters; <c>SvmType</c> selects the solver.</param>
/// <param name="Cp">Penalty for positive samples (used by C_SVC only).</param>
/// <param name="Cn">Penalty for negative samples (used by C_SVC only).</param>
private static DecisionFunction svm_train_one(SvmProblem prob, SvmParameter param, double Cp, double Cn)
{
    var alphas = new double[prob.Lenght];
    var solution = new SolutionInfo();

    // Dispatch to the solver matching the requested formulation.
    switch (param.SvmType)
    {
        case SvmType.C_SVC:
            solve_c_svc(prob, param, alphas, solution, Cp, Cn);
            break;
        case SvmType.NU_SVC:
            solve_nu_svc(prob, param, alphas, solution);
            break;
        case SvmType.ONE_CLASS:
            solve_one_class(prob, param, alphas, solution);
            break;
        case SvmType.EPSILON_SVR:
            solve_epsilon_svr(prob, param, alphas, solution);
            break;
        case SvmType.NU_SVR:
            solve_nu_svr(prob, param, alphas, solution);
            break;
    }

    Svm.info("obj = " + solution.Obj + ", rho = " + solution.Rho + "\n");

    // output SVs
    int supportCount = 0;
    int boundedCount = 0;
    for (int idx = 0; idx < prob.Lenght; idx++)
    {
        double magnitude = Math.Abs(alphas[idx]);
        if (magnitude > 0)
        {
            ++supportCount;
            // A support vector whose alpha sits at its upper bound is "bounded".
            double bound = prob.Y[idx] > 0 ? solution.UpperBoundP : solution.UpperBoundN;
            if (magnitude >= bound)
            {
                ++boundedCount;
            }
        }
    }

    Svm.info("nSV = " + supportCount + ", nBSV = " + boundedCount + "\n");

    return new DecisionFunction(alphas, solution.Rho);
}
/// <summary>
/// Solves one training sub-problem with the solver selected by
/// <c>param.SvmType</c>, reports objective/rho and the support-vector counts,
/// and returns the alphas plus offset as a <see cref="DecisionFunction"/>.
/// </summary>
/// <param name="prob">The (sub-)problem to solve.</param>
/// <param name="param">Training parameters; <c>SvmType</c> selects the solver.</param>
/// <param name="Cp">Penalty for positive samples (C_SVC only).</param>
/// <param name="Cn">Penalty for negative samples (C_SVC only).</param>
private static DecisionFunction svm_train_one(SvmProblem prob, SvmParameter param, double Cp, double Cn)
{
    int problemSize = prob.Lenght;
    double[] multipliers = new double[problemSize];
    var result = new SolutionInfo();

    switch (param.SvmType)
    {
        case SvmType.C_SVC:
            solve_c_svc(prob, param, multipliers, result, Cp, Cn);
            break;
        case SvmType.NU_SVC:
            solve_nu_svc(prob, param, multipliers, result);
            break;
        case SvmType.ONE_CLASS:
            solve_one_class(prob, param, multipliers, result);
            break;
        case SvmType.EPSILON_SVR:
            solve_epsilon_svr(prob, param, multipliers, result);
            break;
        case SvmType.NU_SVR:
            solve_nu_svr(prob, param, multipliers, result);
            break;
    }

    Svm.info("obj = " + result.Obj + ", rho = " + result.Rho + "\n");

    // output SVs
    int nSV = 0;
    int nBSV = 0;
    for (int i = 0; i < problemSize; i++)
    {
        double a = Math.Abs(multipliers[i]);
        if (!(a > 0))
        {
            continue; // zero alpha: not a support vector
        }
        ++nSV;
        if (prob.Y[i] > 0)
        {
            if (a >= result.UpperBoundP)
            {
                ++nBSV; // bounded SV on the positive side
            }
        }
        else
        {
            if (a >= result.UpperBoundN)
            {
                ++nBSV; // bounded SV on the negative side
            }
        }
    }

    Svm.info("nSV = " + nSV + ", nBSV = " + nBSV + "\n");

    return (new DecisionFunction(multipliers, result.Rho));
}