private static void solve_one_class(SvmProblem prob, SvmParameter param, double[] alpha, SolutionInfo si)
{
    int l = prob.Lenght;
    double[] zeros = new double[l];
    sbyte[] ones = new sbyte[l];
    int i;

    int n = (int)(param.Nu * prob.Lenght); // # of alpha's at upper bound

    for (i = 0; i < n; i++)
        alpha[i] = 1;
    if (n < prob.Lenght)
        alpha[n] = param.Nu * prob.Lenght - n;
    for (i = n + 1; i < l; i++)
        alpha[i] = 0;

    for (i = 0; i < l; i++)
    {
        zeros[i] = 0;
        ones[i] = 1;
    }

    var s = new Solver();
    s.Solve(l, new OneClassQ(prob, param), zeros, ones, alpha, 1.0, 1.0, param.Eps, si, param.Shrinking);
}
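For reference, this initialization is a feasible starting point of the one-class dual that is handed to the Solver (following the LibSVM formulation, with Q_{ij} = K(x_i, x_j)):

\min_{\alpha} \; \tfrac{1}{2}\,\alpha^{T} Q \alpha
\quad \text{subject to} \quad 0 \le \alpha_i \le 1, \;\; e^{T}\alpha = \nu l

Setting the first \lfloor \nu l \rfloor entries of alpha to 1 and the next entry to the fractional remainder satisfies the equality constraint exactly.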
private static void solve_epsilon_svr(SvmProblem prob, SvmParameter param, double[] alpha, SolutionInfo si)
{
    int l = prob.Lenght;
    double[] alpha2 = new double[2 * l];
    double[] linear_term = new double[2 * l];
    sbyte[] y = new sbyte[2 * l];
    int i;

    for (i = 0; i < l; i++)
    {
        alpha2[i] = 0;
        linear_term[i] = param.P - prob.Y[i];
        y[i] = 1;

        alpha2[i + l] = 0;
        linear_term[i + l] = param.P + prob.Y[i];
        y[i + l] = -1;
    }

    Solver s = new Solver();
    s.Solve(2 * l, new SvrQ(prob, param), linear_term, y, alpha2, param.C, param.C, param.Eps, si, param.Shrinking);

    double sum_alpha = 0;
    for (i = 0; i < l; i++)
    {
        alpha[i] = alpha2[i] - alpha2[i + l];
        sum_alpha += Math.Abs(alpha[i]);
    }
    Svm.info("nu = " + sum_alpha / (param.C * l) + "\n");
}
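The stacked 2l-variable problem built here is the standard ε-SVR dual, with ε = param.P and the first and second halves of alpha2 playing the roles of α and α*:

\min_{\alpha,\alpha^{*}} \; \tfrac{1}{2}(\alpha-\alpha^{*})^{T} Q (\alpha-\alpha^{*}) + \varepsilon \sum_{i}(\alpha_i+\alpha_i^{*}) - \sum_{i} y_i(\alpha_i-\alpha_i^{*})
\quad \text{subject to} \quad e^{T}(\alpha-\alpha^{*}) = 0, \;\; 0 \le \alpha_i, \alpha_i^{*} \le C

The two halves of linear_term, p - y_i and p + y_i, are exactly the coefficients of the linear part, and the final regression coefficient is α_i - α_i*.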
public OneClassQ(SvmProblem prob, SvmParameter param)
    : base(prob.Lenght, prob.X, param)
{
    _cache = new Cache(prob.Lenght, (long)(param.CacheSize * (1 << 20)));
    _qd = new double[prob.Lenght];
    for (int i = 0; i < prob.Lenght; i++)
        _qd[i] = kernel_function(i, i);
}
public SvcQ(SvmProblem prob, SvmParameter param, sbyte[] y_)
    : base(prob.Lenght, prob.X, param)
{
    y = (sbyte[])y_.Clone();
    cache = new Cache(prob.Lenght, (long)(param.CacheSize * (1 << 20)));
    QD = new double[prob.Lenght];
    for (int i = 0; i < prob.Lenght; i++)
        QD[i] = kernel_function(i, i);
}
//from Svm.svm_check_parameter
public void Check(SvmProblem prob)
{
    if (Gamma < 0)
        throw new Exception("gamma < 0");
    if (Degree < 0)
        throw new Exception("degree of polynomial kernel < 0");

    // cache_size, eps, C, nu, p, shrinking
    if (CacheSize <= 0)
        throw new Exception("cache_size <= 0");
    if (Eps <= 0)
        throw new Exception("eps <= 0");
    if (SvmType.UseCParameter() && C <= 0)
        throw new Exception("C <= 0");
    if (SvmType.UseNuParameter() && (Nu <= 0 || Nu > 1))
        throw new Exception("nu <= 0 or nu > 1");
    if (SvmType.UsePParameter() && P < 0)
        throw new Exception("p < 0");
    if (Probability && SvmType.IsOneClass())
        throw new Exception("one-class SVM probability output not supported yet");

    // check whether nu-svc is feasible
    IsNuFeasible(prob);
}
private static void solve_c_svc(SvmProblem prob, SvmParameter param, double[] alpha, SolutionInfo si, double Cp, double Cn)
{
    int l = prob.Lenght;
    double[] minus_ones = new double[l];
    sbyte[] y = new sbyte[l];

    for (int i = 0; i < l; i++)
    {
        alpha[i] = 0;
        minus_ones[i] = -1;
        y[i] = (sbyte)(prob.Y[i] > 0 ? +1 : -1);
    }

    Solver s = new Solver();
    s.Solve(l, new SvcQ(prob, param, y), minus_ones, y, alpha, Cp, Cn, param.Eps, si, param.Shrinking);

    double sum_alpha = 0;
    for (int i = 0; i < l; i++)
        sum_alpha += alpha[i];

    if (Cp == Cn)
        Svm.info("nu = " + sum_alpha / (Cp * prob.Lenght) + "\n");

    for (int i = 0; i < l; i++)
        alpha[i] *= y[i];
}
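This sets up the usual C-SVC dual (the ±1 labels make Q_{ij} = y_i y_j K(x_i, x_j)):

\min_{\alpha} \; \tfrac{1}{2}\,\alpha^{T} Q \alpha - e^{T}\alpha
\quad \text{subject to} \quad y^{T}\alpha = 0, \;\; 0 \le \alpha_i \le C

with the upper bound Cp for positive and Cn for negative examples when class weights differ.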
public static void Run() { Console.WriteLine("EpsSVRDemo"); var rnd = new Random(); var trainData = DemoHelper.Range(-10.0, 10.01, 0.1).Select(val => new { X = val, Y = DemoHelper.Sinc(val) + (rnd.NextDouble() - 0.5) / 4 }); var parameters = new SvmParameter { SvmType = SvmType.EPSILON_SVR, KernelType = KernelType.Rbf, Gamma = 0.5, CacheSize = 128, C = 1, Eps = 1e-3, P = 0.1, Shrinking = true, Probability = false }; var problem = new SvmProblem { Y = trainData.Select(p => p.Y).ToArray(), X = trainData.Select(p => p.X.ToSvmNodes()).ToArray() }; parameters.Check(problem); var model = Svm.Train(problem, parameters); foreach (var item in DemoHelper.Range(-1.0, 1.01, 0.1)) { var x = item.ToSvmNodes(); var yPred = model.Predict(x); var yReal = DemoHelper.Sinc(item); Console.WriteLine("x: {0}", item); Console.WriteLine("y_real: {0}", yReal); Console.WriteLine("y_pred: {0}", yPred); Console.WriteLine(); } }
public static void Run() { Console.WriteLine("OneClassDemo"); var trainData = DemoHelper.GenerateClass(0, 0.5, 0.5, 100); var parameters = new SvmParameter { SvmType = SvmType.ONE_CLASS, KernelType = KernelType.Rbf, Gamma = 0.5, Nu = 0.5, CacheSize = 128, Eps = 1e-3, Shrinking = true, Probability = false }; var problem = new SvmProblem { Y = trainData.Select(p => 1.0).ToArray(), X = trainData.Select(p => p.ToSvmNodes()).ToArray() }; parameters.Check(problem); var model = Svm.Train(problem, parameters); var x = new Point(0.9, 0.9).ToSvmNodes(); var resx = model.Predict(x); Console.WriteLine(resx); var y = new Point(0.5, 0.5).ToSvmNodes(); var resy = model.Predict(y); Console.WriteLine(resy); var z = new Point(0.45, 0.45).ToSvmNodes(); var resz = model.Predict(z); Console.WriteLine(resz); }
public static void Run() { Console.WriteLine("CSVMDemo"); var class1 = DemoHelper.GenerateClass(0, 0.1, 0.1, 50); var class2 = DemoHelper.GenerateClass(1, 0.8, 0.8, 50); var trainData = class1.Concat(class2); var parameters = new SvmParameter { SvmType = SvmType.C_SVC, KernelType = KernelType.Rbf, Gamma = 0.5, CacheSize = 128, C = 1, Eps = 1e-3, Shrinking = true, Probability = false }; var problem = new SvmProblem { Y = trainData.Select(p => (double)p.Label).ToArray(), X = trainData.Select(p => p.ToSvmNodes()).ToArray() }; parameters.Check(problem); var model = Svm.Train(problem, parameters); var x = new Point(0.9, 0.9).ToSvmNodes(); var resx = model.Predict(x); Console.WriteLine(resx); var y = new Point(0.1, 0.1).ToSvmNodes(); var resy = model.Predict(y); Console.WriteLine(resy); }
// Return parameter of a Laplace distribution
private static double svm_svr_probability(SvmProblem prob, SvmParameter param)
{
    int i;
    int nr_fold = 5;
    double[] ymv = new double[prob.Lenght];
    double mae = 0;

    var newparam = (SvmParameter)param.Clone();
    newparam.Probability = false;
    CrossValidation(prob, newparam, nr_fold, ymv);

    for (i = 0; i < prob.Lenght; i++)
    {
        ymv[i] = prob.Y[i] - ymv[i];
        mae += Math.Abs(ymv[i]);
    }
    mae /= prob.Lenght;

    // discard outliers beyond five standard deviations, then re-estimate
    double std = Math.Sqrt(2 * mae * mae);
    int count = 0;
    mae = 0;
    for (i = 0; i < prob.Lenght; i++)
    {
        if (Math.Abs(ymv[i]) > 5 * std)
            count = count + 1;
        else
            mae += Math.Abs(ymv[i]);
    }
    mae /= (prob.Lenght - count);
    Svm.info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + mae + "\n");
    return mae;
}
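The probability model fitted here is y = ŷ + z with a zero-mean Laplace density

g(z) = \frac{1}{2\sigma}\, e^{-|z|/\sigma}

σ is estimated as the mean absolute cross-validation residual, recomputed after discarding residuals larger than 5σ₀, where σ₀ = \sqrt{2}\,\mathrm{MAE} is the standard deviation of a Laplace distribution whose scale equals the initial MAE.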
public SvrQ(SvmProblem prob, SvmParameter param)
    : base(prob.Lenght, prob.X, param)
{
    l = prob.Lenght;
    cache = new Cache(l, (long)(param.CacheSize * (1 << 20)));
    QD = new double[2 * l];
    sign = new sbyte[2 * l];
    index = new int[2 * l];
    for (int k = 0; k < l; k++)
    {
        sign[k] = 1;
        sign[k + l] = -1;
        index[k] = k;
        index[k + l] = k;
        QD[k] = kernel_function(k, k);
        QD[k + l] = QD[k];
    }
    buffer = new double[2][] { new double[2 * l], new double[2 * l] };
    next_buffer = 0;
}
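The sign and index arrays let the solver address the 2l-by-2l kernel matrix of the stacked SVR dual without ever materializing it:

\bar{Q}_{ij} = s_i s_j\, K(x_{\pi(i)}, x_{\pi(j)}), \qquad
\bar{Q} = \begin{pmatrix} Q & -Q \\ -Q & Q \end{pmatrix}

where s is sign, π is index, and Q is the plain l-by-l kernel matrix.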
private static void solve_nu_svr(SvmProblem prob, SvmParameter param, double[] alpha, SolutionInfo si)
{
    int l = prob.Lenght;
    double C = param.C;
    double[] alpha2 = new double[2 * l];
    double[] linear_term = new double[2 * l];
    sbyte[] y = new sbyte[2 * l];
    int i;

    double sum = C * param.Nu * l / 2;
    for (i = 0; i < l; i++)
    {
        alpha2[i] = alpha2[i + l] = Math.Min(sum, C);
        sum -= alpha2[i];

        linear_term[i] = -prob.Y[i];
        y[i] = 1;

        linear_term[i + l] = prob.Y[i];
        y[i + l] = -1;
    }

    var s = new SolverNu();
    s.Solve(2 * l, new SvrQ(prob, param), linear_term, y, alpha2, C, C, param.Eps, si, param.Shrinking);

    Svm.info("epsilon = " + (-si.R) + "\n");

    for (i = 0; i < l; i++)
        alpha[i] = alpha2[i] - alpha2[i + l];
}
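Compared with ε-SVR, the ν-SVR dual solved here drops ε from the objective and adds a second equality constraint; ε is recovered from the solution afterwards as -si.R:

\min_{\alpha,\alpha^{*}} \; \tfrac{1}{2}(\alpha-\alpha^{*})^{T} Q (\alpha-\alpha^{*}) - \sum_{i} y_i(\alpha_i-\alpha_i^{*})
\quad \text{subject to} \quad e^{T}(\alpha-\alpha^{*}) = 0, \;\; e^{T}(\alpha+\alpha^{*}) = C\nu l, \;\; 0 \le \alpha_i, \alpha_i^{*} \le C

The initialization loop distributes Cνl/2 over each half of alpha2, which satisfies both equality constraints.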
// Cross-validation decision values for probability estimates
private static void svm_binary_svc_probability(SvmProblem prob, SvmParameter param, double Cp, double Cn, double[] probAB)
{
    int nr_fold = 5;
    int[] perm = new int[prob.Lenght];
    double[] dec_values = new double[prob.Lenght];

    // random shuffle (Fisher-Yates)
    var rnd = new Random();
    for (int i = 0; i < prob.Lenght; i++)
        perm[i] = i;
    for (int i = 0; i < prob.Lenght; i++)
    {
        int j = i + (int)(rnd.NextDouble() * (prob.Lenght - i));
        Common.Swap(ref perm[i], ref perm[j]);
    }

    for (int i = 0; i < nr_fold; i++)
    {
        int begin = i * prob.Lenght / nr_fold;
        int end = (i + 1) * prob.Lenght / nr_fold;

        var subprobLength = prob.Lenght - (end - begin);
        var subprob = new SvmProblem
        {
            X = new SvmNode[subprobLength][],
            Y = new double[subprobLength]
        };

        int k = 0;
        for (int j = 0; j < begin; j++)
        {
            subprob.X[k] = prob.X[perm[j]];
            subprob.Y[k] = prob.Y[perm[j]];
            ++k;
        }
        for (int j = end; j < prob.Lenght; j++)
        {
            subprob.X[k] = prob.X[perm[j]];
            subprob.Y[k] = prob.Y[perm[j]];
            ++k;
        }

        int p_count = 0, n_count = 0;
        for (int j = 0; j < k; j++)
        {
            if (subprob.Y[j] > 0)
                p_count++;
            else
                n_count++;
        }

        if (p_count == 0 && n_count == 0)
        {
            for (int j = begin; j < end; j++)
                dec_values[perm[j]] = 0;
        }
        else if (p_count > 0 && n_count == 0)
        {
            for (int j = begin; j < end; j++)
                dec_values[perm[j]] = 1;
        }
        else if (p_count == 0 && n_count > 0)
        {
            for (int j = begin; j < end; j++)
                dec_values[perm[j]] = -1;
        }
        else
        {
            var subparam = (SvmParameter)param.Clone();
            subparam.Probability = false;
            subparam.C = 1.0;
            subparam.WeightLabel = new int[2];
            subparam.Weight = new double[2];
            subparam.WeightLabel[0] = +1;
            subparam.WeightLabel[1] = -1;
            subparam.Weight[0] = Cp;
            subparam.Weight[1] = Cn;
            var submodel = Train(subprob, subparam);
            for (int j = begin; j < end; j++)
            {
                double[] dec_value = new double[1];
                submodel.PredictValues(prob.X[perm[j]], dec_value);
                dec_values[perm[j]] = dec_value[0];
                // ensure +1 -1 order; reason not using CV subroutine
                dec_values[perm[j]] *= submodel.Label[0];
            }
        }
    }
    sigmoid_train(prob.Lenght, dec_values, prob.Y, probAB);
}
// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
// perm, length l, must be allocated before calling this subroutine
private static void svm_group_classes(SvmProblem prob, out int nr_class_ret, out int[] label_ret, out int[] start_ret, out int[] count_ret, int[] perm)
{
    int l = prob.Lenght;
    int max_nr_class = 16;
    int nr_class = 0;
    int[] label = new int[max_nr_class];
    int[] count = new int[max_nr_class];
    int[] data_label = new int[l];
    int i;

    for (i = 0; i < l; i++)
    {
        int this_label = (int)prob.Y[i];
        int j;
        for (j = 0; j < nr_class; j++)
        {
            if (this_label == label[j])
            {
                ++count[j];
                break;
            }
        }
        data_label[i] = j;
        if (j == nr_class)
        {
            if (nr_class == max_nr_class)
            {
                // grow the label/count arrays
                max_nr_class *= 2;
                int[] new_data = new int[max_nr_class];
                Array.Copy(label, 0, new_data, 0, label.Length);
                label = new_data;
                new_data = new int[max_nr_class];
                Array.Copy(count, 0, new_data, 0, count.Length);
                count = new_data;
            }
            label[nr_class] = this_label;
            count[nr_class] = 1;
            ++nr_class;
        }
    }

    int[] start = new int[nr_class];
    start[0] = 0;
    for (i = 1; i < nr_class; i++)
        start[i] = start[i - 1] + count[i - 1];
    for (i = 0; i < l; i++)
    {
        perm[start[data_label[i]]] = i;
        ++start[data_label[i]];
    }
    start[0] = 0;
    for (i = 1; i < nr_class; i++)
        start[i] = start[i - 1] + count[i - 1];

    nr_class_ret = nr_class;
    label_ret = label;
    start_ret = start;
    count_ret = count;
}
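For example, given labels (3, 1, 3, 2, 1) the routine produces nr_class = 3, label = [3, 1, 2] (order of first appearance), count = [2, 2, 1], start = [0, 2, 4] and perm = [0, 2, 1, 4, 3], so perm[start[c]] through perm[start[c] + count[c] - 1] index the samples of class c in the original data.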
// check whether nu-svc is feasible
private void IsNuFeasible(SvmProblem prob)
{
    if (!SvmType.IsNuSVC())
        return;

    int l = prob.Lenght;
    int max_nr_class = 16;
    int nr_class = 0;
    int[] label = new int[max_nr_class];
    int[] count = new int[max_nr_class];

    int i;
    for (i = 0; i < l; i++)
    {
        int this_label = (int)prob.Y[i];
        int j;
        for (j = 0; j < nr_class; j++)
        {
            if (this_label == label[j])
            {
                ++count[j];
                break;
            }
        }
        if (j == nr_class)
        {
            if (nr_class == max_nr_class)
            {
                max_nr_class *= 2;
                int[] new_data = new int[max_nr_class];
                Array.Copy(label, 0, new_data, 0, label.Length);
                label = new_data;
                new_data = new int[max_nr_class];
                Array.Copy(count, 0, new_data, 0, count.Length);
                count = new_data;
            }
            label[nr_class] = this_label;
            count[nr_class] = 1;
            ++nr_class;
        }
    }

    for (i = 0; i < nr_class; i++)
    {
        int n1 = count[i];
        for (int j = i + 1; j < nr_class; j++)
        {
            int n2 = count[j];
            if (this.Nu * (n1 + n2) / 2 > Math.Min(n1, n2))
                throw new Exception("specified nu is infeasible");
        }
    }
}
//
// Interface functions
//
public static SvmModel Train(SvmProblem prob, SvmParameter param)
{
    var model = new SvmModel();
    model.Param = param;

    if (param.SvmType.IsSVROrOneClass())
    {
        // regression or one-class-svm
        model.NrClass = 2;
        model.Label = null;
        model.SupportVectorsNumbers = null;
        model.ProbA = null;
        model.ProbB = null;
        model.SupportVectorsCoefficients = new double[1][];

        if (param.Probability && param.SvmType.IsSVR())
        {
            model.ProbA = new double[1];
            model.ProbA[0] = svm_svr_probability(prob, param);
        }

        DecisionFunction f = svm_train_one(prob, param, 0, 0);
        model.Rho = new double[1];
        model.Rho[0] = f.Rho;

        int nSV = 0;
        int i;
        for (i = 0; i < prob.Lenght; i++)
        {
            if (Math.Abs(f.Alpha[i]) > 0)
                ++nSV;
        }
        model.TotalSupportVectorsNumber = nSV;
        model.SupportVectors = new SvmNode[nSV][];
        model.SupportVectorsCoefficients[0] = new double[nSV];
        int j = 0;
        for (i = 0; i < prob.Lenght; i++)
        {
            if (Math.Abs(f.Alpha[i]) > 0)
            {
                model.SupportVectors[j] = prob.X[i];
                model.SupportVectorsCoefficients[0][j] = f.Alpha[i];
                ++j;
            }
        }
    }
    else
    {
        // classification
        int l = prob.Lenght;
        int[] perm = new int[l];
        int nr_class;
        int[] label;
        int[] start;
        int[] count;

        // group training data of the same class
        svm_group_classes(prob, out nr_class, out label, out start, out count, perm);
        if (nr_class == 1)
            Svm.info("WARNING: training data in only one class. See README for details.\n");

        SvmNode[][] x = new SvmNode[l][];
        int i;
        for (i = 0; i < l; i++)
            x[i] = prob.X[perm[i]];

        // calculate weighted C
        double[] weighted_C = new double[nr_class];
        for (i = 0; i < nr_class; i++)
            weighted_C[i] = param.C;
        for (i = 0; i < param.WeightsCount; i++)
        {
            int j;
            for (j = 0; j < nr_class; j++)
            {
                if (param.WeightLabel[i] == label[j])
                    break;
            }
            if (j == nr_class)
                System.Diagnostics.Debug.WriteLine("WARNING: class label " + param.WeightLabel[i] + " specified in weight is not found\n");
            else
                weighted_C[j] *= param.Weight[i];
        }

        // train k*(k-1)/2 models
        var nonzero = new bool[l];
        for (i = 0; i < l; i++)
            nonzero[i] = false;
        var f = new DecisionFunction[nr_class * (nr_class - 1) / 2];

        double[] probA = null, probB = null;
        if (param.Probability)
        {
            probA = new double[nr_class * (nr_class - 1) / 2];
            probB = new double[nr_class * (nr_class - 1) / 2];
        }

        int p = 0;
        for (i = 0; i < nr_class; i++)
        {
            for (int j = i + 1; j < nr_class; j++)
            {
                int si = start[i], sj = start[j];
                int ci = count[i], cj = count[j];
                var subprobLength = ci + cj;
                var sub_prob = new SvmProblem
                {
                    X = new SvmNode[subprobLength][],
                    Y = new double[subprobLength]
                };
                int k;
                for (k = 0; k < ci; k++)
                {
                    sub_prob.X[k] = x[si + k];
                    sub_prob.Y[k] = +1;
                }
                for (k = 0; k < cj; k++)
                {
                    sub_prob.X[ci + k] = x[sj + k];
                    sub_prob.Y[ci + k] = -1;
                }

                if (param.Probability)
                {
                    double[] probAB = new double[2];
                    svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                    probA[p] = probAB[0];
                    probB[p] = probAB[1];
                }

                f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
                for (k = 0; k < ci; k++)
                {
                    if (!nonzero[si + k] && Math.Abs(f[p].Alpha[k]) > 0)
                        nonzero[si + k] = true;
                }
                for (k = 0; k < cj; k++)
                {
                    if (!nonzero[sj + k] && Math.Abs(f[p].Alpha[ci + k]) > 0)
                        nonzero[sj + k] = true;
                }
                ++p;
            }
        }

        // build output
        model.NrClass = nr_class;
        model.Label = new int[nr_class];
        for (i = 0; i < nr_class; i++)
            model.Label[i] = label[i];
        model.Rho = new double[nr_class * (nr_class - 1) / 2];
        for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
            model.Rho[i] = f[i].Rho;

        if (param.Probability)
        {
            model.ProbA = new double[nr_class * (nr_class - 1) / 2];
            model.ProbB = new double[nr_class * (nr_class - 1) / 2];
            for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
            {
                model.ProbA[i] = probA[i];
                model.ProbB[i] = probB[i];
            }
        }
        else
        {
            model.ProbA = null;
            model.ProbB = null;
        }

        int nnz = 0;
        int[] nz_count = new int[nr_class];
        model.SupportVectorsNumbers = new int[nr_class];
        for (i = 0; i < nr_class; i++)
        {
            int nSV = 0;
            for (int j = 0; j < count[i]; j++)
            {
                if (nonzero[start[i] + j])
                {
                    ++nSV;
                    ++nnz;
                }
            }
            model.SupportVectorsNumbers[i] = nSV;
            nz_count[i] = nSV;
        }
        Svm.info("Total nSV = " + nnz + "\n");

        model.TotalSupportVectorsNumber = nnz;
        model.SupportVectors = new SvmNode[nnz][];
        p = 0;
        for (i = 0; i < l; i++)
        {
            if (nonzero[i])
                model.SupportVectors[p++] = x[i];
        }

        int[] nz_start = new int[nr_class];
        nz_start[0] = 0;
        for (i = 1; i < nr_class; i++)
            nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

        model.SupportVectorsCoefficients = new double[nr_class - 1][];
        for (i = 0; i < nr_class - 1; i++)
            model.SupportVectorsCoefficients[i] = new double[nnz];

        p = 0;
        for (i = 0; i < nr_class; i++)
        {
            for (int j = i + 1; j < nr_class; j++)
            {
                // classifier (i,j): coefficients with
                // i are in sv_coef[j-1][nz_start[i]...],
                // j are in sv_coef[i][nz_start[j]...]
                int si = start[i];
                int sj = start[j];
                int ci = count[i];
                int cj = count[j];

                int q = nz_start[i];
                int k;
                for (k = 0; k < ci; k++)
                {
                    if (nonzero[si + k])
                        model.SupportVectorsCoefficients[j - 1][q++] = f[p].Alpha[k];
                }
                q = nz_start[j];
                for (k = 0; k < cj; k++)
                {
                    if (nonzero[sj + k])
                        model.SupportVectorsCoefficients[i][q++] = f[p].Alpha[ci + k];
                }
                ++p;
            }
        }
    }
    return model;
}
private static void solve_nu_svc(SvmProblem prob, SvmParameter param, double[] alpha, SolutionInfo si)
{
    int i;
    int l = prob.Lenght;
    double nu = param.Nu;

    sbyte[] y = new sbyte[l];
    for (i = 0; i < l; i++)
        y[i] = (sbyte)(prob.Y[i] > 0 ? +1 : -1);

    double sum_pos = nu * l / 2;
    double sum_neg = nu * l / 2;
    for (i = 0; i < l; i++)
    {
        if (y[i] == +1)
        {
            alpha[i] = Math.Min(1.0, sum_pos);
            sum_pos -= alpha[i];
        }
        else
        {
            alpha[i] = Math.Min(1.0, sum_neg);
            sum_neg -= alpha[i];
        }
    }

    double[] zeros = new double[l];
    for (i = 0; i < l; i++)
        zeros[i] = 0;

    var s = new SolverNu();
    s.Solve(l, new SvcQ(prob, param, y), zeros, y, alpha, 1.0, 1.0, param.Eps, si, param.Shrinking);
    double r = si.R;

    Svm.info("C = " + 1 / r + "\n");

    for (i = 0; i < l; i++)
        alpha[i] *= y[i] / r;

    si.Rho /= r;
    si.Obj /= (r * r);
    si.UpperBoundP = 1 / r;
    si.UpperBoundN = 1 / r;
}
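Following the LibSVM implementation notes, the solver is given the scaled ν-SVC problem

\min_{\alpha} \; \tfrac{1}{2}\,\alpha^{T} Q \alpha
\quad \text{subject to} \quad 0 \le \alpha_i \le 1, \;\; y^{T}\alpha = 0, \;\; e^{T}\alpha = \nu l

whose solution is equivalent to a C-SVC with C = 1/r, where r = si.R; that is why alpha, rho, the objective and the box bounds are all rescaled by r (or r squared) before returning.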
private static DecisionFunction svm_train_one(SvmProblem prob, SvmParameter param, double Cp, double Cn)
{
    double[] alpha = new double[prob.Lenght];
    var si = new SolutionInfo();
    switch (param.SvmType)
    {
        case SvmType.C_SVC:
            solve_c_svc(prob, param, alpha, si, Cp, Cn);
            break;
        case SvmType.NU_SVC:
            solve_nu_svc(prob, param, alpha, si);
            break;
        case SvmType.ONE_CLASS:
            solve_one_class(prob, param, alpha, si);
            break;
        case SvmType.EPSILON_SVR:
            solve_epsilon_svr(prob, param, alpha, si);
            break;
        case SvmType.NU_SVR:
            solve_nu_svr(prob, param, alpha, si);
            break;
    }
    Svm.info("obj = " + si.Obj + ", rho = " + si.Rho + "\n");

    // output SVs
    int nSV = 0;
    int nBSV = 0;
    for (int i = 0; i < prob.Lenght; i++)
    {
        if (Math.Abs(alpha[i]) > 0)
        {
            ++nSV;
            if (prob.Y[i] > 0)
            {
                if (Math.Abs(alpha[i]) >= si.UpperBoundP)
                    ++nBSV;
            }
            else
            {
                if (Math.Abs(alpha[i]) >= si.UpperBoundN)
                    ++nBSV;
            }
        }
    }
    Svm.info("nSV = " + nSV + ", nBSV = " + nBSV + "\n");

    var f = new DecisionFunction(alpha, si.Rho);
    return f;
}
protected SvmModel TrainSvmModel(SvmProblem problem)
{
    _parameters.Check(problem);
    var model = LibSvm.Svm.Train(problem, _parameters);
    return model;
}
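A hedged sketch of the same check-then-train flow with probability outputs switched on; it assumes a problem and parameters built as in CSVMDemo (the names here are illustrative, not part of the library):

parameters.Probability = true;
parameters.Check(problem);
var model = Svm.Train(problem, parameters);
double[] estimates = new double[model.NrClass];
double predictedLabel = model.PredictProbability(new Point(0.5, 0.5).ToSvmNodes(), estimates);
// estimates[i] should correspond to class model.Label[i]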
// Stratified cross validation
public static void CrossValidation(SvmProblem prob, SvmParameter param, int nr_fold, double[] target)
{
    int i;
    int[] fold_start = new int[nr_fold + 1];
    int l = prob.Lenght;
    int[] perm = new int[l];

    // stratified cv may not give leave-one-out rate
    // Each class to l folds -> some folds may have zero elements
    if (param.SvmType.IsSVC() && nr_fold < l)
    {
        int nr_class;
        int[] tmp_label;
        int[] start;
        int[] count;
        svm_group_classes(prob, out nr_class, out tmp_label, out start, out count, perm);

        // random shuffle and then data grouped by fold using the array perm
        int[] fold_count = new int[nr_fold];
        int c;
        int[] index = new int[l];
        for (i = 0; i < l; i++)
            index[i] = perm[i];
        var rnd = new Random();
        for (c = 0; c < nr_class; c++)
        {
            for (i = 0; i < count[c]; i++)
            {
                int j = i + (int)(rnd.NextDouble() * (count[c] - i));
                // shuffle within class c; the original swapped index[start[c] + j]
                // with itself, so the per-class data was never actually shuffled
                Common.Swap(ref index[start[c] + i], ref index[start[c] + j]);
            }
        }
        for (i = 0; i < nr_fold; i++)
        {
            fold_count[i] = 0;
            for (c = 0; c < nr_class; c++)
                fold_count[i] += (i + 1) * count[c] / nr_fold - i * count[c] / nr_fold;
        }
        fold_start[0] = 0;
        for (i = 1; i <= nr_fold; i++)
            fold_start[i] = fold_start[i - 1] + fold_count[i - 1];
        for (c = 0; c < nr_class; c++)
        {
            for (i = 0; i < nr_fold; i++)
            {
                int begin = start[c] + i * count[c] / nr_fold;
                int end = start[c] + (i + 1) * count[c] / nr_fold;
                for (int j = begin; j < end; j++)
                {
                    perm[fold_start[i]] = index[j];
                    fold_start[i]++;
                }
            }
        }
        fold_start[0] = 0;
        for (i = 1; i <= nr_fold; i++)
            fold_start[i] = fold_start[i - 1] + fold_count[i - 1];
    }
    else
    {
        var rnd = new Random();
        for (i = 0; i < l; i++)
            perm[i] = i;
        for (i = 0; i < l; i++)
        {
            int j = i + (int)(rnd.NextDouble() * (l - i));
            Common.Swap(ref perm[i], ref perm[j]);
        }
        for (i = 0; i <= nr_fold; i++)
            fold_start[i] = i * l / nr_fold;
    }

    for (i = 0; i < nr_fold; i++)
    {
        int begin = fold_start[i];
        int end = fold_start[i + 1];
        int j, k;
        var subprobLength = l - (end - begin);
        var subprob = new SvmProblem
        {
            X = new SvmNode[subprobLength][],
            Y = new double[subprobLength]
        };
        k = 0;
        for (j = 0; j < begin; j++)
        {
            subprob.X[k] = prob.X[perm[j]];
            subprob.Y[k] = prob.Y[perm[j]];
            ++k;
        }
        for (j = end; j < l; j++)
        {
            subprob.X[k] = prob.X[perm[j]];
            subprob.Y[k] = prob.Y[perm[j]];
            ++k;
        }
        var submodel = Train(subprob, param);
        if (param.Probability && param.SvmType.IsSVC())
        {
            double[] prob_estimates = new double[submodel.NrClass];
            for (j = begin; j < end; j++)
                target[perm[j]] = submodel.PredictProbability(prob.X[perm[j]], prob_estimates);
        }
        else
        {
            for (j = begin; j < end; j++)
                target[perm[j]] = submodel.Predict(prob.X[perm[j]]);
        }
    }
}
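A hedged usage sketch: computing 5-fold cross-validated accuracy for a classification setup; problem and parameters are assumed to be built as in CSVMDemo.

double[] target = new double[problem.Lenght];
Svm.CrossValidation(problem, parameters, 5, target);
int correct = 0;
for (int i = 0; i < problem.Lenght; i++)
{
    if (target[i] == problem.Y[i])
        ++correct;
}
Console.WriteLine("CV accuracy = {0:P2}", (double)correct / problem.Lenght);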