/// <summary>
/// Default SVM.
/// </summary>
/// <remarks>The class stores the SVM parameters and trains the model,
/// so the instance can be used for prediction afterwards.</remarks>
public SVM(svm_problem prob, svm_parameter param)
{
    var error = svm.svm_check_parameter(prob, param);
    if (error != null)
    {
        throw new Exception(error);
    }

    this.prob = prob;
    this.param = param;
    this.Train();
}
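// A minimal usage sketch for this constructor (ProblemHelper.ReadProblem appears in the
// file-based overload later in this collection; the file name and parameter values here
// are illustrative, not taken from the original code):
var prob = ProblemHelper.ReadProblem("train.txt");
var param = new svm_parameter
{
    svm_type = svm_parameter.C_SVC,
    kernel_type = svm_parameter.RBF,
    gamma = 0.5,
    C = 1,
    eps = 1e-3,
    cache_size = 128,
    shrinking = 1,
    probability = 0
};
var machine = new SVM(prob, param);   // validates the parameters, stores them and trains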
public bool LoadFromFile(string fileName)
{
    if (File.Exists(fileName))
    {
        FileStream fs = new FileStream(fileName, FileMode.Open);
        using (BinaryReader r = new BinaryReader(fs))
        {
            this.model = new svm_model();

            svm_parameter p = new svm_parameter();
            p.C = r.ReadDouble();
            p.cache_size = r.ReadDouble();
            p.coef0 = r.ReadDouble();
            p.degree = r.ReadDouble();
            p.eps = r.ReadDouble();
            p.gamma = r.ReadDouble();
            p.kernel_type = r.ReadInt32();
            p.nr_weight = r.ReadInt32();
            p.nu = r.ReadDouble();
            p.p = r.ReadDouble();
            p.probability = r.ReadInt32();
            p.shrinking = r.ReadInt32();
            p.svm_type = r.ReadInt32();
            p.weight = ReadDoubleArray(r);
            p.weight_label = ReadIntArray(r);

            this.model.param = p;
            this.model.nr_class = r.ReadInt32();
            this.model.l = r.ReadInt32();
            this.model.SV = ReadSvmNodeArray(r);
            this.model.sv_coef = ReadDouble2DArray(r);
            this.model.rho = ReadDoubleArray(r);
            this.model.probA = ReadDoubleArray(r);
            this.model.probB = ReadDoubleArray(r);
            this.model.label = ReadIntArray(r);
            this.model.nSV = ReadIntArray(r);
            return true;
        }
    }

    this.model = null;
    return false;
}
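// A hypothetical counterpart for writing the same binary layout. It is not part of the
// original class; the Write* helpers are assumed to mirror the Read* helpers used above,
// and the field order must match LoadFromFile exactly:
public void SaveToFile(string fileName)
{
    using (BinaryWriter w = new BinaryWriter(new FileStream(fileName, FileMode.Create)))
    {
        svm_parameter p = this.model.param;
        w.Write(p.C); w.Write(p.cache_size); w.Write(p.coef0); w.Write(p.degree);
        w.Write(p.eps); w.Write(p.gamma); w.Write(p.kernel_type); w.Write(p.nr_weight);
        w.Write(p.nu); w.Write(p.p); w.Write(p.probability); w.Write(p.shrinking);
        w.Write(p.svm_type);
        WriteDoubleArray(w, p.weight);        // assumed helpers mirroring ReadDoubleArray etc.
        WriteIntArray(w, p.weight_label);

        w.Write(this.model.nr_class);
        w.Write(this.model.l);
        WriteSvmNodeArray(w, this.model.SV);
        WriteDouble2DArray(w, this.model.sv_coef);
        WriteDoubleArray(w, this.model.rho);
        WriteDoubleArray(w, this.model.probA);
        WriteDoubleArray(w, this.model.probB);
        WriteIntArray(w, this.model.label);
        WriteIntArray(w, this.model.nSV);
    }
}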
static double CrossValidate(long randomSeed, double C)
{
    var training = Create1vs1Problem(trainingData, 1, 5);
    var config = new svm_parameter()
    {
        svm_type = (int)SvmType.C_SVC,
        kernel_type = (int)KernelType.POLY,
        C = C,
        degree = 2,
        coef0 = 1,
        gamma = 1,
        eps = 0.001
    };

    double[] result = new double[training.l];
    svm.rand.setSeed(randomSeed);
    svm.svm_cross_validation(training, config, 10, result);

    // Fraction of examples whose predicted sign disagrees with the label.
    return (result.Zip(training.y, (v, u) => Math.Sign(v) != Math.Sign(u) ? 1 : 0).Sum() + 0.0) / result.Length;
}
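// An illustrative grid search over C built on the routine above. The candidate values
// and the seed are arbitrary choices, not part of the original code:
double bestC = 1, bestError = double.MaxValue;
foreach (var c in new[] { 0.01, 0.1, 1, 10, 100 })
{
    double err = CrossValidate(randomSeed: 42, C: c);
    if (err < bestError) { bestError = err; bestC = c; }
}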
private static void solve_one_class(svm_problem prob, svm_parameter param, double[] alpha, Solver.SolutionInfo si)
{
    int l = prob.l;
    double[] zeros = new double[l];
    sbyte[] ones = new sbyte[l];
    int i;

    int n = (int)(param.nu * prob.l); // # of alphas at the upper bound

    for (i = 0; i < n; i++)
        alpha[i] = 1;
    if (n < prob.l)
        alpha[n] = param.nu * prob.l - n;
    for (i = n + 1; i < l; i++)
        alpha[i] = 0;

    for (i = 0; i < l; i++)
    {
        zeros[i] = 0;
        ones[i] = 1;
    }

    Solver s = new Solver();
    s.Solve(l, new ONE_CLASS_Q(prob, param), zeros, ones, alpha, 1.0, 1.0, param.eps, si, param.shrinking);
}
//
// Interface functions
//
public static svm_model svm_train(svm_problem prob, svm_parameter param, TrainingProgressEvent progressEvent = null)
{
    svm_model model = new svm_model();
    model.param = param;

    if (param.svm_type == svm_parameter.ONE_CLASS || param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR)
    {
        // regression or one-class SVM
        model.nr_class = 2;
        model.label = null;
        model.nSV = null;
        model.probA = null;
        model.probB = null;
        model.sv_coef = new double[1][];

        if (param.probability == 1 && (param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR))
        {
            model.probA = new double[1];
            model.probA[0] = svm_svr_probability(prob, param);
        }

        decision_function f = svm_train_one(prob, param, 0, 0);
        model.rho = new double[1];
        model.rho[0] = f.rho;

        int nSV = 0;
        int i;
        for (i = 0; i < prob.l; i++)
            if (System.Math.Abs(f.alpha[i]) > 0)
                ++nSV;
        model.l = nSV;
        model.SV = new svm_node[nSV][];
        model.sv_coef[0] = new double[nSV];
        int j = 0;
        for (i = 0; i < prob.l; i++)
            if (System.Math.Abs(f.alpha[i]) > 0)
            {
                model.SV[j] = prob.x[i];
                model.sv_coef[0][j] = f.alpha[i];
                ++j;
            }
    }
    else
    {
        // classification
        // find out the number of classes
        int l = prob.l;
        int max_nr_class = 16;
        int nr_class = 0;
        int[] label = new int[max_nr_class];
        int[] count = new int[max_nr_class];
        int[] index = new int[l];
        int i;

        for (i = 0; i < l; i++)
        {
            int this_label = (int)prob.y[i];
            int j;
            for (j = 0; j < nr_class; j++)
                if (this_label == label[j])
                {
                    ++count[j];
                    break;
                }
            index[i] = j;
            if (j == nr_class)
            {
                if (nr_class == max_nr_class)
                {
                    max_nr_class *= 2;
                    int[] new_data = new int[max_nr_class];
                    Array.Copy(label, 0, new_data, 0, label.Length);
                    label = new_data;
                    new_data = new int[max_nr_class];
                    Array.Copy(count, 0, new_data, 0, count.Length);
                    count = new_data;
                }
                label[nr_class] = this_label;
                count[nr_class] = 1;
                ++nr_class;
            }
        }

        // group training data of the same class
        int[] start = new int[nr_class];
        start[0] = 0;
        for (i = 1; i < nr_class; i++)
            start[i] = start[i - 1] + count[i - 1];

        svm_node[][] x = new svm_node[l][];
        for (i = 0; i < l; i++)
        {
            x[start[index[i]]] = prob.x[i];
            ++start[index[i]];
        }
        start[0] = 0;
        for (i = 1; i < nr_class; i++)
            start[i] = start[i - 1] + count[i - 1];

        // calculate weighted C
        double[] weighted_C = new double[nr_class];
        for (i = 0; i < nr_class; i++)
            weighted_C[i] = param.C;
        for (i = 0; i < param.nr_weight; i++)
        {
            int j;
            for (j = 0; j < nr_class; j++)
                if (param.weight_label[i] == label[j])
                    break;
            if (j == nr_class)
                System.Console.Error.Write("warning: class label " + param.weight_label[i] + " specified in weight is not found\n");
            else
                weighted_C[j] *= param.weight[i];
        }

        // train k*(k-1)/2 models
        bool[] nonzero = new bool[l];
        for (i = 0; i < l; i++)
            nonzero[i] = false;
        decision_function[] f = new decision_function[nr_class * (nr_class - 1) / 2];

        double[] probA = null, probB = null;
        if (param.probability == 1)
        {
            probA = new double[nr_class * (nr_class - 1) / 2];
            probB = new double[nr_class * (nr_class - 1) / 2];
        }

        int p = 0;
        for (i = 0; i < nr_class; i++)
            for (int j = i + 1; j < nr_class; j++)
            {
                svm_problem sub_prob = new svm_problem();
                int si = start[i], sj = start[j];
                int ci = count[i], cj = count[j];
                sub_prob.l = ci + cj;
                sub_prob.x = new svm_node[sub_prob.l][];
                sub_prob.y = new double[sub_prob.l];
                int k;
                for (k = 0; k < ci; k++)
                {
                    sub_prob.x[k] = x[si + k];
                    sub_prob.y[k] = +1;
                }
                for (k = 0; k < cj; k++)
                {
                    sub_prob.x[ci + k] = x[sj + k];
                    sub_prob.y[ci + k] = -1;
                }

                if (param.probability == 1)
                {
                    double[] probAB = new double[2];
                    svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                    probA[p] = probAB[0];
                    probB[p] = probAB[1];
                }

                f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
                for (k = 0; k < ci; k++)
                    if (!nonzero[si + k] && System.Math.Abs(f[p].alpha[k]) > 0)
                        nonzero[si + k] = true;
                for (k = 0; k < cj; k++)
                    if (!nonzero[sj + k] && System.Math.Abs(f[p].alpha[ci + k]) > 0)
                        nonzero[sj + k] = true;
                ++p;
            }

        // build output
        model.nr_class = nr_class;
        model.label = new int[nr_class];
        for (i = 0; i < nr_class; i++)
            model.label[i] = label[i];

        model.rho = new double[nr_class * (nr_class - 1) / 2];
        for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
            model.rho[i] = f[i].rho;

        if (param.probability == 1)
        {
            model.probA = new double[nr_class * (nr_class - 1) / 2];
            model.probB = new double[nr_class * (nr_class - 1) / 2];
            for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
            {
                model.probA[i] = probA[i];
                model.probB[i] = probB[i];
            }
        }
        else
        {
            model.probA = null;
            model.probB = null;
        }

        int nnz = 0;
        int[] nz_count = new int[nr_class];
        model.nSV = new int[nr_class];
        for (i = 0; i < nr_class; i++)
        {
            int nSV = 0;
            for (int j = 0; j < count[i]; j++)
                if (nonzero[start[i] + j])
                {
                    ++nSV;
                    ++nnz;
                }
            model.nSV[i] = nSV;
            nz_count[i] = nSV;
        }
        //Debug.WriteLine("Total nSV = " + nnz + "\n");

        model.l = nnz;
        model.SV = new svm_node[nnz][];
        p = 0;
        for (i = 0; i < l; i++)
            if (nonzero[i])
                model.SV[p++] = x[i];

        int[] nz_start = new int[nr_class];
        nz_start[0] = 0;
        for (i = 1; i < nr_class; i++)
            nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

        model.sv_coef = new double[nr_class - 1][];
        for (i = 0; i < nr_class - 1; i++)
            model.sv_coef[i] = new double[nnz];

        p = 0;
        for (i = 0; i < nr_class; i++)
            for (int j = i + 1; j < nr_class; j++)
            {
                // classifier (i,j): coefficients with
                // i are in sv_coef[j-1][nz_start[i]...],
                // j are in sv_coef[i][nz_start[j]...]
                int si = start[i];
                int sj = start[j];
                int ci = count[i];
                int cj = count[j];
                int q = nz_start[i];
                int k;
                for (k = 0; k < ci; k++)
                    if (nonzero[si + k])
                        model.sv_coef[j - 1][q++] = f[p].alpha[k];
                q = nz_start[j];
                for (k = 0; k < cj; k++)
                    if (nonzero[sj + k])
                        model.sv_coef[i][q++] = f[p].alpha[ci + k];
                ++p;
            }
    }
    return model;
}
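// Typical use of the result (svm_predict is part of the same interface; the sparse
// feature values below are made up, and in some builds of this port the svm_node value
// field is named value_Renamed):
svm_model trained = svm.svm_train(problem, parameters);
svm_node n1 = new svm_node(); n1.index = 1; n1.value = 0.3;
svm_node n2 = new svm_node(); n2.index = 4; n2.value = -1.2;
double predicted = svm.svm_predict(trained, new[] { n1, n2 });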
internal SVR_Q(svm_problem prob, svm_parameter param)
    : base(prob.l, prob.x, param)
{
    l = prob.l;
    cache = new Cache(l, (int)(param.cache_size * (1 << 20)));  // cache_size is in MB
    sign = new sbyte[2 * l];
    index = new int[2 * l];
    for (int k = 0; k < l; k++)
    {
        sign[k] = 1;
        sign[k + l] = -1;
        index[k] = k;
        index[k + l] = k;
    }
    buffer = new float[2][];
    for (int i = 0; i < 2; i++)
    {
        buffer[i] = new float[2 * l];
    }
    next_buffer = 0;
}
public static svm_model svm_load_model(System.String model_file_name)
{
    System.IO.StreamReader fp = new System.IO.StreamReader(new System.IO.FileStream(model_file_name, System.IO.FileMode.Open));

    // read parameters
    svm_model model = new svm_model();
    svm_parameter param = new svm_parameter();
    model.param = param;
    model.rho = null;
    model.probA = null;
    model.probB = null;
    model.label = null;
    model.nSV = null;

    while (true)
    {
        System.String cmd = fp.ReadLine();
        System.String arg = cmd.Substring(cmd.IndexOf((System.Char)' ') + 1);

        if (cmd.StartsWith("svm_type"))
        {
            int i;
            for (i = 0; i < svm_type_table.Length; i++)
            {
                if (arg.IndexOf(svm_type_table[i]) != -1)
                {
                    param.svm_type = i;
                    break;
                }
            }
            if (i == svm_type_table.Length)
            {
                System.Console.Error.Write("unknown svm type.\n");
                return null;
            }
        }
        else if (cmd.StartsWith("kernel_type"))
        {
            int i;
            for (i = 0; i < kernel_type_table.Length; i++)
            {
                if (arg.IndexOf(kernel_type_table[i]) != -1)
                {
                    param.kernel_type = i;
                    break;
                }
            }
            if (i == kernel_type_table.Length)
            {
                System.Console.Error.Write("unknown kernel function.\n");
                return null;
            }
        }
        else if (cmd.StartsWith("degree"))
            param.degree = atof(arg);
        else if (cmd.StartsWith("gamma"))
            param.gamma = atof(arg);
        else if (cmd.StartsWith("coef0"))
            param.coef0 = atof(arg);
        else if (cmd.StartsWith("nr_class"))
            model.nr_class = atoi(arg);
        else if (cmd.StartsWith("total_sv"))
            model.l = atoi(arg);
        else if (cmd.StartsWith("rho"))
        {
            int n = model.nr_class * (model.nr_class - 1) / 2;
            model.rho = new double[n];
            SupportClass.Tokenizer st = new SupportClass.Tokenizer(arg);
            for (int i = 0; i < n; i++)
                model.rho[i] = atof(st.NextToken());
        }
        else if (cmd.StartsWith("label"))
        {
            int n = model.nr_class;
            model.label = new int[n];
            SupportClass.Tokenizer st = new SupportClass.Tokenizer(arg);
            for (int i = 0; i < n; i++)
                model.label[i] = atoi(st.NextToken());
        }
        else if (cmd.StartsWith("probA"))
        {
            int n = model.nr_class * (model.nr_class - 1) / 2;
            model.probA = new double[n];
            SupportClass.Tokenizer st = new SupportClass.Tokenizer(arg);
            for (int i = 0; i < n; i++)
                model.probA[i] = atof(st.NextToken());
        }
        else if (cmd.StartsWith("probB"))
        {
            int n = model.nr_class * (model.nr_class - 1) / 2;
            model.probB = new double[n];
            SupportClass.Tokenizer st = new SupportClass.Tokenizer(arg);
            for (int i = 0; i < n; i++)
                model.probB[i] = atof(st.NextToken());
        }
        else if (cmd.StartsWith("nr_sv"))
        {
            int n = model.nr_class;
            model.nSV = new int[n];
            SupportClass.Tokenizer st = new SupportClass.Tokenizer(arg);
            for (int i = 0; i < n; i++)
                model.nSV[i] = atoi(st.NextToken());
        }
        else if (cmd.StartsWith("SV"))
        {
            break;
        }
        else
        {
            System.Console.Error.Write("unknown text in model file\n");
            return null;
        }
    }

    // read sv_coef and SV
    int m = model.nr_class - 1;
    int l = model.l;
    model.sv_coef = new double[m][];
    for (int i = 0; i < m; i++)
    {
        model.sv_coef[i] = new double[l];
    }
    model.SV = new svm_node[l][];

    for (int i = 0; i < l; i++)
    {
        System.String line = fp.ReadLine();
        SupportClass.Tokenizer st = new SupportClass.Tokenizer(line, " \t\n\r\f:");

        for (int k = 0; k < m; k++)
            model.sv_coef[k][i] = atof(st.NextToken());

        int n = st.Count / 2;
        model.SV[i] = new svm_node[n];
        for (int j = 0; j < n; j++)
        {
            model.SV[i][j] = new svm_node();
            model.SV[i][j].index = atoi(st.NextToken());
            model.SV[i][j].value = atof(st.NextToken());
        }
    }

    fp.Close();
    return model;
}
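// Loading a model written in the standard LIBSVM text format and predicting with it
// (the file name is illustrative; svm_predict is defined elsewhere in this interface):
svm_model loaded = svm.svm_load_model("heart_scale.model");
if (loaded != null)
{
    double label = svm.svm_predict(loaded, sampleNodes);   // sampleNodes: svm_node[] built by the caller
}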
internal static double k_function(svm_node[] x, svm_node[] y, svm_parameter param)
{
    switch (param.kernel_type)
    {
        case svm_parameter.LINEAR:
            return dot(x, y);
        case svm_parameter.POLY:
            return System.Math.Pow(param.gamma * dot(x, y) + param.coef0, param.degree);
        case svm_parameter.RBF:
        {
            // squared Euclidean distance between two sparse vectors
            double sum = 0;
            int xlen = x.Length;
            int ylen = y.Length;
            int i = 0;
            int j = 0;
            while (i < xlen && j < ylen)
            {
                if (x[i].index == y[j].index)
                {
                    double d = x[i++].value - y[j++].value;
                    sum += d * d;
                }
                else if (x[i].index > y[j].index)
                {
                    sum += y[j].value * y[j].value;
                    ++j;
                }
                else
                {
                    sum += x[i].value * x[i].value;
                    ++i;
                }
            }
            while (i < xlen)
            {
                sum += x[i].value * x[i].value;
                ++i;
            }
            while (j < ylen)
            {
                sum += y[j].value * y[j].value;
                ++j;
            }
            return System.Math.Exp((-param.gamma) * sum);
        }
        case svm_parameter.SIGMOID:
            return tanh(param.gamma * dot(x, y) + param.coef0);
        default:
            return 0; // java
    }
}
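// A small worked example for the RBF branch (values arbitrary, and in some builds of
// this port the svm_node value field is named value_Renamed). The two vectors share no
// indices, so sum = 1.0*1.0 + 2.0*2.0 = 5 and the result is exp(-gamma * 5):
svm_node a = new svm_node(); a.index = 1; a.value = 1.0;
svm_node b = new svm_node(); b.index = 2; b.value = 2.0;
var rbfParam = new svm_parameter { kernel_type = svm_parameter.RBF, gamma = 0.5 };
double k = Kernel.k_function(new[] { a }, new[] { b }, rbfParam);   // exp(-2.5) ≈ 0.0821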
public static void svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target)
{
    int i;
    int[] perm = new int[prob.l];

    // random shuffle
    for (i = 0; i < prob.l; i++)
        perm[i] = i;
    for (i = 0; i < prob.l; i++)
    {
        int j = i + (int)(SupportClass.Random.NextDouble() * (prob.l - i));
        int _ = perm[i]; perm[i] = perm[j]; perm[j] = _;
    }

    for (i = 0; i < nr_fold; i++)
    {
        int begin = i * prob.l / nr_fold;
        int end = (i + 1) * prob.l / nr_fold;
        int j, k;
        svm_problem subprob = new svm_problem();

        subprob.l = prob.l - (end - begin);
        subprob.x = new svm_node[subprob.l][];
        subprob.y = new double[subprob.l];

        k = 0;
        for (j = 0; j < begin; j++)
        {
            subprob.x[k] = prob.x[perm[j]];
            subprob.y[k] = prob.y[perm[j]];
            ++k;
        }
        for (j = end; j < prob.l; j++)
        {
            subprob.x[k] = prob.x[perm[j]];
            subprob.y[k] = prob.y[perm[j]];
            ++k;
        }

        svm_model submodel = svm_train(subprob, param);
        if (param.probability == 1 && (param.svm_type == svm_parameter.C_SVC || param.svm_type == svm_parameter.NU_SVC))
        {
            double[] prob_estimates = new double[svm_get_nr_class(submodel)];
            for (j = begin; j < end; j++)
                target[perm[j]] = svm_predict_probability(submodel, prob.x[perm[j]], prob_estimates);
        }
        else
            for (j = begin; j < end; j++)
                target[perm[j]] = svm_predict(submodel, prob.x[perm[j]]);
    }
}
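// Computing a cross-validation accuracy from the filled-in target array (prob and param
// as set up by the caller; the fold count of 5 is an arbitrary choice):
double[] target = new double[prob.l];
svm.svm_cross_validation(prob, param, 5, target);
int correct = 0;
for (int i = 0; i < prob.l; i++)
    if (target[i] == prob.y[i]) ++correct;
Console.WriteLine("CV accuracy = {0:P2}", (double)correct / prob.l);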
// Type values:
//   0: shuffle
//   1: no shuffle
//   2: time series
public static void svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target, int type)
{
    int i;
    int[] perm = new int[prob.l];
    for (i = 0; i < prob.l; i++)
        perm[i] = i;

    if (nr_fold == 1) // leave-one-out
    {
        nr_fold = prob.l;
    }

    if (type == 0) // shuffle
    {
        // random shuffle
        for (i = 0; i < prob.l; i++)
        {
            int j = i + (int)(SupportClass.Random.NextDouble() * (prob.l - i));
            int _ = perm[i]; perm[i] = perm[j]; perm[j] = _;
        }
    }

    int iNumElementsPerFold = prob.l / nr_fold;

    if (type == 2)
    {
        i = 1;
    }
    else
    {
        i = 0;
    }

    for (; i < nr_fold; i++)
    {
        int begin = i * prob.l / nr_fold;
        int end = (i + 1) * prob.l / nr_fold;
        int j, k;
        svm_problem subprob = new svm_problem();

        if (type == 2)
        {
            // time series: train only on the data preceding the current fold
            subprob.l = begin;
        }
        else
        {
            subprob.l = prob.l - (end - begin);
        }
        subprob.x = new svm_node[subprob.l][];
        subprob.y = new double[subprob.l];

        k = 0;
        for (j = 0; j < begin; j++)
        {
            subprob.x[k] = prob.x[perm[j]];
            subprob.y[k] = prob.y[perm[j]];
            ++k;
        }
        if (type != 2)
        {
            for (j = end; j < prob.l; j++)
            {
                subprob.x[k] = prob.x[perm[j]];
                subprob.y[k] = prob.y[perm[j]];
                ++k;
            }
        }

        svm_model submodel = svm_train(subprob, param);
        for (j = begin; j < end; j++)
        {
            if (type == 2)
            {
                target[perm[j] - iNumElementsPerFold] = svm_predict(submodel, prob.x[perm[j]]);
            }
            else
            {
                target[perm[j]] = svm_predict(submodel, prob.x[perm[j]]);
            }
        }
    }
}
public void TrainModel(double[] labels, double[][] mlArray)
{
    SvmProblemBuilder builder = new SvmProblemBuilder(labels, mlArray);
    svm_problem problem = builder.CreateProblem();

    svm_parameter param = new svm_parameter()
    {
        svm_type = 0,        // C_SVC
        kernel_type = 0,     // LINEAR
        cache_size = 512,
        eps = 0.1,
        C = 10,
        nr_weight = 0,
        weight_label = null,
        weight = null
    };

    this.model = svm.svm_train(problem, param);
}
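// An illustrative call with a tiny hand-made dataset (values arbitrary; 'classifier'
// stands for an instance of the class that defines TrainModel):
double[] labels = { 1, 1, -1, -1 };
double[][] features =
{
    new[] { 1.0, 0.9 },
    new[] { 0.8, 1.1 },
    new[] { -1.0, -0.7 },
    new[] { -0.9, -1.2 },
};
classifier.TrainModel(labels, features);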
/// <summary>
/// Default SVM.
/// </summary>
/// <remarks>The class stores the SVM parameters and trains the model,
/// so the instance can be used for prediction afterwards.</remarks>
public SVM(string input_file_name, svm_parameter param)
    : this(ProblemHelper.ReadProblem(input_file_name), param)
{
}
public void TrainLibSVM(double[][] vektoren, double[] labels, double currentC, double currentG, out int errorCount)
{
    int nrdocs = vektoren.Length;

    svm_problem prob = new svm_problem();
    prob.l = vektoren.Length - 1;
    prob.y = labels;

    svm_node[][] nodes = new svm_node[nrdocs][];
    for (int i = 0; i < vektoren.Length; i++)
    {
        int dim = vektoren[i].Length;
        nodes[i] = new svm_node[dim + 1];
        for (int j = 0; j < dim; j++)
        {
            svm_node n = new svm_node();
            n.index = j;
            n.value_Renamed = vektoren[i][j];
            nodes[i][j] = n;
        }
        // terminating node
        svm_node ln = new svm_node();
        ln.index = -1;
        ln.value_Renamed = 0;
        nodes[i][dim] = ln;
    }
    prob.x = nodes;

    svm_parameter param = new svm_parameter();
    param.cache_size = 256.0;
    param.C = 1000.0;
    param.svm_type = svm_parameter.C_SVC;
    param.kernel_type = svm_parameter.SIGMOID;
    param.gamma = 0.00000001;
    param.eps = 0.0001;
    param.probability = 1;

    double[] cergs = new double[labels.Length];
    int minfehler = labels.Length;   // smallest error count seen so far
    int fehler = 0;                  // error count of the current run
    double c = 0.0;
    double g = 0.0;

    #region Parameter tuning
    param.C = currentC;
    fehler = 0;
    param.gamma = currentG;
    string res = svm.svm_check_parameter(prob, param);
    if (res == null)
    {
        svm.svm_cross_validation(prob, param, vektoren.Length / 4, cergs);
        for (int k = 0; k < labels.Length; k++)
        {
            if (cergs[k] != labels[k])
            {
                fehler++;
            }
        }
        if (fehler < minfehler)
        {
            minfehler = fehler;
            c = param.C;
            g = param.gamma;
        }
    }
    #endregion

    #region Fine tuning
    double[] csF = new double[] { c * 0.6, c * 0.7, c * 0.8, c * 0.9, c, c * 2.0, c * 3.0 };
    double[] gsF = new double[] { g * 0.7, g * 0.8, g * 0.9, g, g * 2.0, g * 3.0 };
    for (int i = 0; i < csF.Length; i++)
    {
        param.C = csF[i];
        for (int j = 0; j < gsF.Length; j++)
        {
            fehler = 0;
            param.gamma = gsF[j];
            res = svm.svm_check_parameter(prob, param);
            if (res == null)
            {
                svm.svm_cross_validation(prob, param, vektoren.Length / 4, cergs);
                for (int k = 0; k < labels.Length; k++)
                {
                    if (cergs[k] != labels[k])
                    {
                        fehler++;
                    }
                }
                if (fehler < minfehler)
                {
                    minfehler = fehler;
                    c = param.C;
                    g = param.gamma;
                }
            }
        }
    }
    #endregion

    param.C = c;
    param.gamma = g;
    this._model = svm.svm_train(prob, param);

    int anzKlassen = svm.svm_get_nr_class(this._model);
    double[] probs = new double[anzKlassen];
    double erg = svm.svm_predict_probability(this._model, nodes[0], probs);

    errorCount = minfehler;
}
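// An illustrative call (the starting C/gamma values are arbitrary; 'trainer' stands for
// an instance of the class that defines TrainLibSVM, and the out parameter returns the
// best cross-validation error count found during tuning):
int cvErrors;
trainer.TrainLibSVM(featureVectors, labels, currentC: 10.0, currentG: 1e-8, out cvErrors);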
// Cross-validation decision values for probability estimates
private static void svm_binary_svc_probability(svm_problem prob, svm_parameter param, double Cp, double Cn, double[] probAB)
{
    int i;
    int nr_fold = 5;
    int[] perm = new int[prob.l];
    double[] dec_values = new double[prob.l];

    // random shuffle
    for (i = 0; i < prob.l; i++)
        perm[i] = i;
    for (i = 0; i < prob.l; i++)
    {
        int j = i + (int)(SupportClass.Random.NextDouble() * (prob.l - i));
        int _ = perm[i]; perm[i] = perm[j]; perm[j] = _;
    }

    for (i = 0; i < nr_fold; i++)
    {
        int begin = i * prob.l / nr_fold;
        int end = (i + 1) * prob.l / nr_fold;
        int j, k;
        svm_problem subprob = new svm_problem();

        subprob.l = prob.l - (end - begin);
        subprob.x = new svm_node[subprob.l][];
        subprob.y = new double[subprob.l];

        k = 0;
        for (j = 0; j < begin; j++)
        {
            subprob.x[k] = prob.x[perm[j]];
            subprob.y[k] = prob.y[perm[j]];
            ++k;
        }
        for (j = end; j < prob.l; j++)
        {
            subprob.x[k] = prob.x[perm[j]];
            subprob.y[k] = prob.y[perm[j]];
            ++k;
        }

        int p_count = 0, n_count = 0;
        for (j = 0; j < k; j++)
            if (subprob.y[j] > 0)
                p_count++;
            else
                n_count++;

        if (p_count == 0 && n_count == 0)
            for (j = begin; j < end; j++)
                dec_values[perm[j]] = 0;
        else if (p_count > 0 && n_count == 0)
            for (j = begin; j < end; j++)
                dec_values[perm[j]] = 1;
        else if (p_count == 0 && n_count > 0)
            for (j = begin; j < end; j++)
                dec_values[perm[j]] = -1;
        else
        {
            svm_parameter subparam = (svm_parameter)param.Clone();
            subparam.probability = 0;
            subparam.C = 1.0;
            subparam.nr_weight = 2;
            subparam.weight_label = new int[2];
            subparam.weight = new double[2];
            subparam.weight_label[0] = +1;
            subparam.weight_label[1] = -1;
            subparam.weight[0] = Cp;
            subparam.weight[1] = Cn;
            svm_model submodel = svm_train(subprob, subparam);
            for (j = begin; j < end; j++)
            {
                double[] dec_value = new double[1];
                svm_predict_values(submodel, prob.x[perm[j]], dec_value);
                dec_values[perm[j]] = dec_value[0];
                // ensure +1 -1 order; reason not using CV subroutine
                dec_values[perm[j]] *= submodel.label[0];
            }
        }
    }
    sigmoid_train(prob.l, dec_values, prob.y, probAB);
}
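// The (probA, probB) pair fitted by sigmoid_train is later applied as Platt scaling.
// A hedged sketch of that mapping from a decision value to a probability, assuming
// A and B are the fitted coefficients for the (+1, -1) class pair:
static double DecisionValueToProbability(double decisionValue, double A, double B)
{
    double fApB = decisionValue * A + B;
    // numerically stable evaluation of 1 / (1 + exp(fApB))
    return fApB >= 0
        ? Math.Exp(-fApB) / (1.0 + Math.Exp(-fApB))
        : 1.0 / (1.0 + Math.Exp(fApB));
}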
// Return parameter of a Laplace distribution
private static double svm_svr_probability(svm_problem prob, svm_parameter param)
{
    int i;
    int nr_fold = 5;
    double[] ymv = new double[prob.l];
    double mae = 0;

    svm_parameter newparam = (svm_parameter)param.Clone();
    newparam.probability = 0;
    svm_cross_validation(prob, newparam, nr_fold, ymv);

    for (i = 0; i < prob.l; i++)
    {
        ymv[i] = prob.y[i] - ymv[i];
        mae += System.Math.Abs(ymv[i]);
    }
    mae /= prob.l;

    double std = System.Math.Sqrt(2 * mae * mae);
    int count = 0;
    mae = 0;
    for (i = 0; i < prob.l; i++)
        if (System.Math.Abs(ymv[i]) > 5 * std)
            count = count + 1;
        else
            mae += System.Math.Abs(ymv[i]);
    mae /= (prob.l - count);

    System.Console.Error.Write("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + mae + "\n");
    return mae;
}
private void parse_command_line(System.String[] argv)
{
    int i;
    param = new svm_parameter();

    // default values
    param.svm_type = svm_parameter.C_SVC;
    param.kernel_type = svm_parameter.RBF;
    param.degree = 3;
    param.gamma = 0; // 1/k
    param.coef0 = 0;
    param.nu = 0.5;
    param.cache_size = 40;
    param.C = 1;
    param.eps = 1e-3;
    param.p = 0.1;
    param.shrinking = 1;
    param.probability = 0;
    param.nr_weight = 0;
    param.weight_label = new int[0];
    param.weight = new double[0];

    // parse options
    for (i = 0; i < argv.Length; i++)
    {
        if (argv[i][0] != '-')
            break;
        ++i;
        switch (argv[i - 1][1])
        {
            case 's':
                param.svm_type = atoi(argv[i]);
                break;
            case 't':
                param.kernel_type = atoi(argv[i]);
                break;
            case 'd':
                param.degree = atof(argv[i]);
                break;
            case 'g':
                param.gamma = atof(argv[i]);
                break;
            case 'r':
                param.coef0 = atof(argv[i]);
                break;
            case 'n':
                param.nu = atof(argv[i]);
                break;
            case 'm':
                param.cache_size = atof(argv[i]);
                break;
            case 'c':
                param.C = atof(argv[i]);
                break;
            case 'e':
                param.eps = atof(argv[i]);
                break;
            case 'p':
                param.p = atof(argv[i]);
                break;
            case 'h':
                param.shrinking = atoi(argv[i]);
                break;
            case 'b':
                param.probability = atoi(argv[i]);
                break;
            case 'v':
                cross_validation = 1;
                nr_fold = atoi(argv[i]);
                if (nr_fold < 2)
                {
                    System.Console.Error.Write("n-fold cross validation: n must >= 2\n");
                    exit_with_help();
                }
                break;
            case 'w':
                ++param.nr_weight;
                {
                    int[] old = param.weight_label;
                    param.weight_label = new int[param.nr_weight];
                    Array.Copy(old, 0, param.weight_label, 0, param.nr_weight - 1);
                }
                {
                    double[] old = param.weight;
                    param.weight = new double[param.nr_weight];
                    Array.Copy(old, 0, param.weight, 0, param.nr_weight - 1);
                }
                param.weight_label[param.nr_weight - 1] = atoi(argv[i - 1].Substring(2));
                param.weight[param.nr_weight - 1] = atof(argv[i]);
                break;
            default:
                System.Console.Error.Write("unknown option\n");
                exit_with_help();
                break;
        }
    }

    // determine filenames
    if (i >= argv.Length)
        exit_with_help();
    input_file_name = argv[i];
    if (i < argv.Length - 1)
        model_file_name = argv[i + 1];
    else
    {
        int p = argv[i].LastIndexOf((System.Char)'/');
        ++p; // whew...
        model_file_name = argv[i].Substring(p) + ".model";
    }
}
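// Example of the option syntax this parser accepts (file names illustrative):
//   svm_train -s 0 -t 2 -g 0.5 -c 10 -b 1 -v 5 heart_scale heart_scale.model
// i.e. C_SVC (-s 0) with an RBF kernel (-t 2), gamma 0.5, C 10, probability estimates
// enabled and 5-fold cross-validation; per-class weights are given as -wLABEL WEIGHT.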
internal Kernel(int l, svm_node[][] x_, svm_parameter param)
{
    this.kernel_type = param.kernel_type;
    this.degree = param.degree;
    this.gamma = param.gamma;
    this.coef0 = param.coef0;

    x = (svm_node[][])x_.Clone();

    if (kernel_type == svm_parameter.RBF)
    {
        // cache squared norms so the RBF kernel can be evaluated from dot products
        x_square = new double[l];
        for (int i = 0; i < l; i++)
            x_square[i] = dot(x[i], x[i]);
    }
    else
        x_square = null;
}
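// The squared norms cached above let the RBF kernel be computed from dot products alone.
// A sketch of that identity, using the member names set in this constructor and the
// sparse dot product dot() defined alongside it:
double RbfFromCache(int i, int j)
{
    // ||x_i - x_j||^2 = x_square[i] + x_square[j] - 2 * <x_i, x_j>
    return Math.Exp(-gamma * (x_square[i] + x_square[j] - 2 * dot(x[i], x[j])));
}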
//
// construct and solve various formulations
//
private static void solve_c_svc(svm_problem prob, svm_parameter param, double[] alpha, Solver.SolutionInfo si, double Cp, double Cn)
{
    int l = prob.l;
    double[] minus_ones = new double[l];
    sbyte[] y = new sbyte[l];
    int i;

    for (i = 0; i < l; i++)
    {
        alpha[i] = 0;
        minus_ones[i] = -1;
        if (prob.y[i] > 0)
            y[i] = (sbyte)(+1);
        else
            y[i] = -1;
    }

    Solver s = new Solver();
    s.Solve(l, new SVC_Q(prob, param, y), minus_ones, y, alpha, Cp, Cn, param.eps, si, param.shrinking);

    double sum_alpha = 0;
    for (i = 0; i < l; i++)
        sum_alpha += alpha[i];

    //if (Cp == Cn)
    //    Debug.WriteLine("nu = " + sum_alpha / (Cp * prob.l) + "\n");

    for (i = 0; i < l; i++)
        alpha[i] *= y[i];
}
internal ONE_CLASS_Q(svm_problem prob, svm_parameter param)
    : base(prob.l, prob.x, param)
{
    cache = new Cache(prob.l, (int)(param.cache_size * (1 << 20)));  // cache_size is in MB
}
private static void solve_epsilon_svr(svm_problem prob, svm_parameter param, double[] alpha, Solver.SolutionInfo si)
{
    int l = prob.l;
    double[] alpha2 = new double[2 * l];
    double[] linear_term = new double[2 * l];
    sbyte[] y = new sbyte[2 * l];
    int i;

    for (i = 0; i < l; i++)
    {
        alpha2[i] = 0;
        linear_term[i] = param.p - prob.y[i];
        y[i] = 1;

        alpha2[i + l] = 0;
        linear_term[i + l] = param.p + prob.y[i];
        y[i + l] = -1;
    }

    Solver s = new Solver();
    s.Solve(2 * l, new SVR_Q(prob, param), linear_term, y, alpha2, param.C, param.C, param.eps, si, param.shrinking);

    double sum_alpha = 0;
    for (i = 0; i < l; i++)
    {
        alpha[i] = alpha2[i] - alpha2[i + l];
        sum_alpha += System.Math.Abs(alpha[i]);
    }
    //Debug.WriteLine("nu = " + sum_alpha / (param.C * l) + "\n");
}
internal SVC_Q(svm_problem prob, svm_parameter param, sbyte[] y_)
    : base(prob.l, prob.x, param)
{
    y = new sbyte[y_.Length];
    y_.CopyTo(y, 0);
    cache = new Cache(prob.l, (int)(param.cache_size * (1 << 20)));  // cache_size is in MB
}
private static void solve_nu_svc(svm_problem prob, svm_parameter param, double[] alpha, Solver.SolutionInfo si)
{
    int i;
    int l = prob.l;
    double nu = param.nu;

    sbyte[] y = new sbyte[l];
    for (i = 0; i < l; i++)
        if (prob.y[i] > 0)
            y[i] = (sbyte)(+1);
        else
            y[i] = -1;

    double sum_pos = nu * l / 2;
    double sum_neg = nu * l / 2;

    for (i = 0; i < l; i++)
        if (y[i] == +1)
        {
            alpha[i] = System.Math.Min(1.0, sum_pos);
            sum_pos -= alpha[i];
        }
        else
        {
            alpha[i] = System.Math.Min(1.0, sum_neg);
            sum_neg -= alpha[i];
        }

    double[] zeros = new double[l];
    for (i = 0; i < l; i++)
        zeros[i] = 0;

    Solver_NU s = new Solver_NU();
    s.Solve(l, new SVC_Q(prob, param, y), zeros, y, alpha, 1.0, 1.0, param.eps, si, param.shrinking);
    double r = si.r;
    //Debug.WriteLine("C = " + 1 / r + "\n");

    for (i = 0; i < l; i++)
        alpha[i] *= y[i] / r;

    si.rho /= r;
    si.obj /= (r * r);
    si.upper_bound_p = 1 / r;
    si.upper_bound_n = 1 / r;
}
public static System.String svm_check_parameter(svm_problem prob, svm_parameter param)
{
    // svm_type
    int svm_type = param.svm_type;
    if (svm_type != svm_parameter.C_SVC && svm_type != svm_parameter.NU_SVC && svm_type != svm_parameter.ONE_CLASS && svm_type != svm_parameter.EPSILON_SVR && svm_type != svm_parameter.NU_SVR)
        return "unknown svm type";

    // kernel_type
    int kernel_type = param.kernel_type;
    if (kernel_type != svm_parameter.LINEAR && kernel_type != svm_parameter.POLY && kernel_type != svm_parameter.RBF && kernel_type != svm_parameter.SIGMOID)
        return "unknown kernel type";

    // cache_size, eps, C, nu, p, shrinking
    if (param.cache_size <= 0)
        return "cache_size <= 0";
    if (param.eps <= 0)
        return "eps <= 0";
    if (svm_type == svm_parameter.C_SVC || svm_type == svm_parameter.EPSILON_SVR || svm_type == svm_parameter.NU_SVR)
        if (param.C <= 0)
            return "C <= 0";
    if (svm_type == svm_parameter.NU_SVC || svm_type == svm_parameter.ONE_CLASS || svm_type == svm_parameter.NU_SVR)
        if (param.nu < 0 || param.nu > 1)
            return "nu < 0 or nu > 1";
    if (svm_type == svm_parameter.EPSILON_SVR)
        if (param.p < 0)
            return "p < 0";
    if (param.shrinking != 0 && param.shrinking != 1)
        return "shrinking != 0 and shrinking != 1";
    if (param.probability != 0 && param.probability != 1)
        return "probability != 0 and probability != 1";
    if (param.probability == 1 && svm_type == svm_parameter.ONE_CLASS)
        return "one-class SVM probability output not supported yet";

    // check whether nu-svc is feasible
    if (svm_type == svm_parameter.NU_SVC)
    {
        int l = prob.l;
        int max_nr_class = 16;
        int nr_class = 0;
        int[] label = new int[max_nr_class];
        int[] count = new int[max_nr_class];

        int i;
        for (i = 0; i < l; i++)
        {
            int this_label = (int)prob.y[i];
            int j;
            for (j = 0; j < nr_class; j++)
                if (this_label == label[j])
                {
                    ++count[j];
                    break;
                }

            if (j == nr_class)
            {
                if (nr_class == max_nr_class)
                {
                    max_nr_class *= 2;
                    int[] new_data = new int[max_nr_class];
                    Array.Copy(label, 0, new_data, 0, label.Length);
                    label = new_data;
                    new_data = new int[max_nr_class];
                    Array.Copy(count, 0, new_data, 0, count.Length);
                    count = new_data;
                }
                label[nr_class] = this_label;
                count[nr_class] = 1;
                ++nr_class;
            }
        }

        for (i = 0; i < nr_class; i++)
        {
            int n1 = count[i];
            for (int j = i + 1; j < nr_class; j++)
            {
                int n2 = count[j];
                if (param.nu * (n1 + n2) / 2 > System.Math.Min(n1, n2))
                    return "specified nu is infeasible";
            }
        }
    }
    return null;
}
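// Worked example of the nu-SVC feasibility rule checked above: with class counts
// n1 = 30 and n2 = 10, training is accepted only if nu * (30 + 10) / 2 <= 10, i.e.
// nu <= 0.5. A caller-side guard might look like this (problem/parameters assumed):
parameters.svm_type = svm_parameter.NU_SVC;
parameters.nu = 0.6;
string msg = svm.svm_check_parameter(problem, parameters);
if (msg != null)
    Console.Error.WriteLine(msg);   // "specified nu is infeasible" for the counts above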
private static void solve_nu_svr(svm_problem prob, svm_parameter param, double[] alpha, Solver.SolutionInfo si)
{
    int l = prob.l;
    double C = param.C;
    double[] alpha2 = new double[2 * l];
    double[] linear_term = new double[2 * l];
    sbyte[] y = new sbyte[2 * l];
    int i;

    double sum = C * param.nu * l / 2;
    for (i = 0; i < l; i++)
    {
        alpha2[i] = alpha2[i + l] = System.Math.Min(sum, C);
        sum -= alpha2[i];

        linear_term[i] = -prob.y[i];
        y[i] = 1;

        linear_term[i + l] = prob.y[i];
        y[i + l] = -1;
    }

    Solver_NU s = new Solver_NU();
    s.Solve(2 * l, new SVR_Q(prob, param), linear_term, y, alpha2, C, C, param.eps, si, param.shrinking);
    //Debug.WriteLine("epsilon = " + (-si.r) + "\n");

    for (i = 0; i < l; i++)
        alpha[i] = alpha2[i] - alpha2[i + l];
}
internal static decision_function svm_train_one(svm_problem prob, svm_parameter param, double Cp, double Cn)
{
    double[] alpha = new double[prob.l];
    Solver.SolutionInfo si = new Solver.SolutionInfo();

    switch (param.svm_type)
    {
        case svm_parameter.C_SVC:
            solve_c_svc(prob, param, alpha, si, Cp, Cn);
            break;
        case svm_parameter.NU_SVC:
            solve_nu_svc(prob, param, alpha, si);
            break;
        case svm_parameter.ONE_CLASS:
            solve_one_class(prob, param, alpha, si);
            break;
        case svm_parameter.EPSILON_SVR:
            solve_epsilon_svr(prob, param, alpha, si);
            break;
        case svm_parameter.NU_SVR:
            solve_nu_svr(prob, param, alpha, si);
            break;
    }
    //Debug.WriteLine("obj = " + si.obj + ", rho = " + si.rho + "\n");

    // output SVs
    int nSV = 0;
    int nBSV = 0;
    for (int i = 0; i < prob.l; i++)
    {
        if (System.Math.Abs(alpha[i]) > 0)
        {
            ++nSV;
            if (prob.y[i] > 0)
            {
                if (System.Math.Abs(alpha[i]) >= si.upper_bound_p)
                    ++nBSV;
            }
            else
            {
                if (System.Math.Abs(alpha[i]) >= si.upper_bound_n)
                    ++nBSV;
            }
        }
    }
    //Debug.WriteLine("nSV = " + nSV + ", nBSV = " + nBSV + "\n");

    decision_function f = new decision_function();
    f.alpha = alpha;
    f.rho = si.rho;
    return f;
}
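// The decision_function returned here is evaluated as f(x) = sum_i alpha[i]*K(x_i, x) - rho,
// with the sign giving the class for C_SVC/NU_SVC. A hedged sketch of that evaluation,
// assuming alpha is indexed over the same points it was trained on (as it is before
// svm_train compacts it down to the support vectors) and using Kernel.k_function from
// earlier in this collection:
static double Evaluate(decision_function df, svm_node[][] points, svm_node[] x, svm_parameter param)
{
    double sum = 0;
    for (int i = 0; i < points.Length; i++)
        sum += df.alpha[i] * Kernel.k_function(points[i], x, param);
    return sum - df.rho;
}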