public static void svm_save_model(StreamWriter fp, svm_model model) { svm_parameter param = model.param; fp.Write("svm_type " + svm_type_table[param.svm_type] + "\n"); fp.Write("kernel_type " + kernel_type_table[param.kernel_type] + "\n"); if (param.kernel_type == svm_parameter.POLY) fp.Write("degree " + param.degree + "\n"); if (param.kernel_type == svm_parameter.POLY || param.kernel_type == svm_parameter.RBF || param.kernel_type == svm_parameter.SIGMOID) fp.Write("gamma " + param.gamma + "\n"); if (param.kernel_type == svm_parameter.POLY || param.kernel_type == svm_parameter.SIGMOID) fp.Write("coef0 " + param.coef0 + "\n"); int nr_class = model.nr_class; int l = model.l; fp.Write("nr_class " + nr_class + "\n"); fp.Write("total_sv " + l + "\n"); { fp.Write("rho"); for (int i = 0; i < nr_class*(nr_class - 1)/2; i++) fp.Write(" " + model.rho[i]); fp.Write("\n"); } if (model.label != null) { fp.Write("label"); for (int i = 0; i < nr_class; i++) fp.Write(" " + model.label[i]); fp.Write("\n"); } if (model.probA != null) // regression has probA only { fp.Write("probA"); for (int i = 0; i < nr_class*(nr_class - 1)/2; i++) fp.Write(" " + model.probA[i]); fp.Write("\n"); } if (model.probB != null) { fp.Write("probB"); for (int i = 0; i < nr_class*(nr_class - 1)/2; i++) fp.Write(" " + model.probB[i]); fp.Write("\n"); } if (model.nSV != null) { fp.Write("nr_sv"); for (int i = 0; i < nr_class; i++) fp.Write(" " + model.nSV[i]); fp.Write("\n"); } fp.Write("SV\n"); double[][] sv_coef = model.sv_coef; svm_node[][] SV = model.SV; for (int i = 0; i < l; i++) { for (int j = 0; j < nr_class - 1; j++) fp.Write(sv_coef[j][i] + " "); svm_node[] p = SV[i]; for (int j = 0; j < p.Length; j++) fp.Write(p[j].index + ":" + p[j].value_Renamed + " "); fp.Write("\n"); } fp.Close(); }
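// Usage sketch (not part of the original source): persisting a trained model
// with svm_save_model. Assumes these static methods live on the LIBSVM port's
// "svm" class and that "model" came from svm_train; the file name is made up.
// Note that svm_save_model closes the StreamWriter itself.
var writer = new StreamWriter("trained.svm");
svm.svm_save_model(writer, model);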
public static double svm_predict_probability(svm_model model, svm_node[] x, double[] prob_estimates) { if ((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) && model.probA != null && model.probB != null) { int i; int nr_class = model.nr_class; var dec_values = new double[nr_class*(nr_class - 1)/2]; svm_predict_values(model, x, dec_values); double min_prob = 1e-7; var tmpArray = new double[nr_class][]; for (int i2 = 0; i2 < nr_class; i2++) { tmpArray[i2] = new double[nr_class]; } double[][] pairwise_prob = tmpArray; int k = 0; for (i = 0; i < nr_class; i++) for (int j = i + 1; j < nr_class; j++) { pairwise_prob[i][j] = Math.Min( Math.Max(sigmoid_predict(dec_values[k], model.probA[k], model.probB[k]), min_prob), 1 - min_prob); pairwise_prob[j][i] = 1 - pairwise_prob[i][j]; k++; } multiclass_probability(nr_class, pairwise_prob, prob_estimates); int prob_max_idx = 0; for (i = 1; i < nr_class; i++) if (prob_estimates[i] > prob_estimates[prob_max_idx]) prob_max_idx = i; return model.label[prob_max_idx]; } else return svm_predict(model, x); }
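// Usage sketch (illustrative only): per-class probability estimates for one
// input vector "x" (an svm_node[], built as in the svm_predict sketch below).
// This requires a C_SVC/NU_SVC model trained with param.probability == 1 so
// that probA/probB are populated; otherwise the call falls back to svm_predict.
int nrClass = svm.svm_get_nr_class(model);
var probabilities = new double[nrClass];
double predictedLabel = svm.svm_predict_probability(model, x, probabilities);
// probabilities[i] belongs to the label at position i of svm_get_labels.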
public static double svm_predict(svm_model model, svm_node[] x) { if (model.param.svm_type == svm_parameter.ONE_CLASS || model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) { var res = new double[1]; svm_predict_values(model, x, res); if (model.param.svm_type == svm_parameter.ONE_CLASS) return (res[0] > 0) ? 1 : - 1; else return res[0]; } else { int i; int nr_class = model.nr_class; var dec_values = new double[nr_class*(nr_class - 1)/2]; svm_predict_values(model, x, dec_values); var vote = new int[nr_class]; for (i = 0; i < nr_class; i++) vote[i] = 0; int pos = 0; for (i = 0; i < nr_class; i++) for (int j = i + 1; j < nr_class; j++) { if (dec_values[pos++] > 0) ++vote[i]; else ++vote[j]; } int vote_max_idx = 0; for (i = 1; i < nr_class; i++) if (vote[i] > vote[vote_max_idx]) vote_max_idx = i; return model.label[vote_max_idx]; } }
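// Usage sketch (illustrative only): build a sparse input and classify it.
// Feature indices are conventionally 1-based in LIBSVM; value_Renamed is the
// field name this port uses for the feature value.
var x = new svm_node[2];
x[0] = new svm_node { index = 1, value_Renamed = 0.25 };
x[1] = new svm_node { index = 2, value_Renamed = -1.0 };
double result = svm.svm_predict(model, x);
// C_SVC/NU_SVC: a class label; EPSILON_SVR/NU_SVR: the regression output;
// ONE_CLASS: +1 for an inlier, -1 for an outlier.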
public static void svm_predict_values(svm_model model, svm_node[] x, double[] dec_values) { if (model.param.svm_type == svm_parameter.ONE_CLASS || model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) { double[] sv_coef = model.sv_coef[0]; double sum = 0; for (int i = 0; i < model.l; i++) sum += sv_coef[i]*Kernel.k_function(x, model.SV[i], model.param); sum -= model.rho[0]; dec_values[0] = sum; } else { int i; int nr_class = model.nr_class; int l = model.l; var kvalue = new double[l]; for (i = 0; i < l; i++) kvalue[i] = Kernel.k_function(x, model.SV[i], model.param); var start = new int[nr_class]; start[0] = 0; for (i = 1; i < nr_class; i++) start[i] = start[i - 1] + model.nSV[i - 1]; int p = 0; int pos = 0; for (i = 0; i < nr_class; i++) for (int j = i + 1; j < nr_class; j++) { double sum = 0; int si = start[i]; int sj = start[j]; int ci = model.nSV[i]; int cj = model.nSV[j]; int k; double[] coef1 = model.sv_coef[j - 1]; double[] coef2 = model.sv_coef[i]; for (k = 0; k < ci; k++) sum += coef1[si + k]*kvalue[si + k]; for (k = 0; k < cj; k++) sum += coef2[sj + k]*kvalue[sj + k]; sum -= model.rho[p++]; dec_values[pos++] = sum; } } }
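// Note (illustrative helper, not in the original): for classification,
// svm_predict_values fills dec_values in the order its nested loops visit the
// class pairs: (0,1), (0,2), ..., (0,k-1), (1,2), ..., (k-2,k-1). This helper
// recovers that flat index for a pair i < j.
static int PairIndex(int i, int j, int nrClass)
{
    // assumes 0 <= i < j < nrClass
    return i*nrClass - i*(i + 1)/2 + (j - i - 1);
}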
public static double svm_get_svr_probability(svm_model model) { if ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) && model.probA != null) return model.probA[0]; else { Console.Error.Write("Model doesn't contain information for SVR probability inference\n"); return 0; } }
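// Usage sketch (illustrative only): for an SVR model trained with
// param.probability == 1, probA[0] holds the parameter of the Laplace noise
// model, which can be reported as a rough error bar around predictions.
double sigma = svm.svm_get_svr_probability(model);
if (sigma > 0)
    Console.WriteLine("SVR Laplace parameter: " + sigma);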
public static void svm_get_labels(svm_model model, int[] label) { if (model.label != null) for (int i = 0; i < model.nr_class; i++) label[i] = model.label[i]; }
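// Usage sketch (illustrative only): the caller allocates the label array using
// svm_get_nr_class (defined later in this file) before asking for the labels.
int k = svm.svm_get_nr_class(model);
var labels = new int[k];
svm.svm_get_labels(model, labels);
foreach (int lab in labels)
    Console.Write(lab + " ");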
/// <summary>
/// Load the models.
/// </summary>
/// <param name="xmlin">Where to read the models from.</param>
/// <param name="network">Where the models are read into.</param>
private void HandleModels(ReadXML xmlin, SVMNetwork network)
{
    int index = 0;
    while (xmlin.ReadToTag())
    {
        if (xmlin.IsIt(SVMNetworkPersistor.TAG_MODEL, true))
        {
            svm_parameter param = new svm_parameter();
            svm_model model = new svm_model();
            model.param = param;
            network.Models[index] = model;
            HandleModel(xmlin, network.Models[index]);
            index++;
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_MODELS, false))
        {
            break;
        }
    }
}
public static int svm_get_svm_type(svm_model model) { return model.param.svm_type; }
// // Interface functions // public static svm_model svm_train(svm_problem prob, svm_parameter param) { var model = new svm_model(); model.param = param; if (param.svm_type == svm_parameter.ONE_CLASS || param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR) { // regression or one-class-svm model.nr_class = 2; model.label = null; model.nSV = null; model.probA = null; model.probB = null; model.sv_coef = new double[1][]; if (param.probability == 1 && (param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR)) { model.probA = new double[1]; model.probA[0] = svm_svr_probability(prob, param); } decision_function f = svm_train_one(prob, param, 0, 0); model.rho = new double[1]; model.rho[0] = f.rho; int nSV = 0; int i; for (i = 0; i < prob.l; i++) if (Math.Abs(f.alpha[i]) > 0) ++nSV; model.l = nSV; model.SV = new svm_node[nSV][]; model.sv_coef[0] = new double[nSV]; int j = 0; for (i = 0; i < prob.l; i++) if (Math.Abs(f.alpha[i]) > 0) { model.SV[j] = prob.x[i]; model.sv_coef[0][j] = f.alpha[i]; ++j; } } else { // classification // find out the number of classes int l = prob.l; int max_nr_class = 16; int nr_class = 0; var label = new int[max_nr_class]; var count = new int[max_nr_class]; var index = new int[l]; int i; for (i = 0; i < l; i++) { //UPGRADE_WARNING: Data types in Visual C# might be different. Verify the accuracy of narrowing conversions. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1042_3"' var this_label = (int) prob.y[i]; int j; for (j = 0; j < nr_class; j++) if (this_label == label[j]) { ++count[j]; break; } index[i] = j; if (j == nr_class) { if (nr_class == max_nr_class) { max_nr_class *= 2; var new_data = new int[max_nr_class]; Array.Copy(label, 0, new_data, 0, label.Length); label = new_data; new_data = new int[max_nr_class]; Array.Copy(count, 0, new_data, 0, count.Length); count = new_data; } label[nr_class] = this_label; count[nr_class] = 1; ++nr_class; } } // group training data of the same class var start = new int[nr_class]; start[0] = 0; for (i = 1; i < nr_class; i++) start[i] = start[i - 1] + count[i - 1]; var x = new svm_node[l][]; for (i = 0; i < l; i++) { x[start[index[i]]] = prob.x[i]; ++start[index[i]]; } start[0] = 0; for (i = 1; i < nr_class; i++) start[i] = start[i - 1] + count[i - 1]; // calculate weighted C var weighted_C = new double[nr_class]; for (i = 0; i < nr_class; i++) weighted_C[i] = param.C; for (i = 0; i < param.nr_weight; i++) { int j; for (j = 0; j < nr_class; j++) if (param.weight_label[i] == label[j]) break; if (j == nr_class) Console.Error.Write("warning: class label " + param.weight_label[i] + " specified in weight is not found\n"); else weighted_C[j] *= param.weight[i]; } // train k*(k-1)/2 models var nonzero = new bool[l]; for (i = 0; i < l; i++) nonzero[i] = false; var f = new decision_function[nr_class*(nr_class - 1)/2]; double[] probA = null, probB = null; if (param.probability == 1) { probA = new double[nr_class*(nr_class - 1)/2]; probB = new double[nr_class*(nr_class - 1)/2]; } int p = 0; for (i = 0; i < nr_class; i++) for (int j = i + 1; j < nr_class; j++) { var sub_prob = new svm_problem(); int si = start[i], sj = start[j]; int ci = count[i], cj = count[j]; sub_prob.l = ci + cj; sub_prob.x = new svm_node[sub_prob.l][]; sub_prob.y = new double[sub_prob.l]; int k; for (k = 0; k < ci; k++) { sub_prob.x[k] = x[si + k]; sub_prob.y[k] = + 1; } for (k = 0; k < cj; k++) { sub_prob.x[ci + k] = x[sj + k]; sub_prob.y[ci + k] = - 1; } if (param.probability == 1) { var 
probAB = new double[2]; svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB); probA[p] = probAB[0]; probB[p] = probAB[1]; } f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]); for (k = 0; k < ci; k++) if (!nonzero[si + k] && Math.Abs(f[p].alpha[k]) > 0) nonzero[si + k] = true; for (k = 0; k < cj; k++) if (!nonzero[sj + k] && Math.Abs(f[p].alpha[ci + k]) > 0) nonzero[sj + k] = true; ++p; } // build output model.nr_class = nr_class; model.label = new int[nr_class]; for (i = 0; i < nr_class; i++) model.label[i] = label[i]; model.rho = new double[nr_class*(nr_class - 1)/2]; for (i = 0; i < nr_class*(nr_class - 1)/2; i++) model.rho[i] = f[i].rho; if (param.probability == 1) { model.probA = new double[nr_class*(nr_class - 1)/2]; model.probB = new double[nr_class*(nr_class - 1)/2]; for (i = 0; i < nr_class*(nr_class - 1)/2; i++) { model.probA[i] = probA[i]; model.probB[i] = probB[i]; } } else { model.probA = null; model.probB = null; } int nnz = 0; var nz_count = new int[nr_class]; model.nSV = new int[nr_class]; for (i = 0; i < nr_class; i++) { int nSV = 0; for (int j = 0; j < count[i]; j++) if (nonzero[start[i] + j]) { ++nSV; ++nnz; } model.nSV[i] = nSV; nz_count[i] = nSV; } Console.Out.Write("Total nSV = " + nnz + "\n"); model.l = nnz; model.SV = new svm_node[nnz][]; p = 0; for (i = 0; i < l; i++) if (nonzero[i]) model.SV[p++] = x[i]; var nz_start = new int[nr_class]; nz_start[0] = 0; for (i = 1; i < nr_class; i++) nz_start[i] = nz_start[i - 1] + nz_count[i - 1]; model.sv_coef = new double[nr_class - 1][]; for (i = 0; i < nr_class - 1; i++) model.sv_coef[i] = new double[nnz]; p = 0; for (i = 0; i < nr_class; i++) for (int j = i + 1; j < nr_class; j++) { // classifier (i,j): coefficients with // i are in sv_coef[j-1][nz_start[i]...], // j are in sv_coef[i][nz_start[j]...] int si = start[i]; int sj = start[j]; int ci = count[i]; int cj = count[j]; int q = nz_start[i]; int k; for (k = 0; k < ci; k++) if (nonzero[si + k]) model.sv_coef[j - 1][q++] = f[p].alpha[k]; q = nz_start[j]; for (k = 0; k < cj; k++) if (nonzero[sj + k]) model.sv_coef[i][q++] = f[p].alpha[ci + k]; ++p; } } return model; }
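// Usage sketch (illustrative only): train a small two-class RBF classifier.
// svm_problem/svm_parameter field names follow their usage in this file; eps
// and cache_size are assumed from the standard LIBSVM parameter set.
var prob = new svm_problem();
prob.l = 4;
prob.y = new double[] { 1, 1, -1, -1 };
prob.x = new svm_node[prob.l][];
for (int i = 0; i < prob.l; i++)
    prob.x[i] = new[] { new svm_node { index = 1, value_Renamed = (i < 2) ? 1.0 : -1.0 } };

var param = new svm_parameter();
param.svm_type = svm_parameter.C_SVC;
param.kernel_type = svm_parameter.RBF;
param.gamma = 0.5;
param.C = 1.0;
param.eps = 1e-3;        // assumed stopping tolerance field
param.cache_size = 100;  // assumed kernel cache size (MB)
param.probability = 0;

svm_model model = svm.svm_train(prob, param);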
/// <summary>
/// Construct a SVM from a model.
/// </summary>
/// <param name="theModel">The model.</param>
public SupportVectorMachine(svm_model theModel)
{
    _model = theModel;
    _paras = _model.param;
    _inputCount = 0;

    // determine the input count
    foreach (var element in _model.SV)
    {
        foreach (svm_node t in element)
        {
            _inputCount = Math.Max(t.index, _inputCount);
        }
    }
}
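// Usage sketch (illustrative only): wrap a trained LIBSVM model in the Encog
// SupportVectorMachine wrapper above. "model" is assumed to come from
// svm_train or svm_load_model; the constructor scans the support vectors so
// the wrapper's input count equals the largest feature index they use.
var machine = new SupportVectorMachine(model);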
/// <summary>
/// Save a model.
/// </summary>
/// <param name="xmlout">Where to save the model to.</param>
/// <param name="model">The model to save.</param>
public static void SaveModel(WriteXML xmlout, svm_model model)
{
    if (model != null)
    {
        xmlout.BeginTag(SVMNetworkPersistor.TAG_MODEL);
        svm_parameter param = model.param;
        xmlout.AddProperty(SVMNetworkPersistor.TAG_TYPE_SVM, svm_type_table[param.svm_type]);
        xmlout.AddProperty(SVMNetworkPersistor.TAG_TYPE_KERNEL, kernel_type_table[param.kernel_type]);
        if (param.kernel_type == svm_parameter.POLY)
        {
            xmlout.AddProperty(SVMNetworkPersistor.TAG_DEGREE, param.degree);
        }
        if (param.kernel_type == svm_parameter.POLY
            || param.kernel_type == svm_parameter.RBF
            || param.kernel_type == svm_parameter.SIGMOID)
        {
            xmlout.AddProperty(SVMNetworkPersistor.TAG_GAMMA, param.gamma);
        }
        if (param.kernel_type == svm_parameter.POLY
            || param.kernel_type == svm_parameter.SIGMOID)
        {
            xmlout.AddProperty(SVMNetworkPersistor.TAG_COEF0, param.coef0);
        }
        int nr_class = model.nr_class;
        int l = model.l;
        xmlout.AddProperty(SVMNetworkPersistor.TAG_NUMCLASS, nr_class);
        xmlout.AddProperty(SVMNetworkPersistor.TAG_TOTALSV, l);
        xmlout.AddProperty(SVMNetworkPersistor.TAG_RHO, model.rho, nr_class * (nr_class - 1) / 2);
        xmlout.AddProperty(SVMNetworkPersistor.TAG_LABEL, model.label, nr_class);
        xmlout.AddProperty(SVMNetworkPersistor.TAG_PROB_A, model.probA, nr_class * (nr_class - 1) / 2);
        xmlout.AddProperty(SVMNetworkPersistor.TAG_PROB_B, model.probB, nr_class * (nr_class - 1) / 2);
        xmlout.AddProperty(SVMNetworkPersistor.TAG_NSV, model.nSV, nr_class);

        xmlout.BeginTag(SVMNetworkPersistor.TAG_DATA);
        double[][] sv_coef = model.sv_coef;
        svm_node[][] SV = model.SV;
        StringBuilder line = new StringBuilder();
        for (int i = 0; i < l; i++)
        {
            line.Length = 0;
            for (int j = 0; j < nr_class - 1; j++)
                line.Append(sv_coef[j][i] + " ");
            svm_node[] p = SV[i];
            //if (param.kernel_type == svm_parameter.PRECOMPUTED)
            //{
            //    line.Append("0:" + (int) (p[0].value));
            //}
            //else
            for (int j = 0; j < p.Length; j++)
                line.Append(p[j].index + ":" + p[j].value_Renamed + " ");
            xmlout.AddProperty(SVMNetworkPersistor.TAG_ROW, line.ToString());
        }
        xmlout.EndTag();
        xmlout.EndTag();
    }
}
/// <summary>
/// Load the data from a model.
/// </summary>
/// <param name="xmlin">Where to read the data from.</param>
/// <param name="model">The model to load data into.</param>
private void HandleData(ReadXML xmlin, svm_model model)
{
    int i = 0;
    int m = model.nr_class - 1;
    int l = model.l;
    model.sv_coef = EngineArray.AllocateDouble2D(m, l);
    model.SV = new svm_node[l][];
    while (xmlin.ReadToTag())
    {
        if (xmlin.IsIt(SVMNetworkPersistor.TAG_ROW, true))
        {
            // Each row holds the sv_coef values followed by index:value
            // pairs, in the format written by SaveModel.
            String line = xmlin.ReadTextToTag();
            String[] st = line.Split(new[] { ' ', '\t', ':' },
                                     StringSplitOptions.RemoveEmptyEntries);
            for (int k = 0; k < m; k++)
                model.sv_coef[k][i] = Double.Parse(st[k]);
            int n = (st.Length - m) / 2;
            model.SV[i] = new svm_node[n];
            int idx = m;
            for (int j = 0; j < n; j++)
            {
                model.SV[i][j] = new svm_node();
                model.SV[i][j].index = int.Parse(st[idx++]);
                model.SV[i][j].value_Renamed = Double.Parse(st[idx++]);
            }
            i++;
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_DATA, false))
        {
            break;
        }
    }
}
/// <summary>
/// Handle a model.
/// </summary>
/// <param name="xmlin">Where to read the model from.</param>
/// <param name="model">Where to load the model into.</param>
private void HandleModel(ReadXML xmlin, svm_model model)
{
    while (xmlin.ReadToTag())
    {
        if (xmlin.IsIt(SVMNetworkPersistor.TAG_TYPE_SVM, true))
        {
            int i = EngineArray.FindStringInArray(
                SVMNetworkPersistor.svm_type_table, xmlin.ReadTextToTag());
            model.param.svm_type = i;
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_DEGREE, true))
        {
            model.param.degree = int.Parse(xmlin.ReadTextToTag());
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_GAMMA, true))
        {
            model.param.gamma = double.Parse(xmlin.ReadTextToTag());
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_COEF0, true))
        {
            model.param.coef0 = double.Parse(xmlin.ReadTextToTag());
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_NUMCLASS, true))
        {
            model.nr_class = int.Parse(xmlin.ReadTextToTag());
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_TOTALSV, true))
        {
            model.l = int.Parse(xmlin.ReadTextToTag());
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_RHO, true))
        {
            int n = model.nr_class * (model.nr_class - 1) / 2;
            model.rho = new double[n];
            String[] st = xmlin.ReadTextToTag().Split(',');
            for (int i = 0; i < n; i++)
                model.rho[i] = double.Parse(st[i]);
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_LABEL, true))
        {
            int n = model.nr_class;
            model.label = new int[n];
            String[] st = xmlin.ReadTextToTag().Split(',');
            for (int i = 0; i < n; i++)
                model.label[i] = int.Parse(st[i]);
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_PROB_A, true))
        {
            int n = model.nr_class * (model.nr_class - 1) / 2;
            model.probA = new double[n];
            String[] st = xmlin.ReadTextToTag().Split(',');
            for (int i = 0; i < n; i++)
                model.probA[i] = Double.Parse(st[i]);
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_PROB_B, true))
        {
            int n = model.nr_class * (model.nr_class - 1) / 2;
            model.probB = new double[n];
            String[] st = xmlin.ReadTextToTag().Split(',');
            for (int i = 0; i < n; i++)
                model.probB[i] = Double.Parse(st[i]);
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_NSV, true))
        {
            int n = model.nr_class;
            model.nSV = new int[n];
            String[] st = xmlin.ReadTextToTag().Split(',');
            for (int i = 0; i < n; i++)
                model.nSV[i] = int.Parse(st[i]);
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_TYPE_KERNEL, true))
        {
            int i = EngineArray.FindStringInArray(
                SVMNetworkPersistor.kernel_type_table, xmlin.ReadTextToTag());
            model.param.kernel_type = i;
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_DATA, true))
        {
            HandleData(xmlin, model);
        }
        else if (xmlin.IsIt(SVMNetworkPersistor.TAG_MODEL, false))
        {
            break;
        }
    }
}
public static svm_model svm_load_model(StringReader fp) { // read parameters var model = new svm_model(); var param = new svm_parameter(); model.param = param; model.rho = null; model.probA = null; model.probB = null; model.label = null; model.nSV = null; while (true) { String cmd = fp.ReadLine(); String arg = cmd.Substring(cmd.IndexOf(' ') + 1); if (cmd.StartsWith("svm_type")) { int i; for (i = 0; i < svm_type_table.Length; i++) { if (arg.IndexOf(svm_type_table[i]) != -1) { param.svm_type = i; break; } } if (i == svm_type_table.Length) { Console.Error.Write("unknown svm type.\n"); return null; } } else if (cmd.StartsWith("kernel_type")) { int i; for (i = 0; i < kernel_type_table.Length; i++) { if (arg.IndexOf(kernel_type_table[i]) != -1) { param.kernel_type = i; break; } } if (i == kernel_type_table.Length) { Console.Error.Write("unknown kernel function.\n"); return null; } } else if (cmd.StartsWith("degree")) param.degree = atof(arg); else if (cmd.StartsWith("gamma")) param.gamma = atof(arg); else if (cmd.StartsWith("coef0")) param.coef0 = atof(arg); else if (cmd.StartsWith("nr_class")) model.nr_class = atoi(arg); else if (cmd.StartsWith("total_sv")) model.l = atoi(arg); else if (cmd.StartsWith("rho")) { int n = model.nr_class*(model.nr_class - 1)/2; model.rho = new double[n]; var st = new SupportClass.Tokenizer(arg); for (int i = 0; i < n; i++) model.rho[i] = atof(st.NextToken()); } else if (cmd.StartsWith("label")) { int n = model.nr_class; model.label = new int[n]; var st = new SupportClass.Tokenizer(arg); for (int i = 0; i < n; i++) model.label[i] = atoi(st.NextToken()); } else if (cmd.StartsWith("probA")) { int n = model.nr_class*(model.nr_class - 1)/2; model.probA = new double[n]; var st = new SupportClass.Tokenizer(arg); for (int i = 0; i < n; i++) model.probA[i] = atof(st.NextToken()); } else if (cmd.StartsWith("probB")) { int n = model.nr_class*(model.nr_class - 1)/2; model.probB = new double[n]; var st = new SupportClass.Tokenizer(arg); for (int i = 0; i < n; i++) model.probB[i] = atof(st.NextToken()); } else if (cmd.StartsWith("nr_sv")) { int n = model.nr_class; model.nSV = new int[n]; var st = new SupportClass.Tokenizer(arg); for (int i = 0; i < n; i++) model.nSV[i] = atoi(st.NextToken()); } else if (cmd.StartsWith("SV")) { break; } else { Console.Error.Write("unknown text in model file\n"); return null; } } // read sv_coef and SV int m = model.nr_class - 1; int l = model.l; model.sv_coef = new double[m][]; for (int i = 0; i < m; i++) { model.sv_coef[i] = new double[l]; } model.SV = new svm_node[l][]; for (int i = 0; i < l; i++) { String line = fp.ReadLine(); var st = new SupportClass.Tokenizer(line, " \t\n\r\f:"); for (int k = 0; k < m; k++) model.sv_coef[k][i] = atof(st.NextToken()); int n = st.Count/2; model.SV[i] = new svm_node[n]; for (int j = 0; j < n; j++) { model.SV[i][j] = new svm_node(); model.SV[i][j].index = atoi(st.NextToken()); model.SV[i][j].value_Renamed = atof(st.NextToken()); } } return model; }
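// Usage sketch (illustrative only): reload a model previously written by
// svm_save_model. The file name is made up, and System.IO is assumed to be
// imported for File and StringReader.
string modelText = File.ReadAllText("trained.svm");
svm_model reloaded = svm.svm_load_model(new StringReader(modelText));
if (reloaded == null)
{
    // svm_load_model reports an unknown svm type, unknown kernel, or stray
    // text by writing to stderr and returning null.
    throw new InvalidOperationException("Could not parse the model file.");
}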
public static int svm_check_probability_model(svm_model model) { if (((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) && model.probA != null && model.probB != null) || ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) && model.probA != null)) return 1; else return 0; }
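// Usage sketch (illustrative only): guard probability queries with the check
// above, since svm_predict_probability silently falls back to svm_predict
// when the model carries no probability information.
double label;
if (svm.svm_check_probability_model(model) == 1)
{
    var probs = new double[svm.svm_get_nr_class(model)];
    label = svm.svm_predict_probability(model, x, probs);
}
else
{
    label = svm.svm_predict(model, x);
}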
public static int svm_get_nr_class(svm_model model) { return model.nr_class; }