/// <summary>
/// Performs cross validation.
/// </summary>
/// <param name="problem">The training data</param>
/// <param name="parameters">The parameters to test</param>
/// <param name="nrfold">The number of cross validation folds to use</param>
/// <returns>The cross validation score</returns>
public static double PerformCrossValidation(Problem problem, Parameter parameters, int nrfold)
{
    string error = Procedures.svm_check_parameter(problem, parameters);
    if (error == null)
        return doCrossValidation(problem, parameters, nrfold);
    else
        throw new Exception(error);
}
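A minimal usage sketch (the C/Gamma values and fold count are illustrative, not from the snippet above): check and score one parameter set with 5-fold cross validation before committing to a full training run.

Problem train = Problem.Read("a1a.train.txt");
Parameter parameters = new Parameter();
parameters.C = 1;
parameters.Gamma = 0.1;
double score = Training.PerformCrossValidation(train, parameters, 5);
Console.WriteLine("5-fold CV score: {0}", score);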
public static void Main(string[] args)
{
    Problem train = Problem.Read("a1a.train.txt");
    Problem test = Problem.Read("a1a.test.txt");

    // For this example (and indeed, many scenarios), the default
    // parameters will suffice.
    Parameter parameters = new Parameter();
    double C;
    double Gamma;

    // This will do a grid optimization to find the best parameters
    // and store them in C and Gamma, outputting the entire
    // search to params.txt.
    ParameterSelection.Grid(train, parameters, "params.txt", out C, out Gamma);
    parameters.C = C;
    parameters.Gamma = Gamma;

    // Train the model using the optimal parameters.
    Model model = Training.Train(train, parameters);

    // Perform classification on the test data, putting the
    // results in results.txt.
    Prediction.Predict(test, "results.txt", model, false);
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name="rows">Nodes to use as the rows of the matrix</param>
/// <param name="columns">Nodes to use as the columns of the matrix</param>
/// <param name="param">Parameters to use when computing similarities</param>
public PrecomputedKernel(List<Node[]> rows, List<Node[]> columns, Parameter param)
{
    _rows = rows.Count;
    _columns = columns.Count;
    _similarities = new float[_rows, _columns];
    for (int r = 0; r < _rows; r++)
        for (int c = 0; c < _columns; c++)
            _similarities[r, c] = (float)Kernel.KernelFunction(rows[r], columns[c], param);
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name="nodes">Nodes for self-similarity analysis</param>
/// <param name="param">Parameters to use when computing similarities</param>
public PrecomputedKernel(List<Node[]> nodes, Parameter param)
{
    _rows = nodes.Count;
    _columns = _rows;
    _similarities = new float[_rows, _columns];
    for (int r = 0; r < _rows; r++)
    {
        // The kernel matrix is symmetric: reuse the values computed for earlier rows.
        for (int c = 0; c < r; c++)
            _similarities[r, c] = _similarities[c, r];

        // Self-similarity is taken to be 1 (exact for the RBF kernel, where K(x, x) = e^0 = 1).
        _similarities[r, r] = 1;

        for (int c = r + 1; c < _columns; c++)
            _similarities[r, c] = (float)Kernel.KernelFunction(nodes[r], nodes[c], param);
    }
}
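A minimal construction sketch (the sample data and Gamma value are hypothetical): build a self-similarity matrix over a list of samples using RBF parameters.

var samples = new List<Node[]>
{
    new Node[] { new Node(1, 0.5), new Node(2, 1.0) },
    new Node[] { new Node(1, 0.9), new Node(2, 0.1) }
};
Parameter param = new Parameter();
param.KernelType = KernelType.RBF;
param.Gamma = 0.5;
PrecomputedKernel kernel = new PrecomputedKernel(samples, param);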
public Kernel(int l, Node[][] x_, Parameter param)
{
    _kernelType = param.KernelType;
    _degree = param.Degree;
    _gamma = param.Gamma;
    _coef0 = param.Coefficient0;

    _x = (Node[][])x_.Clone();

    if (_kernelType == KernelType.RBF)
    {
        // Cache the squared norms so the RBF kernel can be evaluated as
        // exp(-gamma * (|x|^2 + |y|^2 - 2 x.y)) without recomputing the dot products.
        _xSquare = new double[l];
        for (int i = 0; i < l; i++)
            _xSquare[i] = dot(_x[i], _x[i]);
    }
    else
        _xSquare = null;
}
public Model train(Problem issue)
{
    var span = Overseer.observe("Training.Parameter-Choosing");
    Parameter parameters = new Parameter();
    parameters.KernelType = KernelType.RBF;
    double C;
    double Gamma;
    ParameterSelection.Grid(issue, parameters, null, out C, out Gamma);
    parameters.C = C;
    parameters.Gamma = Gamma;
    span.die();

    span = Overseer.observe("Training.Training");
    var result = Training.Train(issue, parameters);
    span.die();
    return result;
}
public static double KernelFunction(Node[] x, Node[] y, Parameter param)
{
    switch (param.KernelType)
    {
        case KernelType.LINEAR:
            return dot(x, y);
        case KernelType.POLY:
            // Polynomial kernel: (gamma * <x, y> + coef0)^degree.
            // The scaling factor is Gamma; the original code mistakenly used Degree here.
            return powi(param.Gamma * dot(x, y) + param.Coefficient0, param.Degree);
        case KernelType.RBF:
            {
                double sum = computeSquaredDistance(x, y);
                return Math.Exp(-param.Gamma * sum);
            }
        case KernelType.SIGMOID:
            return Math.Tanh(param.Gamma * dot(x, y) + param.Coefficient0);
        case KernelType.PRECOMPUTED:
            return x[(int)(y[0].Value)].Value;
        default:
            return 0;
    }
}
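A small check sketch (the vectors and Gamma are hypothetical): for identical inputs the RBF kernel returns exp(0) = 1, and it decays with squared distance.

Node[] x = { new Node(1, 1.0), new Node(2, 2.0) };
Node[] y = { new Node(1, 0.0), new Node(2, 2.0) };
Parameter param = new Parameter();
param.KernelType = KernelType.RBF;
param.Gamma = 0.5;
double same = Kernel.KernelFunction(x, x, param);  // 1.0
double near = Kernel.KernelFunction(x, y, param);  // exp(-0.5 * 1.0) ~= 0.607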
///
public override void LearnAttributeToFactorMapping()
{
    var svm_features = new List<Node[]>();
    var relevant_items = new List<int>();

    for (int i = 0; i < MaxItemID + 1; i++)
    {
        // ignore items w/o collaborative data
        if (Feedback.ItemMatrix[i].Count == 0)
            continue;
        // ignore items w/o attribute data
        if (item_attributes[i].Count == 0)
            continue;

        svm_features.Add(CreateNodes(i));
        relevant_items.Add(i);
    }

    // TODO proper random seed initialization

    Node[][] svm_features_array = svm_features.ToArray();
    var svm_parameters = new Parameter();
    svm_parameters.SvmType = SvmType.EPSILON_SVR;
    //svm_parameters.SvmType = SvmType.NU_SVR;
    svm_parameters.C = this.c;
    svm_parameters.Gamma = this.gamma;

    // Fit one SVR model per latent factor, regressing the factor values
    // of the relevant items onto their attribute vectors.
    models = new Model[num_factors];
    for (int f = 0; f < num_factors; f++)
    {
        double[] targets = new double[svm_features.Count];
        for (int i = 0; i < svm_features.Count; i++)
        {
            int item_id = relevant_items[i];
            targets[i] = item_factors[item_id, f];
        }

        Problem svm_problem = new Problem(svm_features.Count, targets, svm_features_array, NumItemAttributes - 1);
        models[f] = SVM.Training.Train(svm_problem, svm_parameters);
    }

    _MapToLatentFactorSpace = Utils.Memoize<int, double[]>(__MapToLatentFactorSpace);
}
private double TrainAndTest(string trainSet, string testSet, string resultFile)
{
    Problem train = Problem.Read(trainSet);
    Problem test = Problem.Read(testSet);

    Parameter parameters = new Parameter();
    if (chClassification.Checked)
    {
        parameters.SvmType = SvmType.C_SVC;
        parameters.C = 0.03;
        parameters.Gamma = 0.008;
    }
    else
    {
        parameters.SvmType = SvmType.EPSILON_SVR;
        parameters.C = 8;
        parameters.Gamma = 0.063;
        parameters.P = 0.5;
    }

    Model model = Training.Train(train, parameters);
    return Prediction.Predict(test, resultFile, model, true);
}
public ONE_CLASS_Q(Problem prob, Parameter param)
    : base(prob.Count, prob.X, param)
{
    cache = new Cache(prob.Count, (long)(param.CacheSize * (1 << 20)));
    QD = new float[prob.Count];
    for (int i = 0; i < prob.Count; i++)
        QD[i] = (float)KernelFunction(i, i);
}
// Stratified cross validation
public static void svm_cross_validation(Problem prob, Parameter param, int nr_fold, double[] target)
{
    Random rand = new Random();
    int i;
    int[] fold_start = new int[nr_fold + 1];
    int l = prob.Count;
    int[] perm = new int[l];

    // stratified cv may not give leave-one-out rate
    // Each class to l folds -> some folds may have zero elements
    if ((param.SvmType == SvmType.C_SVC || param.SvmType == SvmType.NU_SVC) && nr_fold < l)
    {
        int[] tmp_nr_class = new int[1];
        int[][] tmp_label = new int[1][];
        int[][] tmp_start = new int[1][];
        int[][] tmp_count = new int[1][];

        svm_group_classes(prob, tmp_nr_class, tmp_label, tmp_start, tmp_count, perm);

        int nr_class = tmp_nr_class[0];
        //int[] label = tmp_label[0];
        int[] start = tmp_start[0];
        int[] count = tmp_count[0];

        // random shuffle and then data grouped by fold using the array perm
        int[] fold_count = new int[nr_fold];
        int c;
        int[] index = new int[l];
        for (i = 0; i < l; i++)
            index[i] = perm[i];
        for (c = 0; c < nr_class; c++)
            for (i = 0; i < count[c]; i++)
            {
                int j = i + (int)(rand.NextDouble() * (count[c] - i));
                // swap index[start[c] + i] and index[start[c] + j]
                int _ = index[start[c] + j];
                index[start[c] + j] = index[start[c] + i];
                index[start[c] + i] = _;
            }
        for (i = 0; i < nr_fold; i++)
        {
            fold_count[i] = 0;
            for (c = 0; c < nr_class; c++)
                fold_count[i] += (i + 1) * count[c] / nr_fold - i * count[c] / nr_fold;
        }
        fold_start[0] = 0;
        for (i = 1; i <= nr_fold; i++)
            fold_start[i] = fold_start[i - 1] + fold_count[i - 1];
        for (c = 0; c < nr_class; c++)
            for (i = 0; i < nr_fold; i++)
            {
                int begin = start[c] + i * count[c] / nr_fold;
                int end = start[c] + (i + 1) * count[c] / nr_fold;
                for (int j = begin; j < end; j++)
                {
                    perm[fold_start[i]] = index[j];
                    fold_start[i]++;
                }
            }
        fold_start[0] = 0;
        for (i = 1; i <= nr_fold; i++)
            fold_start[i] = fold_start[i - 1] + fold_count[i - 1];
    }
    else
    {
        for (i = 0; i < l; i++)
            perm[i] = i;
        for (i = 0; i < l; i++)
        {
            int j = i + (int)(rand.NextDouble() * (l - i));
            // swap perm[i] and perm[j]
            int _ = perm[i];
            perm[i] = perm[j];
            perm[j] = _;
        }
        for (i = 0; i <= nr_fold; i++)
            fold_start[i] = i * l / nr_fold;
    }

    for (i = 0; i < nr_fold; i++)
    {
        int begin = fold_start[i];
        int end = fold_start[i + 1];
        int j, k;
        Problem subprob = new Problem();

        subprob.Count = l - (end - begin);
        subprob.X = new Node[subprob.Count][];
        subprob.Y = new double[subprob.Count];

        k = 0;
        for (j = 0; j < begin; j++)
        {
            subprob.X[k] = prob.X[perm[j]];
            subprob.Y[k] = prob.Y[perm[j]];
            ++k;
        }
        for (j = end; j < l; j++)
        {
            subprob.X[k] = prob.X[perm[j]];
            subprob.Y[k] = prob.Y[perm[j]];
            ++k;
        }

        Model submodel = svm_train(subprob, param);
        if (param.Probability && (param.SvmType == SvmType.C_SVC || param.SvmType == SvmType.NU_SVC))
        {
            double[] prob_estimates = new double[svm_get_nr_class(submodel)];
            for (j = begin; j < end; j++)
                target[perm[j]] = svm_predict_probability(submodel, prob.X[perm[j]], prob_estimates);
        }
        else
            for (j = begin; j < end; j++)
                target[perm[j]] = svm_predict(submodel, prob.X[perm[j]]);
    }
}
static decision_function svm_train_one(Problem prob, Parameter param, double Cp, double Cn)
{
    double[] alpha = new double[prob.Count];
    Solver.SolutionInfo si = new Solver.SolutionInfo();
    switch (param.SvmType)
    {
        case SvmType.C_SVC:
            solve_c_svc(prob, param, alpha, si, Cp, Cn);
            break;
        case SvmType.NU_SVC:
            solve_nu_svc(prob, param, alpha, si);
            break;
        case SvmType.ONE_CLASS:
            solve_one_class(prob, param, alpha, si);
            break;
        case SvmType.EPSILON_SVR:
            solve_epsilon_svr(prob, param, alpha, si);
            break;
        case SvmType.NU_SVR:
            solve_nu_svr(prob, param, alpha, si);
            break;
    }

    Procedures.info("obj = " + si.obj + ", rho = " + si.rho + "\n");

    // output SVs
    int nSV = 0;
    int nBSV = 0;
    for (int i = 0; i < prob.Count; i++)
    {
        if (Math.Abs(alpha[i]) > 0)
        {
            ++nSV;
            if (prob.Y[i] > 0)
            {
                if (Math.Abs(alpha[i]) >= si.upper_bound_p)
                    ++nBSV;
            }
            else
            {
                if (Math.Abs(alpha[i]) >= si.upper_bound_n)
                    ++nBSV;
            }
        }
    }

    Procedures.info("nSV = " + nSV + ", nBSV = " + nBSV + "\n");

    decision_function f = new decision_function();
    f.alpha = alpha;
    f.rho = si.rho;
    return f;
}
/// <summary>
/// Performs a Grid parameter selection, trying all possible combinations of the two lists and returning the
/// combination which performed best. Uses the default search ranges for C and Gamma.
/// </summary>
/// <param name="problem">The training data</param>
/// <param name="validation">The validation data</param>
/// <param name="parameters">The parameters to use when optimizing</param>
/// <param name="outputFile">The output file for the parameter results</param>
/// <param name="C">The optimal C value will be placed in this variable</param>
/// <param name="Gamma">The optimal Gamma value will be placed in this variable</param>
public static void Grid(
    Problem problem,
    Problem validation,
    Parameter parameters,
    string outputFile,
    out double C,
    out double Gamma)
{
    Grid(problem, validation, parameters, GetList(MIN_C, MAX_C, C_STEP), GetList(MIN_G, MAX_G, G_STEP), outputFile, out C, out Gamma);
}
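A minimal usage sketch (file names are hypothetical): search the default C/Gamma grid against a held-out validation set, then train with the winning pair.

Problem train = Problem.Read("train.txt");
Problem validation = Problem.Read("validation.txt");
Parameter parameters = new Parameter();
double C, Gamma;
ParameterSelection.Grid(train, validation, parameters, "params.txt", out C, out Gamma);
parameters.C = C;
parameters.Gamma = Gamma;
Model model = Training.Train(train, parameters);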
private static void solve_nu_svc(Problem prob, Parameter param, double[] alpha, Solver.SolutionInfo si)
{
    int i;
    int l = prob.Count;
    double nu = param.Nu;

    sbyte[] y = new sbyte[l];
    for (i = 0; i < l; i++)
        if (prob.Y[i] > 0)
            y[i] = +1;
        else
            y[i] = -1;

    double sum_pos = nu * l / 2;
    double sum_neg = nu * l / 2;
    for (i = 0; i < l; i++)
        if (y[i] == +1)
        {
            alpha[i] = Math.Min(1.0, sum_pos);
            sum_pos -= alpha[i];
        }
        else
        {
            alpha[i] = Math.Min(1.0, sum_neg);
            sum_neg -= alpha[i];
        }

    double[] zeros = new double[l];
    for (i = 0; i < l; i++)
        zeros[i] = 0;

    Solver_NU s = new Solver_NU();
    s.Solve(l, new SVC_Q(prob, param, y), zeros, y, alpha, 1.0, 1.0, param.EPS, si, param.Shrinking);
    double r = si.r;

    Procedures.info("C = " + 1 / r + "\n");

    for (i = 0; i < l; i++)
        alpha[i] *= y[i] / r;

    si.rho /= r;
    si.obj /= (r * r);
    si.upper_bound_p = 1 / r;
    si.upper_bound_n = 1 / r;
}
private static void solve_c_svc(Problem prob, Parameter param, double[] alpha, Solver.SolutionInfo si, double Cp, double Cn)
{
    int l = prob.Count;
    double[] Minus_ones = new double[l];
    sbyte[] y = new sbyte[l];
    int i;

    for (i = 0; i < l; i++)
    {
        alpha[i] = 0;
        Minus_ones[i] = -1;
        if (prob.Y[i] > 0)
            y[i] = +1;
        else
            y[i] = -1;
    }

    Solver s = new Solver();
    s.Solve(l, new SVC_Q(prob, param, y), Minus_ones, y, alpha, Cp, Cn, param.EPS, si, param.Shrinking);

    double sum_alpha = 0;
    for (i = 0; i < l; i++)
        sum_alpha += alpha[i];

    if (Cp == Cn)
        Procedures.info("nu = " + sum_alpha / (Cp * prob.Count) + "\n");

    for (i = 0; i < l; i++)
        alpha[i] *= y[i];
}
//
// Interface functions
//
public static Model svm_train(Problem prob, Parameter param)
{
    Model model = new Model();
    model.Parameter = param;

    if (param.SvmType == SvmType.ONE_CLASS || param.SvmType == SvmType.EPSILON_SVR || param.SvmType == SvmType.NU_SVR)
    {
        // regression or one-class-svm
        model.NumberOfClasses = 2;
        model.ClassLabels = null;
        model.NumberOfSVPerClass = null;
        model.PairwiseProbabilityA = null;
        model.PairwiseProbabilityB = null;
        model.SupportVectorCoefficients = new double[1][];

        if (param.Probability && (param.SvmType == SvmType.EPSILON_SVR || param.SvmType == SvmType.NU_SVR))
        {
            model.PairwiseProbabilityA = new double[1];
            model.PairwiseProbabilityA[0] = svm_svr_probability(prob, param);
        }

        decision_function f = svm_train_one(prob, param, 0, 0);
        model.Rho = new double[1];
        model.Rho[0] = f.rho;

        int nSV = 0;
        int i;
        for (i = 0; i < prob.Count; i++)
            if (Math.Abs(f.alpha[i]) > 0)
                ++nSV;
        model.SupportVectorCount = nSV;
        model.SupportVectors = new Node[nSV][];
        model.SupportVectorCoefficients[0] = new double[nSV];
        int j = 0;
        for (i = 0; i < prob.Count; i++)
            if (Math.Abs(f.alpha[i]) > 0)
            {
                model.SupportVectors[j] = prob.X[i];
                model.SupportVectorCoefficients[0][j] = f.alpha[i];
                ++j;
            }
    }
    else
    {
        // classification
        int l = prob.Count;
        int[] tmp_nr_class = new int[1];
        int[][] tmp_label = new int[1][];
        int[][] tmp_start = new int[1][];
        int[][] tmp_count = new int[1][];
        int[] perm = new int[l];

        // group training data of the same class
        svm_group_classes(prob, tmp_nr_class, tmp_label, tmp_start, tmp_count, perm);
        int nr_class = tmp_nr_class[0];
        int[] label = tmp_label[0];
        int[] start = tmp_start[0];
        int[] count = tmp_count[0];
        Node[][] x = new Node[l][];
        int i;
        for (i = 0; i < l; i++)
            x[i] = prob.X[perm[i]];

        // calculate weighted C
        double[] weighted_C = new double[nr_class];
        for (i = 0; i < nr_class; i++)
            weighted_C[i] = param.C;
        foreach (int weightedLabel in param.Weights.Keys)
        {
            int index = Array.IndexOf<int>(label, weightedLabel);
            if (index < 0)
                Console.Error.WriteLine("warning: class label " + weightedLabel + " specified in weight is not found");
            else
                weighted_C[index] *= param.Weights[weightedLabel];
        }

        // train k*(k-1)/2 models
        bool[] nonzero = new bool[l];
        for (i = 0; i < l; i++)
            nonzero[i] = false;
        decision_function[] f = new decision_function[nr_class * (nr_class - 1) / 2];

        double[] probA = null, probB = null;
        if (param.Probability)
        {
            probA = new double[nr_class * (nr_class - 1) / 2];
            probB = new double[nr_class * (nr_class - 1) / 2];
        }

        int p = 0;
        for (i = 0; i < nr_class; i++)
            for (int j = i + 1; j < nr_class; j++)
            {
                Problem sub_prob = new Problem();
                int si = start[i], sj = start[j];
                int ci = count[i], cj = count[j];
                sub_prob.Count = ci + cj;
                sub_prob.X = new Node[sub_prob.Count][];
                sub_prob.Y = new double[sub_prob.Count];
                int k;
                for (k = 0; k < ci; k++)
                {
                    sub_prob.X[k] = x[si + k];
                    sub_prob.Y[k] = +1;
                }
                for (k = 0; k < cj; k++)
                {
                    sub_prob.X[ci + k] = x[sj + k];
                    sub_prob.Y[ci + k] = -1;
                }

                if (param.Probability)
                {
                    double[] probAB = new double[2];
                    svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                    probA[p] = probAB[0];
                    probB[p] = probAB[1];
                }

                f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
                for (k = 0; k < ci; k++)
                    if (!nonzero[si + k] && Math.Abs(f[p].alpha[k]) > 0)
                        nonzero[si + k] = true;
                for (k = 0; k < cj; k++)
                    if (!nonzero[sj + k] && Math.Abs(f[p].alpha[ci + k]) > 0)
                        nonzero[sj + k] = true;
                ++p;
            }

        // build output
        model.NumberOfClasses = nr_class;

        model.ClassLabels = new int[nr_class];
        for (i = 0; i < nr_class; i++)
            model.ClassLabels[i] = label[i];

        model.Rho = new double[nr_class * (nr_class - 1) / 2];
        for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
            model.Rho[i] = f[i].rho;

        if (param.Probability)
        {
            model.PairwiseProbabilityA = new double[nr_class * (nr_class - 1) / 2];
            model.PairwiseProbabilityB = new double[nr_class * (nr_class - 1) / 2];
            for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
            {
                model.PairwiseProbabilityA[i] = probA[i];
                model.PairwiseProbabilityB[i] = probB[i];
            }
        }
        else
        {
            model.PairwiseProbabilityA = null;
            model.PairwiseProbabilityB = null;
        }

        int nnz = 0;
        int[] nz_count = new int[nr_class];
        model.NumberOfSVPerClass = new int[nr_class];
        for (i = 0; i < nr_class; i++)
        {
            int nSV = 0;
            for (int j = 0; j < count[i]; j++)
                if (nonzero[start[i] + j])
                {
                    ++nSV;
                    ++nnz;
                }
            model.NumberOfSVPerClass[i] = nSV;
            nz_count[i] = nSV;
        }

        Procedures.info("Total nSV = " + nnz + "\n");

        model.SupportVectorCount = nnz;
        model.SupportVectors = new Node[nnz][];
        p = 0;
        for (i = 0; i < l; i++)
            if (nonzero[i])
                model.SupportVectors[p++] = x[i];

        int[] nz_start = new int[nr_class];
        nz_start[0] = 0;
        for (i = 1; i < nr_class; i++)
            nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

        model.SupportVectorCoefficients = new double[nr_class - 1][];
        for (i = 0; i < nr_class - 1; i++)
            model.SupportVectorCoefficients[i] = new double[nnz];

        p = 0;
        for (i = 0; i < nr_class; i++)
            for (int j = i + 1; j < nr_class; j++)
            {
                // classifier (i,j): coefficients with
                // i are in sv_coef[j-1][nz_start[i]...],
                // j are in sv_coef[i][nz_start[j]...]
                int si = start[i];
                int sj = start[j];
                int ci = count[i];
                int cj = count[j];

                int q = nz_start[i];
                int k;
                for (k = 0; k < ci; k++)
                    if (nonzero[si + k])
                        model.SupportVectorCoefficients[j - 1][q++] = f[p].alpha[k];
                q = nz_start[j];
                for (k = 0; k < cj; k++)
                    if (nonzero[sj + k])
                        model.SupportVectorCoefficients[i][q++] = f[p].alpha[ci + k];
                ++p;
            }
    }
    return model;
}
public void startSurfTrain()
{
    List<FileInfo> trainingFiles = new List<FileInfo>(1000);
    DirectoryInfo di = new DirectoryInfo(Constants.base_folder + "train_" + Constants.CIRCLE_TRIANGLE);
    DirectoryInfo[] dirs = di.GetDirectories("*");
    foreach (DirectoryInfo dir in dirs)
    {
        int i = 0;
        FileInfo[] files = dir.GetFiles("*.bmp");
        foreach (FileInfo fi in files)
        {
            trainingFiles.Add(fi);
            if (i++ > Constants.MAX_TRAIN_SAMPLE)
                break;
        }
    }

    double[] class_labels = new double[trainingFiles.Count];
    Node[][] nodes = new Node[trainingFiles.Count][];
    for (int i = 0; i < trainingFiles.Count; i++)
    {
        Bitmap bmp = (Bitmap)Bitmap.FromFile(trainingFiles[i].FullName, false);

        // Compute the center of mass of the dark pixels (red channel == 0).
        int com_x_sum = 0, com_y_sum = 0, com_x_y_point_count = 0;
        System.Drawing.Imaging.BitmapData image_data = bmp.LockBits(
            new Rectangle(0, 0, bmp.Width, bmp.Height),
            System.Drawing.Imaging.ImageLockMode.ReadWrite,
            bmp.PixelFormat);
        int bpp = 3;
        int nOffset = image_data.Stride - bmp.Width * bpp;
        System.IntPtr Scan0 = image_data.Scan0;
        unsafe
        {
            byte* p = (byte*)Scan0;
            for (int y = 0; y < Constants.SIGN_HEIGHT; y++)
            {
                for (int x = 0; x < Constants.SIGN_WIDTH; x++, p += bpp)
                {
                    if (p[2] == 0)
                    {
                        com_x_sum += x;
                        com_y_sum += y;
                        com_x_y_point_count++;
                    }
                }
                p += nOffset;
            }
        }
        bmp.UnlockBits(image_data);

        int com_x = com_x_sum / com_x_y_point_count;
        int com_y = com_y_sum / com_x_y_point_count;

        Node[] nds = new Node[NNTrain.numOfinputs];
        nodes[i] = nds;
        bmp.Tag = trainingFiles[i].Name;
        fillFeatures_SURF(bmp, com_x, com_y, nds);

        // The directory name encodes the class label.
        class_labels[i] = Double.Parse(trainingFiles[i].Directory.Name);
    }

    Problem problem = new Problem(nodes.Length, class_labels, nodes, NNTrain.numOfinputs + 1);
    // RangeTransform range = Scaling.DetermineRange(problem);
    // problem = Scaling.Scale(problem, range);

    Parameter param = new Parameter();
    param.KernelType = KernelType.POLY;
    // param.KernelType = KernelType.LINEAR;
    // param.KernelType = KernelType.RBF;
    param.SvmType = SvmType.NU_SVC;
    param.C = 2;
    param.Gamma = .5;
    /*
    double C, Gamma;
    ParameterSelection.Grid(problem, param, Constants.base_folder + "params_" + type + ".txt", out C, out Gamma);
    param.C = C;
    param.Gamma = Gamma;
    //param.Probability = true;
    */

    Model model = Training.Train(problem, param);

    Stream stream = new FileStream(
        Constants.base_folder + Constants.NN_SVM_SURF + "_" + Constants.CIRCLE_TRIANGLE + ".dat",
        FileMode.Create, FileAccess.Write, FileShare.None);
    BinaryFormatter b = new BinaryFormatter();
    b.Serialize(stream, model);
    stream.Close();
}
///
public override void Train()
{
    int num_users = Feedback.UserMatrix.NumberOfRows; // DH: should be based on MaxUserID for cold case? TODO: investigate.
    int num_items = Feedback.ItemMatrix.NumberOfRows;

    var svm_features = new List<Node[]>();
    Node[][] svm_features_array = svm_features.ToArray(); // note: the feature list is never populated here

    var svm_parameters = new Parameter();
    svm_parameters.SvmType = SvmType.EPSILON_SVR;
    //svm_parameters.SvmType = SvmType.NU_SVR;
    svm_parameters.C = this.c;
    svm_parameters.Gamma = this.gamma;

    // user-wise training
    this.models = new Model[num_users];
    for (int u = 0; u < num_users; u++)
    {
        var targets = new double[num_items];
        for (int i = 0; i < num_items; i++)
            targets[i] = Feedback.UserMatrix[u, i] ? 1 : 0;

        Problem svm_problem = new Problem(svm_features.Count, targets, svm_features_array, NumItemAttributes - 1); // TODO check
        models[u] = SVM.Training.Train(svm_problem, svm_parameters);
    }
}
/// <summary>
/// Performs a Grid parameter selection, trying all possible combinations of the two lists and returning the
/// combination which performed best. Use this method if there is no validation data available; it will
/// divide the training data into 5 folds for 5-fold cross validation (training on 4/5 and validating
/// on the remaining 1/5, 5 times).
/// </summary>
/// <param name="problem">The training data</param>
/// <param name="parameters">The parameters to use when optimizing</param>
/// <param name="CValues">The set of C values to use</param>
/// <param name="GammaValues">The set of Gamma values to use</param>
/// <param name="outputFile">Output file for the parameter results</param>
/// <param name="C">The optimal C value will be put into this variable</param>
/// <param name="Gamma">The optimal Gamma value will be put into this variable</param>
public static void Grid(
    Problem problem,
    Parameter parameters,
    List<double> CValues,
    List<double> GammaValues,
    string outputFile,
    out double C,
    out double Gamma)
{
    Grid(problem, parameters, CValues, GammaValues, outputFile, NFOLD, out C, out Gamma);
}
private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    Problem problem = new Problem(_X.Count, _Y.ToArray(), _X.ToArray(), 2);
    RangeTransform range = RangeTransform.Compute(problem);
    problem = range.Scale(problem);

    Parameter param = new Parameter();
    param.C = 2;
    param.Gamma = .5;

    Model model = Training.Train(problem, param);
    Model.Write("model.txt", model);

    int rows = ClientSize.Height;
    int columns = ClientSize.Width;
    Bitmap image = new Bitmap(columns, rows);
    int centerR = rows / 2;
    int centerC = columns / 2;
    BitmapData buf = image.LockBits(new Rectangle(0, 0, columns, rows), ImageLockMode.WriteOnly, PixelFormat.Format24bppRgb);
    unsafe
    {
        byte* ptr = (byte*)buf.Scan0;
        int stride = buf.Stride;
        for (int r = 0; r < rows; r++)
        {
            byte* scan = ptr;
            for (int c = 0; c < columns; c++)
            {
                int x = c - centerC;
                int y = r - centerR;

                // Classify each pixel (after mapping it into the model's scaled
                // input space) and color it according to the predicted class.
                Node[] test = new Node[] { new Node(1, x), new Node(2, y) };
                test = range.Transform(test);
                int assignment = (int)Prediction.Predict(model, test);
                //int assignment = (int)Prediction.Predict(problem, "predict.txt", model, test);
                *scan++ = CLASS_FILL[assignment].B;
                *scan++ = CLASS_FILL[assignment].G;
                *scan++ = CLASS_FILL[assignment].R;
            }
            ptr += stride;
        }
    }
    image.UnlockBits(buf);

    lock (this)
    {
        _canvas = new Bitmap(image);
    }
}
/// <summary>
/// Performs a Grid parameter selection, trying all possible combinations of the two lists and returning the
/// combination which performed best.
/// </summary>
/// <param name="problem">The training data</param>
/// <param name="validation">The validation data</param>
/// <param name="parameters">The parameters to use when optimizing</param>
/// <param name="CValues">The C values to use</param>
/// <param name="GammaValues">The Gamma values to use</param>
/// <param name="outputFile">The output file for the parameter results</param>
/// <param name="C">The optimal C value will be placed in this variable</param>
/// <param name="Gamma">The optimal Gamma value will be placed in this variable</param>
public static void Grid(
    Problem problem,
    Problem validation,
    Parameter parameters,
    List<double> CValues,
    List<double> GammaValues,
    string outputFile,
    out double C,
    out double Gamma)
{
    C = 0;
    Gamma = 0;
    double maxScore = double.MinValue;
    StreamWriter output = null;
    if (outputFile != null)
        output = new StreamWriter(outputFile);
    for (int i = 0; i < CValues.Count; i++)
        for (int j = 0; j < GammaValues.Count; j++)
        {
            parameters.C = CValues[i];
            parameters.Gamma = GammaValues[j];
            Model model = Training.Train(problem, parameters);
            double test = Prediction.Predict(validation, "tmp.txt", model, false);
            Console.Write("{0} {1} {2}", parameters.C, parameters.Gamma, test);
            if (output != null)
                output.WriteLine("{0} {1} {2}", parameters.C, parameters.Gamma, test);
            if (test > maxScore)
            {
                C = parameters.C;
                Gamma = parameters.Gamma;
                maxScore = test;
                Console.WriteLine(" New Maximum!");
            }
            else
                Console.WriteLine();
        }
    if (output != null)
        output.Close();
}
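This overload trains one full model per (C, Gamma) pair, so the search cost scales with CValues.Count * GammaValues.Count. A minimal sketch with explicit, hand-picked grids (the file names and grid values are arbitrary examples):

Problem train = Problem.Read("train.txt");
Problem validation = Problem.Read("validation.txt");
Parameter parameters = new Parameter();
var cValues = new List<double> { 0.1, 1, 10, 100 };
var gammaValues = new List<double> { 0.01, 0.1, 1 };
double C, Gamma;
ParameterSelection.Grid(train, validation, parameters, cValues, gammaValues, "grid.txt", out C, out Gamma);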
public SVC_Q(Problem prob, Parameter param, sbyte[] y_)
    : base(prob.Count, prob.X, param)
{
    y = (sbyte[])y_.Clone();
    cache = new Cache(prob.Count, (long)(param.CacheSize * (1 << 20)));
    QD = new float[prob.Count];
    for (int i = 0; i < prob.Count; i++)
        QD[i] = (float)KernelFunction(i, i);
}
public SVR_Q(Problem prob, Parameter param)
    : base(prob.Count, prob.X, param)
{
    l = prob.Count;
    cache = new Cache(l, (long)(param.CacheSize * (1 << 20)));
    QD = new float[2 * l];
    sign = new sbyte[2 * l];
    index = new int[2 * l];
    for (int k = 0; k < l; k++)
    {
        sign[k] = 1;
        sign[k + l] = -1;
        index[k] = k;
        index[k + l] = k;
        QD[k] = (float)KernelFunction(k, k);
        QD[k + l] = QD[k];
    }
    buffer = new float[2][];
    buffer[0] = new float[2 * l];
    buffer[1] = new float[2 * l];
    next_buffer = 0;
}
private static void solve_one_class(Problem prob, Parameter param, double[] alpha, Solver.SolutionInfo si)
{
    int l = prob.Count;
    double[] zeros = new double[l];
    sbyte[] ones = new sbyte[l];
    int i;

    int n = (int)(param.Nu * prob.Count); // # of alpha's at upper bound

    for (i = 0; i < n; i++)
        alpha[i] = 1;
    if (n < prob.Count)
        alpha[n] = param.Nu * prob.Count - n;
    for (i = n + 1; i < l; i++)
        alpha[i] = 0;

    for (i = 0; i < l; i++)
    {
        zeros[i] = 0;
        ones[i] = 1;
    }

    Solver s = new Solver();
    s.Solve(l, new ONE_CLASS_Q(prob, param), zeros, ones, alpha, 1.0, 1.0, param.EPS, si, param.Shrinking);
}
public static string svm_check_parameter(Problem prob, Parameter param)
{
    // svm_type
    SvmType svm_type = param.SvmType;

    // kernel_type, degree
    //KernelType kernel_type = param.KernelType;
    if (param.Degree < 0)
        return "degree of polynomial kernel < 0";

    // cache_size, eps, C, nu, p, shrinking
    if (param.CacheSize <= 0)
        return "cache_size <= 0";

    if (param.EPS <= 0)
        return "eps <= 0";

    if (param.Gamma == 0)
        param.Gamma = 1.0 / prob.MaxIndex;

    if (svm_type == SvmType.C_SVC || svm_type == SvmType.EPSILON_SVR || svm_type == SvmType.NU_SVR)
        if (param.C <= 0)
            return "C <= 0";

    if (svm_type == SvmType.NU_SVC || svm_type == SvmType.ONE_CLASS || svm_type == SvmType.NU_SVR)
        if (param.Nu <= 0 || param.Nu > 1)
            return "nu <= 0 or nu > 1";

    if (svm_type == SvmType.EPSILON_SVR)
        if (param.P < 0)
            return "p < 0";

    if (param.Probability && svm_type == SvmType.ONE_CLASS)
        return "one-class SVM probability output not supported yet";

    // check whether nu-svc is feasible
    if (svm_type == SvmType.NU_SVC)
    {
        int l = prob.Count;
        int Max_nr_class = 16;
        int nr_class = 0;
        int[] label = new int[Max_nr_class];
        int[] count = new int[Max_nr_class];

        int i;
        for (i = 0; i < l; i++)
        {
            int this_label = (int)prob.Y[i];
            int j;
            for (j = 0; j < nr_class; j++)
                if (this_label == label[j])
                {
                    ++count[j];
                    break;
                }

            if (j == nr_class)
            {
                if (nr_class == Max_nr_class)
                {
                    // Grow the label/count arrays.
                    Max_nr_class *= 2;
                    int[] new_data = new int[Max_nr_class];
                    Array.Copy(label, 0, new_data, 0, label.Length);
                    label = new_data;

                    new_data = new int[Max_nr_class];
                    Array.Copy(count, 0, new_data, 0, count.Length);
                    count = new_data;
                }
                label[nr_class] = this_label;
                count[nr_class] = 1;
                ++nr_class;
            }
        }

        for (i = 0; i < nr_class; i++)
        {
            int n1 = count[i];
            for (int j = i + 1; j < nr_class; j++)
            {
                int n2 = count[j];
                if (param.Nu * (n1 + n2) / 2 > Math.Min(n1, n2))
                    return "specified nu is infeasible";
            }
        }
    }
    return null;
}
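A minimal guard sketch (the caller and file name are hypothetical): svm_check_parameter returns null when the parameters are valid (and, as a side effect, defaults Gamma to 1 / MaxIndex when it is 0); otherwise it returns a human-readable error string.

Problem problem = Problem.Read("train.txt");
Parameter parameters = new Parameter();
string error = Procedures.svm_check_parameter(problem, parameters);
if (error != null)
    throw new ArgumentException("Invalid SVM parameters: " + error);
Model model = Training.Train(problem, parameters);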
// Cross-validation decision values for probability estimates
private static void svm_binary_svc_probability(Problem prob, Parameter param, double Cp, double Cn, double[] probAB)
{
    int i;
    int nr_fold = 5;
    int[] perm = new int[prob.Count];
    double[] dec_values = new double[prob.Count];

    // random shuffle
    Random rand = new Random();
    for (i = 0; i < prob.Count; i++)
        perm[i] = i;
    for (i = 0; i < prob.Count; i++)
    {
        int j = i + (int)(rand.NextDouble() * (prob.Count - i));
        // swap perm[i] and perm[j]
        int _ = perm[i];
        perm[i] = perm[j];
        perm[j] = _;
    }
    for (i = 0; i < nr_fold; i++)
    {
        int begin = i * prob.Count / nr_fold;
        int end = (i + 1) * prob.Count / nr_fold;
        int j, k;
        Problem subprob = new Problem();

        subprob.Count = prob.Count - (end - begin);
        subprob.X = new Node[subprob.Count][];
        subprob.Y = new double[subprob.Count];

        k = 0;
        for (j = 0; j < begin; j++)
        {
            subprob.X[k] = prob.X[perm[j]];
            subprob.Y[k] = prob.Y[perm[j]];
            ++k;
        }
        for (j = end; j < prob.Count; j++)
        {
            subprob.X[k] = prob.X[perm[j]];
            subprob.Y[k] = prob.Y[perm[j]];
            ++k;
        }

        int p_count = 0, n_count = 0;
        for (j = 0; j < k; j++)
            if (subprob.Y[j] > 0)
                p_count++;
            else
                n_count++;

        if (p_count == 0 && n_count == 0)
            for (j = begin; j < end; j++)
                dec_values[perm[j]] = 0;
        else if (p_count > 0 && n_count == 0)
            for (j = begin; j < end; j++)
                dec_values[perm[j]] = 1;
        else if (p_count == 0 && n_count > 0)
            for (j = begin; j < end; j++)
                dec_values[perm[j]] = -1;
        else
        {
            Parameter subparam = (Parameter)param.Clone();
            subparam.Probability = false;
            subparam.C = 1.0;
            subparam.Weights[1] = Cp;
            subparam.Weights[-1] = Cn;
            Model submodel = svm_train(subprob, subparam);
            for (j = begin; j < end; j++)
            {
                double[] dec_value = new double[1];
                svm_predict_values(submodel, prob.X[perm[j]], dec_value);
                dec_values[perm[j]] = dec_value[0];
                // ensure +1 -1 order; reason not using CV subroutine
                dec_values[perm[j]] *= submodel.ClassLabels[0];
            }
        }
    }
    sigmoid_train(prob.Count, dec_values, prob.Y, probAB);
}
private static void solve_epsilon_svr(Problem prob, Parameter param, double[] alpha, Solver.SolutionInfo si)
{
    int l = prob.Count;
    double[] alpha2 = new double[2 * l];
    double[] linear_term = new double[2 * l];
    sbyte[] y = new sbyte[2 * l];
    int i;

    for (i = 0; i < l; i++)
    {
        alpha2[i] = 0;
        linear_term[i] = param.P - prob.Y[i];
        y[i] = 1;

        alpha2[i + l] = 0;
        linear_term[i + l] = param.P + prob.Y[i];
        y[i + l] = -1;
    }

    Solver s = new Solver();
    s.Solve(2 * l, new SVR_Q(prob, param), linear_term, y, alpha2, param.C, param.C, param.EPS, si, param.Shrinking);

    double sum_alpha = 0;
    for (i = 0; i < l; i++)
    {
        alpha[i] = alpha2[i] - alpha2[i + l];
        sum_alpha += Math.Abs(alpha[i]);
    }
    Procedures.info("nu = " + sum_alpha / (param.C * l) + "\n");
}
// Return parameter of a Laplace distribution
private static double svm_svr_probability(Problem prob, Parameter param)
{
    int i;
    int nr_fold = 5;
    double[] ymv = new double[prob.Count];
    double mae = 0;

    Parameter newparam = (Parameter)param.Clone();
    newparam.Probability = false;
    svm_cross_validation(prob, newparam, nr_fold, ymv);
    for (i = 0; i < prob.Count; i++)
    {
        ymv[i] = prob.Y[i] - ymv[i];
        mae += Math.Abs(ymv[i]);
    }
    mae /= prob.Count;

    // Discard residuals beyond 5 standard deviations, then recompute
    // the mean absolute error on the rest.
    double std = Math.Sqrt(2 * mae * mae);
    int count = 0;
    mae = 0;
    for (i = 0; i < prob.Count; i++)
        if (Math.Abs(ymv[i]) > 5 * std)
            count = count + 1;
        else
            mae += Math.Abs(ymv[i]);
    mae /= (prob.Count - count);

    Procedures.info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma), sigma=" + mae + "\n");
    return mae;
}
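A brief justification (a note added here, not part of the original source) for why the mean absolute residual is returned as sigma:

// For a Laplace density p(z) = e^{-|z|/\sigma} / (2\sigma):
//   \mathbb{E}\,|z| = \int_{-\infty}^{\infty} |z|\,\frac{e^{-|z|/\sigma}}{2\sigma}\,dz = \sigma,
//   \operatorname{Var}(z) = 2\sigma^2.
// So the MAE of the cross-validation residuals is a direct estimate of sigma,
// and the outlier cutoff above uses std = \sqrt{2\sigma^2} = \sqrt{2 \cdot mae^2}.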
private static void solve_nu_svr(Problem prob, Parameter param, double[] alpha, Solver.SolutionInfo si)
{
    int l = prob.Count;
    double C = param.C;
    double[] alpha2 = new double[2 * l];
    double[] linear_term = new double[2 * l];
    sbyte[] y = new sbyte[2 * l];
    int i;

    double sum = C * param.Nu * l / 2;
    for (i = 0; i < l; i++)
    {
        alpha2[i] = alpha2[i + l] = Math.Min(sum, C);
        sum -= alpha2[i];

        linear_term[i] = -prob.Y[i];
        y[i] = 1;

        linear_term[i + l] = prob.Y[i];
        y[i + l] = -1;
    }

    Solver_NU s = new Solver_NU();
    s.Solve(2 * l, new SVR_Q(prob, param), linear_term, y, alpha2, C, C, param.EPS, si, param.Shrinking);

    Procedures.info("epsilon = " + (-si.r) + "\n");

    for (i = 0; i < l; i++)
        alpha[i] = alpha2[i] - alpha2[i + l];
}
/// <summary>
/// Performs a Grid parameter selection, trying all possible combinations of the two lists and returning the
/// combination which performed best. Use this method if validation data isn't available, as it will
/// divide the training data into folds, training on a portion and testing on the rest.
/// </summary>
/// <param name="problem">The training data</param>
/// <param name="parameters">The parameters to use when optimizing</param>
/// <param name="CValues">The set of C values to use</param>
/// <param name="GammaValues">The set of Gamma values to use</param>
/// <param name="outputFile">Output file for the parameter results</param>
/// <param name="nrfold">The number of cross validation folds</param>
/// <param name="C">The optimal C value will be placed in this variable</param>
/// <param name="Gamma">The optimal Gamma value will be placed in this variable</param>
public static void Grid(
    Problem problem,
    Parameter parameters,
    List<double> CValues,
    List<double> GammaValues,
    string outputFile,
    int nrfold,
    out double C,
    out double Gamma)
{
    C = 0;
    Gamma = 0;
    double crossValidation = double.MinValue;
    StreamWriter output = null;
    if (outputFile != null)
        output = new StreamWriter(outputFile);
    for (int i = 0; i < CValues.Count; i++)
        for (int j = 0; j < GammaValues.Count; j++)
        {
            parameters.C = CValues[i];
            parameters.Gamma = GammaValues[j];
            double test = Training.PerformCrossValidation(problem, parameters, nrfold);
            if (output != null)
                output.WriteLine("{0} {1} {2}", parameters.C, parameters.Gamma, test);
            if (test > crossValidation)
            {
                C = parameters.C;
                Gamma = parameters.Gamma;
                crossValidation = test;
            }
        }
    if (output != null)
        output.Close();
}
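A closing usage sketch (paths, grid values, and fold count are hypothetical): run the cross-validation variant when no separate validation set exists, then train with the winning pair.

Problem train = Problem.Read("train.txt");
Parameter parameters = new Parameter();
var cValues = new List<double> { 0.5, 2, 8, 32 };
var gammaValues = new List<double> { 0.008, 0.063, 0.5 };
double C, Gamma;
ParameterSelection.Grid(train, parameters, cValues, gammaValues, "grid_cv.txt", 10, out C, out Gamma);
parameters.C = C;
parameters.Gamma = Gamma;
Model model = Training.Train(train, parameters);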