static void Main(string[] args)
{
    // Trains three pairwise RBF-kernel classifiers (DvC, DvH, HvC) from the
    // scaled problem files next to the executable, reports cross-validation
    // accuracy for each and exports the trained models.
    var path = Environment.CurrentDirectory;
    string DvCPath = System.IO.Path.Combine(path, DvC_TEST_FILE);
    string DvHPath = System.IO.Path.Combine(path, DvH_TEST_FILE);
    string HvCPath = System.IO.Path.Combine(path, HvC_TEST_FILE);

    DvC_prob = ProblemHelper.ReadAndScaleProblem(DvCPath);
    DvH_prob = ProblemHelper.ReadAndScaleProblem(DvHPath);
    HvC_prob = ProblemHelper.ReadAndScaleProblem(HvCPath);

    var DvCsvm = new C_SVC(DvC_prob, KernelHelper.RadialBasisFunctionKernel(gamma), C);
    var DvHsvm = new C_SVC(DvH_prob, KernelHelper.RadialBasisFunctionKernel(gamma), C);
    var HvCsvm = new C_SVC(HvC_prob, KernelHelper.RadialBasisFunctionKernel(gamma), C);

    // NOTE(review): DvH uses 2-fold CV while the other two use 5-fold — kept
    // as-is in case that was intentional (e.g. a smaller data set); confirm.
    var DvCcva = DvCsvm.GetCrossValidationAccuracy(5);
    var DvHcva = DvHsvm.GetCrossValidationAccuracy(2);
    var HvCcva = HvCsvm.GetCrossValidationAccuracy(5);

    DvCsvm.Export(System.IO.Path.Combine(path, DvC_MODEL_FILE));
    DvHsvm.Export(System.IO.Path.Combine(path, DvH_MODEL_FILE));
    HvCsvm.Export(System.IO.Path.Combine(path, HvC_MODEL_FILE));

    // String.Format around constant strings and the explicit .ToString()
    // were redundant; interpolation produces identical output.
    Console.WriteLine("--------------------------");
    Console.WriteLine($"DvC Result: {Math.Round(DvCcva * 100, 2)}%");
    Console.WriteLine($"DvH Result: {Math.Round(DvHcva * 100, 2)}%");
    Console.WriteLine($"HvC Result: {Math.Round(HvCcva * 100, 2)}%");
    Console.WriteLine("--------------------------");
    Console.ReadKey();
}
// Click handler (work in progress): fills in missing values of a chosen
// column using SVM regression (Epsilon_SVR) over the selected predictor
// columns.
// NOTE(review): `values` is never cleared inside the foreach, so every
// column after the first trains on the accumulated values of ALL previous
// columns — confirm whether it should be reset per iteration.
// NOTE(review): `mse` and `prediction` are computed but never used.
private void buttonSVM_Click(object sender, EventArgs e)
{
    List <double> values = new List <double>();
    foreach (var column in checkedListBoxVariableRellenar.SelectedItems)
    {
        // Collect every row's value for this column as a double.
        for (int i = 0; i < dt.Rows.Count; i++)
        {
            values.Add(double.Parse(dt.Rows[i][column.ToString()].ToString()));
        }
        var dataTraining = ProblemHelper.ReadAndScaleProblem(new List <List <double> >() { values });
        var svm = new Epsilon_SVR(DataProblem, KernelHelper.RadialBasisFunctionKernel(Gamma), C, Elipson);
        double mse = svm.GetMeanSquaredError();
        var prediction = svm.Predict(dataTraining.x[0]);
    }
    // (Plan, translated from Spanish:)
    // 1. First build a sub-table with the attributes to be used —
    //    the ones checked in the checkbox list.
    // 2. Choose the column whose missing values are to be filled in.
    // 3. Remove the records with missing data in the predictor variables;
    //    in this case those with value -200.
    // 4. Apply the SVM algorithm.
    // 5. Generate a view with the resulting values.
    // 6. Generate a results summary: "in row X, value A was changed to B".
}
static void Main(string[] args)
{
    // For each test fingerprint image: locate the marked (red) reference
    // point, compute the VSCOME core-point estimate, and log the distance
    // between the two.
    string[] pathes = Directory.GetFiles("C:\\Users\\Tanya\\Documents\\tests_data\\db");
    // `using` guarantees the writer is flushed and closed even if processing
    // one of the images throws — the original leaked the StreamWriter then.
    using (StreamWriter writer = new StreamWriter("C:\\Users\\Tanya\\Documents\\Results\\AlgorithmVSCOMEResult.txt", true))
    {
        // Limited to the first 3 images for the experiment; the commented
        // bound processes the whole directory.
        for (int i = 0; i < 3 /*pathes.GetLength(0)*/; i++)
        {
            Tuple <int, int> redPoint = ImageHelper.FindColorPoint(pathes[i]);
            double[,] imgBytes = ImageEnhancementHelper.EnhanceImage(ImageHelper.LoadImage(pathes[i]));
            double[,] orientationField = OrientationFieldGenerator.GenerateOrientationField(imgBytes.Select2D(x => (int)x));
            // Map each orientation angle to a unit complex number at double
            // the angle: cos(2x) + i*sin(2x).
            Complex[,] complexOrientationField = orientationField.Select2D(x => (new Complex(Math.Cos(2 * x), Math.Sin(2 * x))));
            Complex[,] filter = Filter.GetFilter(orientationField);
            Complex[,] complexFilteredField = ConvolutionHelper.ComplexConvolve(complexOrientationField, filter);
            double[,] filteredField = complexFilteredField.Select2D(x => x.Magnitude);
            VSCOME vscome = new VSCOME(orientationField, filteredField);
            double[,] vscomeValue = vscome.CalculateVscomeValue();
            Tuple <int, int> corePoint = KernelHelper.Max2dPosition(vscomeValue);
            writer.WriteLine(GetDistance(redPoint, corePoint));
            // Optional debug dumps of the intermediate fields:
            // ImageHelper.SaveArray(orientationField, "C:\\Users\\Tanya\\Documents\\Results\\orientationField.jpg");
            // ImageHelper.SaveArray(filteredField, "C:\\Users\\Tanya\\Documents\\Results\\filteredField.jpg");
            // ImageHelper.SaveArray(vscomeValue, "C:\\Users\\Tanya\\Documents\\Results\\vscomeValue_1.jpg");
        }
    }
}
// Builds a linear-kernel C-SVC over plane-ticket data: the text feature is
// a country name derived from the ticket's target, the label is +1 when the
// ticket was sold, -1 otherwise.
public C_SVC getmodel()
{
    List <string> x = new List <string>();
    List <double> yb = new List <double>();
    foreach (var obj in _context.PlainTickets)
    {
        double val = -1;
        // NOTE(review): this compares Countries.Key against the target's
        // CountryName and then reads CountryName again — looks suspicious;
        // confirm the intended join. Also, FirstOrDefault().CountryName
        // throws NullReferenceException when no match exists — verify the
        // data guarantees a match.
        x.Add(_context.Countries.Where(ct => ct.Key == _context.Targets.
                                       Where(t => t.Key == obj.Target).FirstOrDefault().CountryName).
              FirstOrDefault().CountryName);
        if (obj.IsSold)
        {
            val = 1;
        }
        yb.Add(val);
    }
    double[] y = yb.ToArray();
    // Distinct sorted word list used as the feature vocabulary.
    this.vocabulary = x.SelectMany(GetWords).Distinct().OrderBy(word => word).ToList();
    var problemBuilder = new TextClassificationProblemBuilder();
    var problem = problemBuilder.CreateProblem(x, y, vocabulary.ToList());
    const int C = 1;
    var model = new C_SVC(problem, KernelHelper.LinearKernel(), C);
    return(model);
}
static void Main2(string path)
{
    // STEP 4: load the labelled sample set from disk.
    const string dataFilePath = @"D:\texto.csv";
    var table = DataTable.New.ReadCsv(dataFilePath);
    List <string> samples = table.Rows.Select(row => row["Text"]).ToList();
    double[] labels = table.Rows.Select(row => double.Parse(row["IsSunny"])).ToArray();

    // Secondary data set parsed out of a raw CSV via arreglar_dato.
    string texto = File.ReadAllText(path + @"/datoscsv.csv", Encoding.Default);
    List <string> x2 = new List <string>();
    double[] y2 = null;
    arreglar_dato(texto, ref x2, ref y2);

    // Vocabulary = every distinct word across the samples, sorted.
    var vocabulary = samples.SelectMany(GetWords).Distinct().OrderBy(word => word).ToList();
    var problemBuilder = new TextClassificationProblemBuilder();
    var problem = problemBuilder.CreateProblem(samples, labels, vocabulary.ToList());
    // A problem can be persisted with ProblemHelper.WriteProblem(...) and
    // reloaded later with ProblemHelper.ReadProblem(...).

    // Train a linear-kernel C-SVC and measure 10-fold CV accuracy.
    const int C = 1;
    var model = new C_SVC(problem, KernelHelper.LinearKernel(), C);
    var accuracy = model.GetCrossValidationAccuracy(10);

    _predictionDictionary = new Dictionary <int, string> { { -1, "Rainy" }, { 1, "Sunny" } };

    // Predict a fixed sample input.
    string userInput = "caries";
    var newX = TextClassificationProblemBuilder.CreateNode(userInput, vocabulary);
    var predictedY = model.Predict(newX);
    Console.WriteLine("");
}
private SvmMethod()
{
    // Trains a linear-kernel sentiment classifier from the bundled CSV.
    var path = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Files/SentimentAnalysisDataset.csv");
    List <string> x = new List <string>();
    List <double> y = new List <double>();
    if (File.Exists(path))
    {
        var lines = File.ReadAllLines(path);
        // Capped at 500 rows for speed (the full set is 5146). Math.Min
        // guards against the IndexOutOfRangeException the original threw
        // when the file contained fewer than 500 lines.
        int rowCount = Math.Min(500, lines.Length);
        for (int i = 0; i < rowCount; i++)
        {
            // Split each row on its source marker; presumably the prefix is
            // "<id>,<label>" and the suffix is the tweet text — confirm.
            var lineArr = lines[i].Split(new string[] { ",Sentiment140,", ",Kaggle," }, StringSplitOptions.None);
            y.Add(double.Parse(lineArr[0].Split(',')[1]));
            x.Add(lineArr[1].Trim());
        }
    }
    //var dataTable = DataTable.New.ReadCsv(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Files/spamdata.csv"));
    //List<string> x = dataTable.Rows.Select(row => row["Text"]).ToList();
    //double[] y = dataTable.Rows.Select(row => double.Parse(row["IsSpam"])).ToArray();
    vocabulary = x.SelectMany(GetWords).Distinct().OrderBy(word => word).ToList();
    var problemBuilder = new TextClassificationProblemBuilder();
    var problem = problemBuilder.CreateProblem(x, y.ToArray(), vocabulary.ToList());
    const int C = 1;
    model = new C_SVC(problem, KernelHelper.LinearKernel(), C);
    // Label 0 = negative, 1 = positive.
    _predictionDictionary = new Dictionary <int, string> { { 0, "negative" }, { 1, "positive" } };
}
public static void Main()
{
    // Load the training CSV from the web application's DAL/svm folder.
    string dataFilePath = HttpContext.Current.Server.MapPath("~/DAL/svm/");
    var table = DataTable.New.ReadCsv(dataFilePath + "Data.csv");
    List <string> texts = table.Rows.Select(row => row["Text"]).ToList();
    double[] categories = table.Rows.Select(row => double.Parse(row["Category"])).ToArray();

    // Distinct sorted word list that indexes the feature vector.
    vocabulary = texts.SelectMany(GetWords).Distinct().OrderBy(word => word).ToList();
    var builder = new TextClassificationProblemBuilder();
    var problem = builder.CreateProblem(texts, categories, vocabulary.ToList());
    // A problem can be persisted with ProblemHelper.WriteProblem(...) and
    // reloaded later with ProblemHelper.ReadProblem(...).

    // Train a linear-kernel C-SVC, report 10-fold CV accuracy and export it.
    const int C = 1;
    model = new C_SVC(problem, KernelHelper.LinearKernel(), C);
    var accuracy = model.GetCrossValidationAccuracy(10);

    Console.Clear();
    Console.WriteLine(new string('=', 50));
    Console.WriteLine("Accuracy of the model is {0:P}", accuracy);
    model.Export(string.Format(dataFilePath + "model_{0}_accuracy.model", accuracy));
    Console.WriteLine(new string('=', 50));
    Console.WriteLine("The model is trained. \r\nEnter a sentence to make a prediction. (ex: sunny rainy sunny)");
    Console.WriteLine(new string('=', 50));
}
/// <summary>
/// Verifies that Epsilon_SVR reports a strictly positive mean squared
/// error on the training problem.
/// </summary>
//[TestMethod()]
public void GetMeanSquaredErrorTest()
{
    var regression = new Epsilon_SVR(training_prob, KernelHelper.RadialBasisFunctionKernel(gamma), C, epsilon);
    double meanSquaredError = regression.GetMeanSquaredError();
    Assert.IsTrue(meanSquaredError > 0);
}
// Trains a linear C-SVC over labelled "<window title> <process name>"
// samples stored in MongoDB, then predicts a Karma label for each of
// today's active applications.
// NOTE(review): the connection string was redacted ("*****:*****") and the
// redaction appears to have swallowed the code between the MongoClient
// construction and the text-filter lambda (the `database` and `filter`
// variables used below are never declared here) — this fragment is not
// syntactically valid as shown; restore it from version control.
private static void TrainingRecordData() { var mongoClient = new MongoClient("mongodb://*****:*****@"[^a-zA-Z]", " ").Trim().ToLowerInvariant().Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries).Where(d => d.Count() < 20).ToArray();
    var trainingDataCollection = database.GetCollection <ProcessInfoLabeledItem>("training_data");
    var records = trainingDataCollection.Find(Builders <ProcessInfoLabeledItem> .Filter.Empty).ToList();
    // Vocabulary: all filtered words across title+process strings, sorted.
    var vocabulary = records.Select(c => c.Title + " " + c.Process).SelectMany(filter).Distinct().OrderBy(str => str).ToList();
    List <string> x = records.Select(item => item.Title + " " + item.Process).ToList();
    double[] y = records.Select(item => (double)item.Category).ToArray();
    var problemBuilder = new TextClassificationProblemBuilder();
    problemBuilder.RefineText = filter;
    var problem = problemBuilder.CreateProblem(x, y, vocabulary.ToList());
    const int C = 1;
    var model = new C_SVC(problem, KernelHelper.LinearKernel(), C);
    var _predictionDictionary = new Dictionary <Karma, string> { { Karma.Bad, "Bad" }, { Karma.Good, "Good" }, { Karma.Neutral, "Neutral" } };
    // Today's usage record -> one "<main window title> <process name>"
    // string per active app. NOTE(review): FirstOrDefault() followed by
    // .ActiveApps throws if today's record is missing — confirm.
    var newXs = database.GetCollection <AppUsageRecord>("daily_records").Find(Builders <AppUsageRecord> .Filter.Eq(f => f.Id, AppUsageRecord.GetGeneratedId(DateTime.Now))).FirstOrDefault().ActiveApps.Select(c => c.Value).Select(c => c.MainWindowTitle + " " + c.ProcessName);
    foreach (var _x in newXs)
    {
        var newX = TextClassificationProblemBuilder.CreateNode(_x, vocabulary, problemBuilder.RefineText);
        var predictedY = model.Predict(newX);
        Console.WriteLine($"For title {_x}");
        Console.WriteLine($"The prediction is {_predictionDictionary[(Karma)predictedY]}");
    }
}
private void TrainingData()
{
    // Trains a rainy/sunny text classifier from the bundled CSV, then loops
    // reading sentences from the console and printing a prediction until
    // the user types "quit".
    string dateFilePath = Path.Combine(Directory.GetCurrentDirectory(), $"sunnyData.csv");
    var table = DataTable.New.ReadCsv(dateFilePath);
    List <string> sentences = table.Rows.Select(row => row["Text"]).ToList();
    double[] labels = table.Rows.Select(row => double.Parse(row["IsSunny"])).ToArray();

    var vocabulary = sentences.SelectMany(GetWords).Distinct().OrderBy(w => w).ToList();
    var builder = new TextClassificationProblemBuilder();
    var problem = builder.CreateProblem(sentences, labels, vocabulary.ToList());

    const int C = 1;
    var model = new C_SVC(problem, KernelHelper.LinearKernel(), C);

    var labelNames = new Dictionary <int, string> { { -1, "Rainy" }, { 1, "Sunny" } };
    string userInput;
    do
    {
        userInput = Console.ReadLine();
        // A prediction is made even for the final "quit" input, matching
        // the loop's post-condition check.
        var node = TextClassificationProblemBuilder.CreateNode(userInput, vocabulary);
        var predicted = model.Predict(node);
        Console.WriteLine("The prediction is {0}", labelNames[(int)predicted]);
        Console.WriteLine(new string('=', 50));
    } while (userInput != "quit");
    Console.WriteLine("");
}
/// <summary>
/// Cross-validation smoke test: 5-fold CV accuracy of the RBF-kernel
/// C-SVC must come out strictly positive.
/// </summary>
//[TestMethod()]
public void DoCrossValidationTest()
{
    var classifier = new C_SVC(_prob, KernelHelper.RadialBasisFunctionKernel(gamma), C);
    var accuracy = classifier.GetCrossValidationAccuracy(5);
    Assert.IsTrue(accuracy > 0);
}
public void C_SVC_Should_predict_perfectly_XOR_dataset_with_polynomial_kernel()
{
    // A degree-2 polynomial kernel K(u, v) = (u·v + 1)^2 can fit the XOR
    // function exactly; see
    // http://www.doc.ic.ac.uk/~dfg/ProbabilisticInference/IDAPILecture18.pdf
    var classifier = new C_SVC(xor_problem, KernelHelper.PolynomialKernel(2, 1, 1), 1);
    checkXOR(classifier);
}
public static double[,] Smoothing(double[,] img, double sigma)
{
    // Gaussian low-pass: build a 2D Gaussian kernel sized for `sigma` and
    // convolve the image with it.
    int size = KernelHelper.GetKernelSizeForGaussianSigma(sigma);
    var gaussian = KernelHelper.MakeKernel((x, y) => Gaussian.Gaussian2D(x, y, sigma), size);
    return ConvolutionHelper.Convolve(img, gaussian);
}
public static Complex[,] EsimateH2(double[,] img, double Sigma1, double Sigma2)
{
    // Squared direction field at scale Sigma1.
    var z = GetSquaredDerectionField(img, Sigma1);
    // Complex kernel at scale Sigma2: real part weighted by x, imaginary
    // part by -y, both under a Gaussian envelope.
    int size = KernelHelper.GetKernelSizeForGaussianSigma(Sigma2);
    var kernel = KernelHelper.MakeComplexKernel(
        (x, y) => Gaussian.Gaussian2D(x, y, Sigma2) * x,
        (x, y) => Gaussian.Gaussian2D(x, y, Sigma2) * (-y),
        size);
    return ConvolutionHelper.ComplexConvolve(z, kernel);
}
public void TestConvolutionForEvenSizedFilters()
{
    // Even-sized (4x4) all-ones kernel: exercises convolution with filters
    // that have no single centre pixel; result is opened for inspection.
    var fingerprint = ImageHelper.LoadImage <double>(Resources.SampleFinger);
    var boxFilter = KernelHelper.MakeKernel((x, y) => 1, 4);
    var convolved = ConvolutionHelper.Convolve(fingerprint, boxFilter);
    ImageHelper.SaveArrayAndOpen(convolved);
}
public static double[,] Reduce2(double[,] source, double factor)
{
    // Smoothing sigma tied to the reduction factor: factor/2 * 0.75.
    double sigma = factor / 2d * 0.75d;
    var kernel = KernelHelper.MakeKernel(
        (x, y) => Gaussian.Gaussian2D(x, y, sigma),
        KernelHelper.GetKernelSizeForGaussianSigma(sigma));
    // Low-pass first, then resample into the reduced-size array.
    var smoothed = ConvolutionHelper.Convolve(source, kernel);
    int rows = (int)(source.GetLength(0) / factor);
    int cols = (int)(source.GetLength(1) / factor);
    var result = new double[rows, cols];
    Resize(smoothed, result, factor, (x, y) => Gaussian.Gaussian2D(x, y, sigma));
    return result;
}
public override IModelLikelihood <double, int> GenerateModelLikelihood(IDataSet <double, int> training_set)
{
    // Convert the data set into libsvm's problem representation.
    svm_problem problem = new svm_problem
    {
        l = training_set.InstanceCount,
        x = CreateNodeArray(ToolsCollection.ConvertToArray2D(training_set.FeatureData)),
        y = ToolsCollection.ConvertToDoubleArray(ToolsCollection.ConvertToArray2D(training_set.LabelData).Select1DIndex1(0))
    };
    // Train an RBF-kernel C-SVC and wrap it in the model adapter.
    var classifier = new C_SVC(problem, KernelHelper.RadialBasisFunctionKernel(this.Gamma), this.C, this.CacheSize, true);
    return new ModelLibSVMCSVC(training_set.DataContext, classifier);
}
// Console app: trains a 5-class emotion classifier from spamdata.csv and
// then predicts the class of sentences typed by the user until "quit".
static void Main(string[] args)
{
    // STEP 4: Read the data
    const string dataFilePath = @"spamdata.csv";
    var dataTable = DataTable.New.ReadCsv(dataFilePath);
    List <string> x = dataTable.Rows.Select(row => row["Text"]).ToList();
    double[] y = dataTable.Rows.Select(row => double.Parse(row["IsSpam"])).ToArray();
    // Distinct sorted word list used as the feature vocabulary.
    var vocabulary = x.SelectMany(GetWords).Distinct().OrderBy(word => word).ToList();
    var problemBuilder = new TextClassificationProblemBuilder();
    var problem = problemBuilder.CreateProblem(x, y, vocabulary.ToList());
    // If you want you can save this problem with :
    // ProblemHelper.WriteProblem(@"D:\MACHINE_LEARNING\SVM\Tutorial\sunnyData.problem", problem);
    // And then load it again using:
    // var problem = ProblemHelper.ReadProblem(@"D:\MACHINE_LEARNING\SVM\Tutorial\sunnyData.problem");
    const int C = 1;
    var model = new C_SVC(problem, KernelHelper.LinearKernel(), C);
    var accuracy = model.GetCrossValidationAccuracy(10);
    Console.Clear();
    Console.WriteLine(new string('=', 50));
    Console.WriteLine("Accuracy of the model is {0:P}", accuracy);
    model.Export(string.Format(@"model_{0}_accuracy.model", accuracy));
    Console.WriteLine(new string('=', 50));
    Console.WriteLine("The model is trained. \r\nEnter a sentence to make a prediction. (ex: love hate dong)");
    Console.WriteLine(new string('=', 50));
    string userInput;
    // Maps the predicted value (-2 to 2) to the emotion label shown to the
    // user. (The original comment said "-1 to 3", which didn't match the
    // dictionary keys below.)
    _predictionDictionary = new Dictionary <int, string> { { -2, "Angry" }, { -1, "Sad" }, { 0, "Normal" }, { 1, "Happy" }, { 2, "Love" } };
    do
    {
        userInput = Console.ReadLine();
        var newX = TextClassificationProblemBuilder.CreateNode(userInput, vocabulary);
        var predictedY = model.Predict(newX);
        Console.WriteLine("The prediction is {0} value is {1} ", _predictionDictionary[(int)predictedY], predictedY);
        Console.WriteLine(new string('=', 50));
    } while (userInput != "quit");
    Console.WriteLine("");
}
// Writes the input and test data to disk in libsvm format and trains an
// RBF-kernel C-SVC on the (scaled) input set.
// NOTE(review): testFilePath is written but never read back, and the
// trained `svm` local is discarded when the method returns — confirm
// whether evaluation on the test set was intended here.
public static void LibSVM(List <string> inputData, List <string> testData)
{
    var inputFilePath = @"D:\新西兰学习生活\大学上课\乐谱数据\input.txt";
    var testFilePath = @"D:\新西兰学习生活\大学上课\乐谱数据\test.txt";
    PrepareDataLibSvm(inputData, inputFilePath);
    PrepareDataLibSvm(testData, testFilePath);
    var _prob = ProblemHelper.ReadAndScaleProblem(inputFilePath);
    var svm = new C_SVC(_prob, KernelHelper.RadialBasisFunctionKernel(gamma), C);
}
public IActionResult RecommendedPlaces()
{
    // Load the predefined training data for the SVM.
    var dataFilePath = "./wwwroot/svm/words.csv";
    var table = DataTable.New.ReadCsv(dataFilePath);
    var texts = table.Rows.Select(row => row["Text"]).ToList();
    // Class labels: -1 or +1.
    var labels = table.Rows.Select(row => double.Parse(row["IsRecommended"])).ToArray();

    // Build the word vocabulary and the svm problem.
    var vocabulary = texts.SelectMany(GetWords).Distinct().OrderBy(word => word).ToList();
    var problem = CreateProblem(texts, labels, vocabulary.ToList());

    // Create and train the model (training happens in the constructor).
    const int C = 1;
    var model = new libsvm.C_SVC(problem, KernelHelper.LinearKernel(), C);
    var labelNames = new Dictionary <int, string> { { -1, "NotRecommended" }, { 1, "Recommended" } };

    // Classify every review; reviews without content are not recommended.
    var reviews = _context.Review.ToList();
    foreach (var review in reviews)
    {
        if (review.Content == null)
        {
            review.IsRecommended = false;
            continue;
        }
        var node = CreateNode(review.Content, vocabulary);
        var prediction = model.Predict(node);
        review.IsRecommended = labelNames[(int)prediction] == "Recommended";
    }

    // Attach place and comments to each recommended review.
    var recommendedReviews = reviews.Where(p => p.IsRecommended == true);
    foreach (var review in recommendedReviews)
    {
        review.Place = _context.Place.First(c => c.ID == review.PlaceID);
        review.Comments = _context.Comment.Where(c => c.ReviewID == review.ID).ToList();
    }
    return View(recommendedReviews.OrderByDescending(p => p.PublishDate));
}
/// <summary>
/// Returns a value between 0 and 100: the percentage of the image that is
/// classified as background. The image is split into windowSize x windowSize
/// regions; a region whose average gradient magnitude falls below
/// (global average * weight) counts as background.
/// </summary>
public static double GetBackgroundRercentage(double[,] img, int windowSize, double weight)
{
    int badRegions = 0;
    //int mediumRegions = 0;
    int goodRegions = 0;
    double numberOfRegions;
    // Per-pixel gradient magnitude from the integer x/y gradient fields.
    int[,] xGradients = OrientationFieldGenerator.GenerateXGradients(img.Select2D(a => (int)a));
    int[,] yGradients = OrientationFieldGenerator.GenerateYGradients(img.Select2D(a => (int)a));
    double[,] magnitudes = xGradients.Select2D(
        (value, x, y) => Math.Sqrt(xGradients[x, y] * xGradients[x, y] + yGradients[x, y] * yGradients[x, y]));
    double averege = KernelHelper.Average(magnitudes);
    double[,] window = new double[windowSize, windowSize];
    // NOTE(review): N and M are not locals — this method mutates shared
    // (static/field) state as a side effect; confirm callers expect that.
    N = (int)Math.Ceiling(((double)img.GetLength(0)) / windowSize);
    M = (int)Math.Ceiling(((double)img.GetLength(1)) / windowSize);
    numberOfRegions = N * M;
    for (int i = 0; i < N; i++)
    {
        for (int j = 0; j < M; j++)
        {
            // Copy region (i, j) into `window`; cells that fall past the
            // image border contribute 0.
            window = window.Select2D((value, x, y) =>
            {
                if (i * windowSize + x >= magnitudes.GetLength(0) || j * windowSize + y >= magnitudes.GetLength(1))
                {
                    return(0);
                }
                return(magnitudes[(int)(i * windowSize + x), (int)(j * windowSize + y)]);
            });
            // Low average magnitude relative to the weighted global
            // average marks the region as background.
            if (KernelHelper.Average(window) < averege * weight)
            {
                badRegions++;
            }
            else
            {
                goodRegions++;
            }
        }
    }
    return(((badRegions) / numberOfRegions) * 100);
}
public static C_SVC CreateModel()
{
    // Builds and trains a linear-kernel C-SVC from the prepared
    // X / Y / Vocabulary data.
    // Returns null when problem construction or training throws — callers
    // must handle a null result.
    try
    {
        var prob = CreateProblem(X, Y, Vocabulary);
        const int C = 1;
        return(new C_SVC(prob, KernelHelper.LinearKernel(), C));
    }
    catch (Exception)
    {
        // NOTE(review): the failure is silently swallowed (no logger is
        // available here); the unused `ex` local was removed. Consider
        // surfacing/logging the exception.
        return(null);
    }
}
public static Complex[,] EstimatePS(double[,] img, double Sigma1, double Sigma2)
{
    // Squared direction field at scale Sigma1.
    var z = GetSquaredDerectionField(img, Sigma1);
    // Gaussian-weighted unit direction kernel (x, y)/|r|; the conditional
    // avoids a 0/0 at the kernel centre where x = y = 0.
    int size = KernelHelper.GetKernelSizeForGaussianSigma(Sigma2);
    var kernel = KernelHelper.MakeComplexKernel(
        (x, y) => Gaussian.Gaussian2D(x, y, Sigma2) * x / (x == 0 && y == 0 ? 1 : Math.Sqrt(x * x + y * y)),
        (x, y) => Gaussian.Gaussian2D(x, y, Sigma2) * y / (x == 0 && y == 0 ? 1 : Math.Sqrt(x * x + y * y)),
        size);
    return ConvolutionHelper.ComplexConvolve(z, kernel);
}
public static Complex[,] GetSquaredDerectionField(double[,] img, double Sigma1)
{
    int size = KernelHelper.GetKernelSizeForGaussianSigma(Sigma1);
    // Gaussian-weighted responses along x and (negated) y.
    var gradX = ConvolutionHelper.Convolve(
        img, KernelHelper.MakeKernel((x, y) => Gaussian.Gaussian2D(x, y, Sigma1) * x, size));
    var gradY = ConvolutionHelper.Convolve(
        img, KernelHelper.MakeKernel((x, y) => Gaussian.Gaussian2D(x, y, Sigma1) * -y, size));
    // Squaring each complex value doubles its argument, so opposite
    // directions map to the same squared direction.
    var field = KernelHelper.MakeComplexFromDouble(gradX, gradY);
    return field.Select2D(v => v * v);
}
public void Create_Train_SVMmodel(string path_dataCSV_trainning, double C)
{
    // Reads labelled text rows ("text", "class") from the CSV, builds the
    // word vocabulary and trains a linear-kernel C-SVC into the `model`
    // and `vocabulary` members.
    var dataTable = DataAccess.DataTable.New.ReadCsv(path_dataCSV_trainning);
    List <string> x = dataTable.Rows.Select(row => row["text"]).ToList();
    double[] y = dataTable.Rows.Select(row => double.Parse(row["class"])).ToArray();
    vocabulary = x.SelectMany(GetWords).Distinct().OrderBy(word => word).ToList();
    var problemBuilder = new TextClassificationProblemBuilder();
    // `y` is already a double[]; the redundant y.ToArray() copy was removed.
    var problem = problemBuilder.CreateProblem(x, y, vocabulary.ToList());
    model = new C_SVC(problem, KernelHelper.LinearKernel(), C);
}
public static List <Minutia> ExtractMinutiae(double[,] imgBytes)
{
    // Linear-symmetry (LS) and parabolic-symmetry (PS) responses.
    var ls = SymmetryHelper.EstimateLS(imgBytes, sigma1, sigma2);
    var ps = SymmetryHelper.EstimatePS(imgBytes, sigma1, sigma2);
    // Optional debug dumps:
    //ImageHelper.SaveComplexArrayAsHSV(ls,"C:\\temp\\lsenh.png");
    //ImageHelper.SaveArray(NormalizeArray(ps.Select2D(x=>x.Magnitude)), "C:\\temp\\psenh.png");
    // Suppress PS where LS is strong: psi = ps * (1 - |ls|).
    var psi = KernelHelper.Zip2D(ps, ls.Select2D(v => v.Magnitude), (p, l) => p * (1.0d - l));
    return SearchMinutiae(psi, ls, ps);
}
// Loads the SVM training corpus from "<filename>TrainSVM.txt" if present,
// scales it, trains a linear C-SVC into the `svm` member and writes the
// scaled problem to "<filename>output.txt".
// NOTE(review): when the file is missing, `fileExistance` is returned
// unchanged — it is never set to false here; confirm the field's
// initial/previous value is what callers expect.
public bool buildSVMCorpus(string filename)
{
    string trainDataPath = filename + "TrainSVM.txt";
    if (File.Exists(trainDataPath))
    {
        _prob = ProblemHelper.ReadProblem(trainDataPath);
        _test = ProblemHelper.ScaleProblem(_prob);
        svm = new C_SVC(_test, KernelHelper.LinearKernel(), C);
        ProblemHelper.WriteProblem(filename + "output.txt", _test);
        fileExistance = true;
    }
    return(fileExistance);
}
public static void Train()
{
    // Import review data (argument semantics defined by DataHandler) and
    // fit a linear-kernel C-SVC on (review text -> overall rating).
    DataHandler.ImportReviewData(3);
    var texts = DataHandler.Reviews.Select(r => r.reviewText);
    double[] ratings = DataHandler.Reviews.Select(r => r.overall).ToArray();
    var builder = new TextClassificationProblemBuilder();
    var problem = builder.CreateProblem(texts, ratings, DataHandler.Vocabulary);
    const int C = 1;
    model = new C_SVC(problem, KernelHelper.LinearKernel(), C);
}
public void Train()
{
    // Trains a linear C-SVC on the SVM data set, measures 100-fold CV
    // accuracy, then persists the model and its vocabulary under bin\
    // with the accuracy embedded in the file names.
    SVMDataManager data = new SVMDataManager();
    var problemBuilder = new SVMProblemBuilder();
    var problem = problemBuilder.CreateProblem(data.RequestText, data.ClassValue, data.Vocabulary.ToList());
    const double C = 0.5;
    C_SVC model = new C_SVC(problem, KernelHelper.LinearKernel(), C); // Train is called automatically here
    accuracy = model.GetCrossValidationAccuracy(100);
    // The original wrapped these paths in an extra argument-less
    // string.Format(...), which is an identity operation — removed.
    model.Export(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, string.Format(@"bin\model_{0}_accuracy.model", accuracy)));
    System.IO.File.WriteAllLines(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, string.Format(@"bin\model_{0}_vocabulary.txt", accuracy)), data.Vocabulary);
}
public static void SVMPredict()
{
    // Train an RBF-kernel C-SVC, report CV accuracy, then print a
    // prediction and a class-1 probability for every test instance.
    var classifier = new C_SVC(prob, KernelHelper.RadialBasisFunctionKernel(gamma), C);
    double accuracy = classifier.GetCrossValidationAccuracy(nr_fold);
    for (int i = 0; i < test.l; i++)
    {
        svm_node[] features = test.x[i];
        double actual = test.y[i];
        // Predicted label for this instance.
        double predicted = classifier.Predict(features);
        // Probability for each possible label; [1] reads the entry keyed 1.
        Dictionary <int, double> probabilities = classifier.PredictProbabilities(features);
        Console.WriteLine(predicted + " :" + probabilities[1]);
    }
    Console.ReadKey();
}