public void XORTest()
{
    // Grid-searches the sigmoid-kernel hyper-parameters (gamma, C, r) on the XOR
    // training set and reports the combination with the fewest training errors.
    var range = Enumerable.Range(-10, 16);

    // Candidate values are powers of two — the usual libsvm search grid.
    // (The originals were misleadingly named log2*: they hold 2^i, not log2 values.)
    var gammas = range.Select(i => Math.Pow(2, i));
    var costs = range.Select(i => Math.Pow(2, i + 1));
    var rs = range.Select(i => Math.Pow(2, i + 1));

    var prob = ProblemHelper.ReadProblem(XOR_TRAINING_FILE);

    // best = (gamma, C, r, errorCount), seeded with the worst possible error count
    // so the first evaluated combination always replaces it.
    Tuple<double, double, double, int> best = Tuple.Create(0.0, 0.0, 0.0, prob.l);

    foreach (var g in gammas)
    {
        foreach (var c in costs)
        {
            foreach (var r in rs)
            {
                var svm = new C_SVC(prob, KernelHelper.SigmoidKernel(g, r), c);

                // Count training samples the trained model fails to reproduce.
                // (The original also called PredictProbabilities per sample and
                // discarded the result — removed as dead computation.)
                var errorCount = 0;
                for (int i = 0; i < prob.l; i++)
                {
                    var z = svm.Predict(prob.x[i]);
                    if (z != prob.y[i])
                        errorCount++;
                }

                if (errorCount < best.Item4)
                    best = Tuple.Create(g, c, r, errorCount);
            }
        }
    }

    Debug.WriteLine(String.Format(
        "BEST :: g={0} && C={1} && R={2} => Error rate = {3}%",
        best.Item1, best.Item2, best.Item3,
        (double)best.Item4 / (double)prob.l * 100));
}
static void Main(string[] args)
{
    // Read the manually-labelled sentiment data set.
    const string dataFilePath = @"E:\training data\testdata.manual.2009.06.14.csv";
    var dataTable = DataTable.New.ReadCsv(dataFilePath);
    var rows = dataTable.Rows.ToList();

    List<string> x = rows.Select(row => row["Tweet"]).ToList();
    double[] y = rows.Select(row => double.Parse(row["Sentiment"])).ToArray();

    // Build the bag-of-words vocabulary from every distinct word in the corpus.
    var vocabulary = x.SelectMany(GetWords).Distinct().OrderBy(word => word).ToList();

    // Translate the text corpus into a libsvm problem.
    var problemBuilder = new TextClassificationProblemBuilder();
    var problem = problemBuilder.CreateProblem(x, y, vocabulary.ToList());

    // Create and train a linear-kernel C-SVC model.
    const int C = 1;
    var model = new C_SVC(problem, KernelHelper.LinearKernel(), C);

    // Map the numeric class labels back to human-readable sentiment.
    var predictionDictionary = new Dictionary<int, string> { { 0, "Bad" }, { 4, "Good" }, { 2, "Neutral" } };

    // Interactive prediction loop; type "quit" to exit.
    while (true)
    {
        var userInput = Console.ReadLine();
        if (userInput == "quit")
            break; // bug fix: the original do/while still predicted on "quit" before exiting

        var newX = TextClassificationProblemBuilder.CreateNode(userInput, vocabulary);
        var predictedY = model.Predict(newX);
        Console.WriteLine("{0}", predictionDictionary[(int)predictedY]);
        Console.WriteLine(new string('=', 50));
    }
}
/// <summary>
/// Shows how to measure model quality with k-fold cross validation.
/// Asserts that the resulting accuracy is strictly positive.
/// </summary>
//[TestMethod()]
public void DoCrossValidationTest()
{
    var classifier = new C_SVC(_prob, KernelHelper.RadialBasisFunctionKernel(gamma), C);
    var accuracy = classifier.GetCrossValidationAccuracy(5);
    Assert.IsTrue(accuracy > 0);
}
public void DoCrossValidationTest()
{
    // Train on the shared problem/kernel fixture, then run 5-fold cross validation.
    var classifier = new C_SVC(_prob, kernel, C);
    var accuracy = classifier.GetCrossValidationAccuracy(5);

    // Any usable model must classify at least some samples correctly.
    Assert.IsTrue(accuracy > 0);
}
public void C_SVC_Should_predict_perfectly_XOR_dataset_with_polynomial_kernel()
{
    // The polynomial kernel K(u, v) = (u.v + 1)^2 can fit the XOR function exactly;
    // see http://www.doc.ic.ac.uk/~dfg/ProbabilisticInference/IDAPILecture18.pdf
    var classifier = new C_SVC(xor_problem, KernelHelper.PolynomialKernel(2, 1, 1), 1);
    checkXOR(classifier);
}
public IActionResult RecommendedPlaces()
{
    // Load the predefined training data for the SVM classifier.
    var dataFilePath = "./wwwroot/svm/words.csv";
    var dataTable = DataTable.New.ReadCsv(dataFilePath);
    var data = dataTable.Rows.Select(row => row["Text"]).ToList();

    // Class labels (-1 = not recommended, +1 = recommended).
    var classes = dataTable.Rows.Select(row => double.Parse(row["IsRecommended"])).ToArray();

    // Bag-of-words vocabulary over the training texts.
    var vocabulary = data.SelectMany(GetWords).Distinct().OrderBy(word => word).ToList();

    // Generate the svm problem and train a linear-kernel C-SVC model.
    var problem = CreateProblem(data, classes, vocabulary.ToList());
    const int C = 1;
    var model = new libsvm.C_SVC(problem, KernelHelper.LinearKernel(), C);

    var predictionDictionary = new Dictionary<int, string> { { -1, "NotRecommended" }, { 1, "Recommended" } };

    // Classify every stored review; reviews without text are never recommended.
    var reviews = _context.Review.ToList();
    foreach (var review in reviews)
    {
        if (review.Content != null)
        {
            var node = CreateNode(review.Content, vocabulary);
            var prediction = model.Predict(node);
            review.IsRecommended = predictionDictionary[(int)prediction] == "Recommended";
        }
        else
        {
            review.IsRecommended = false;
        }
    }

    // Bug fix: the original kept this as a deferred query and enumerated it twice
    // (the foreach below, then OrderByDescending) — materialize it exactly once.
    var recommendedReviews = reviews.Where(p => p.IsRecommended).ToList();

    // Eagerly attach the related place and comments needed by the view.
    foreach (var review in recommendedReviews)
    {
        review.Place = _context.Place.First(c => c.ID == review.PlaceID);
        review.Comments = _context.Comment.Where(c => c.ReviewID == review.ID).ToList();
    }

    return View(recommendedReviews.OrderByDescending(p => p.PublishDate));
}
static void Main()
{
    // STEP 4: Read the training data.
    const string dataFilePath = @"D:\MACHINE_LEARNING\SVM\Tutorial\sunnyData.csv";
    var dataTable = DataTable.New.ReadCsv(dataFilePath);

    List<string> x = dataTable.Rows.Select(row => row["Text"]).ToList();
    double[] y = dataTable.Rows.Select(row => double.Parse(row["IsSunny"])).ToArray();

    // Bag-of-words vocabulary over the corpus.
    var vocabulary = x.SelectMany(GetWords).Distinct().OrderBy(word => word).ToList();

    var problemBuilder = new TextClassificationProblemBuilder();
    var problem = problemBuilder.CreateProblem(x, y, vocabulary.ToList());

    // If you want you can save this problem with :
    // ProblemHelper.WriteProblem(@"D:\MACHINE_LEARNING\SVM\Tutorial\sunnyData.problem", problem);
    // And then load it again using:
    // var problem = ProblemHelper.ReadProblem(@"D:\MACHINE_LEARNING\SVM\Tutorial\sunnyData.problem");

    // Train a linear-kernel C-SVC and report its 10-fold cross-validation accuracy.
    const int C = 1;
    var model = new C_SVC(problem, KernelHelper.LinearKernel(), C);
    var accuracy = model.GetCrossValidationAccuracy(10);

    Console.Clear();
    Console.WriteLine(new string('=', 50));
    Console.WriteLine("Accuracy of the model is {0:P}", accuracy);
    model.Export(string.Format(@"D:\MACHINE_LEARNING\SVM\Tutorial\model_{0}_accuracy.model", accuracy));
    Console.WriteLine(new string('=', 50));
    Console.WriteLine("The model is trained. \r\nEnter a sentence to make a prediction. (ex: sunny rainy sunny)");
    Console.WriteLine(new string('=', 50));

    _predictionDictionary = new Dictionary<int, string> { { -1, "Rainy" }, { 1, "Sunny" } };

    // Interactive prediction loop; type "quit" to exit.
    while (true)
    {
        var userInput = Console.ReadLine();
        if (userInput == "quit")
            break; // bug fix: the original do/while still predicted on "quit" before exiting

        var newX = TextClassificationProblemBuilder.CreateNode(userInput, vocabulary);
        var predictedY = model.Predict(newX);
        Console.WriteLine("The prediction is {0}", _predictionDictionary[(int)predictedY]);
        Console.WriteLine(new string('=', 50));
    }
    Console.WriteLine("");
}
public static void SVMPredict()
{
    // Train an RBF-kernel C-SVC on the shared training problem.
    var svm = new C_SVC(prob, KernelHelper.RadialBasisFunctionKernel(gamma), C);

    // Bug fix: the original computed the CV accuracy and silently discarded it.
    double accuracy = svm.GetCrossValidationAccuracy(nr_fold);
    Console.WriteLine("Cross validation accuracy: {0:P}", accuracy);

    for (int i = 0; i < test.l; i++)
    {
        svm_node[] x = test.x[i];
        double y = test.y[i]; // ground-truth label (kept for inspection/debugging)

        double predict = svm.Predict(x); // the predicted value 'y'
        Dictionary<int, double> probabilities = svm.PredictProbabilities(x); // probability per class label

        // Bug fix: indexing probabilities[1] directly threw KeyNotFoundException
        // when class label 1 was absent from the training data.
        double probabilityOfOne;
        if (probabilities.TryGetValue(1, out probabilityOfOne))
            Console.WriteLine(predict + " :" + probabilityOfOne);
        else
            Console.WriteLine(predict);
    }
    Console.ReadKey();
}
/// <summary>
/// Shows how to predict class-membership probabilities for classification problems.
/// Verifies that the prediction is always the most probable class and that the
/// returned probabilities form a valid distribution over all classes.
/// </summary>
//[TestMethod()]
public void PredictTest()
{
    var svm = new C_SVC(_prob, KernelHelper.RadialBasisFunctionKernel(gamma), C);
    var nb_class = _prob.y.Distinct().Count();

    for (int i = 0; i < _prob.l; i++)
    {
        var x = _prob.x[i];

        var probabilities = svm.PredictProbabilities(x);
        var predict = svm.Predict(x);

        // The predicted label must be the class with the highest probability.
        Assert.IsTrue(predict == probabilities.OrderByDescending(p => p.Value).First().Key);
        Assert.IsNotNull(probabilities);
        Assert.IsTrue(probabilities.Count == nb_class);

        // Bug fix: the original computed the sum but never checked it —
        // a probability distribution must sum to 1 (within tolerance).
        var sum = probabilities.Sum(e => e.Value);
        Assert.IsTrue(Math.Abs(sum - 1.0) < 1e-6);
    }
}
public void C_SVC_should_always_return_the_same_cross_validation_accuracy_when_probability_is_false()
{
    // Arrange
    var problem = CreateSimpleProblem();
    var model = new C_SVC(problem, KernelHelper.LinearKernel(), 1);

    // Act: run 10-fold cross validation repeatedly on the same model.
    var results = new double[10];
    for (int i = 0; i < 10; i++)
    {
        results[i] = model.GetCrossValidationAccuracy(10);
    }

    // Assert: every run must yield the same accuracy.
    // Bug fix: the original loop started at 1, so results[0] was never checked.
    for (int i = 0; i < 10; i++)
    {
        Assert.AreEqual(0.90909090909090906, results[i]);
    }
}
public void C_SVC_should_enable_to_export_and_import_svm_models()
{
    // The polynomial kernel K(u, v) = (u.v + 1)^2 fits the XOR function exactly;
    // see http://www.doc.ic.ac.uk/~dfg/ProbabilisticInference/IDAPILecture18.pdf
    var trained = new C_SVC(xor_problem, KernelHelper.PolynomialKernel(2, 1, 1), 1);

    var file_name = System.IO.Path.Combine(base_path, "test_export_temp.xml");

    // Start from a clean slate in case a previous run left the file behind.
    if (File.Exists(file_name))
    {
        File.Delete(file_name);
    }

    // Round-trip: export the trained model, then rebuild a classifier from disk.
    trained.Export(file_name);
    Assert.IsTrue(File.Exists(file_name));

    var imported = new C_SVC(file_name);
    checkXOR(imported);

    // Cleanup.
    File.Delete(file_name);
}
public void DoCrossValidationTest2()
{
    // Run 5-fold cross validation on the scaled svmguide1 test set.
    var prob2 = ProblemHelper.ReadAndScaleProblem(SVMGUIDE1_TEST_FILE);
    var svm = new C_SVC(prob2, KernelHelper.RadialBasisFunctionKernel(3.0), 2.0);
    var cva = svm.GetCrossValidationAccuracy(5);

    // Bug fix: the accuracy was computed but never verified, so this test could
    // never fail; assert it is positive, consistent with the other CV tests.
    Assert.IsTrue(cva > 0);
}
public void SVMConstructorTest1()
{
    // Smoke test: constructing a C_SVC directly from a data file must not throw.
    var classifier = new C_SVC(LEU_TEST_FILE, kernel, C);
}
public void SVMConstructorTest()
{
    // Smoke test: constructing a C_SVC from an in-memory problem must not throw.
    var classifier = new C_SVC(_prob, kernel, C);
}
// Trains a bigram-based sentiment classifier over parliamentary speeches (Han.csv),
// predicts sentiment for a second set of texts, then writes per-person tallies and
// speaker/target sentiment relations out to CSV and JSON files for visualisation.
// NOTE(review): this method depends heavily on class-level mutable state
// (sentC, nText, pl, ps, ns, nss, rl, _predictionDictionary) that is read and
// written by the helper calls below (getSent, addToO, getNumbers, forPeople,
// doSwap, getRels) — their exact contracts are not visible in this file.
static void Main()
{
    const string dataFilePath = @"C:\Users\Rory\Desktop\Han.csv";

    // Sentiment-lexicon word lists; loading is currently disabled (see the
    // commented-out GetNeg calls), so both remain empty and unused.
    List<String> negwords = new List<String>();
    List<String> poswords = new List<String>();
    sentC = new List<String>(); // collects the predicted label for each classified text
    //String negFile = "C:/Users/Rory/Desktop/negative-words.txt";
    //String posFile = "C:/Users/Rory/Desktop/positive-words.txt";
    //GetNeg(negwords, negFile);
    //GetNeg(poswords, posFile);

    // Standard English stop words plus parliamentary forms of address
    // ("hon.", "gentleman", "member", "friend", "lady") that carry no sentiment.
    String[] stopwords = new String[]{"hon.","gentleman","member","friend","lady","a", "about", "above", "above", "across", "after", "afterwards", "again", "against", "all", "almost", "alone", "along", "already", "also","although","always","am","among", "amongst", "amoungst", "amount", "an", "and", "another", "any","anyhow","anyone","anything","anyway", "anywhere", "are", "around", "as", "at", "back","be","became", "because","become","becomes", "becoming", "been", "before", "beforehand", "behind", "being", "below", "beside", "besides", "between", "beyond", "bill", "both", "bottom","but", "by", "call", "can", "cannot", "cant", "co", "con", "could", "couldnt", "cry", "de", "describe", "detail", "do", "done", "down", "due", "during", "each", "eg", "eight", "either", "eleven","else", "elsewhere", "empty", "enough", "etc", "even", "ever", "every", "everyone", "everything", "everywhere", "except", "few", "fifteen", "fify", "fill", "find", "fire", "first", "five", "for", "former", "formerly", "forty", "found", "four", "from", "front", "full", "further", "get", "give", "go", "had", "has", "hasnt", "have", "he", "hence", "her", "here", "hereafter", "hereby", "herein", "hereupon", "hers", "herself", "him", "himself", "his", "how", "however", "hundred", "i","ie", "if", "in", "inc", "indeed", "interest", "into", "is", "it", "its", "itself", "keep", "last", "latter", "latterly", "least", "less", "ltd", "made", "many", "may", "me", "meanwhile", "might", "mill", "mine", "more", "moreover", "most", "mostly", "move", "much", "must", "my", "myself", "name", "namely", "neither", "never", "nevertheless", "next", "nine", "no", "nobody", "none", "noone", "nor", "not", "nothing", "now", "nowhere", "of", "off", "often", "on", "once", "one", "only", "onto", "or", "other", "others", "otherwise", "our", "ours", "ourselves", "out", "over", "own","part", "per", "perhaps", "please", "put", "rather", "re", "same", "see", "seem", "seemed", "seeming", "seems", "serious", "several", "she", "should", "show", "side", "since", "sincere", "six", "sixty", "so", "some", "somehow", "someone", "something", "sometime", "sometimes", "somewhere", "still", "such", "system", "take", "ten", "than", "that", "the", "their", "them", "themselves", "then", "thence", "there", "thereafter", "thereby", "therefore", "therein", "thereupon", "these", "they", "thickv", "thin", "third", "this", "those", "though", "three", "through", "throughout", "thru", "thus", "to", "together", "too", "top", "toward", "towards", "twelve", "twenty", "two", "un", "under", "until", "up", "upon", "us", "very", "via", "was", "we", "well", "were", "what", "whatever", "when", "whence", "whenever", "where", "whereafter", "whereas", "whereby", "wherein", "whereupon", "wherever", "whether", "which", "while", "whither", "who", "whoever", "whole", "whom", "whose", "why", "will", "with", "within", "without", "would", "yet", "you", "your", "yours", "yourself", "yourselves", "the"};
    List<String> stop = stopwords.ToList<String>();

    // Load the labelled training speeches: Text column + IsPos label.
    var dataTable = DataTable.New.ReadCsv(dataFilePath);
    List<string> x = dataTable.Rows.Select(row => row["Text"]).ToList();
    double[] y = dataTable.Rows.Select(row => double.Parse(row["IsPos"])).ToArray();
    //var vocab = x.SelectMany(GetWords).Distinct().OrderBy(word => word).ToList();

    // Build the feature vocabulary: lower-case each speech, strip commas and
    // stop words, then collect n-grams of the cleaned sentence via Bigram.
    Bigram b = new Bigram();
    List<String> v = new List<string>();
    String sent = "";
    for(int i = 0; i < x.Count; i++)
    {
        String c = x[i].ToString();
        c = c.Replace(",", "");
        c = c.ToLower();
        String[] sp = c.Split(' ');
        for (int z = 0; z < sp.Length; z++)
        {
            String word = sp[z];
            if(stop.Contains(word))
            {
                Debug.WriteLine("Stop Word"); // stop words are dropped from the sentence
            }
            else
            {
                sent += word;
                sent += " ";
            }
        }
        sent = sent.Trim();
        // presumably getNG returns the sentence's n-grams — TODO confirm in Bigram
        v.AddRange(b.getNG(sent));
        sent = "";
    }
    // int bound = v.Count();
    //v.AddRange(negwords);
    // int nBound = v.Count();
    //v.AddRange(poswords);
    // v = v.Distinct().ToList();

    // Build the libsvm problem, round-trip it through disk, and train a
    // linear-kernel C-SVC; report 10-fold cross-validation accuracy.
    var problemBuilder = new TextClassificationProblemBuilder();
    var problem = problemBuilder.CreateProblem(x, y, v);
    ProblemHelper.WriteProblem(@"C:\Users\Rory\Desktop\hanData.problem", problem);
    problem = ProblemHelper.ReadProblem(@"C:\Users\Rory\Desktop\hanData.problem");
    const int C = 1;
    var model = new C_SVC(problem, KernelHelper.LinearKernel(), C);
    var accuracy = model.GetCrossValidationAccuracy(10);
    Console.WriteLine("Accuracy of the model is {0:P}", accuracy);

    string userInput;
    _predictionDictionary = new Dictionary<int, string> { { -1, "Negative" }, { 1, "Positive" } };

    // Classify each text in nText (populated by getSent) and record the label.
    getSent();
    for (int i = 0; i < nText.Count; i++)
    {
        userInput = nText[i].ToString();
        var newX = TextClassificationProblemBuilder.CreateNode(userInput, v);
        var predictedY = model.Predict(newX);
        Console.WriteLine("The prediction is {0}", _predictionDictionary[(int)predictedY]);
        String pred = _predictionDictionary[(int)predictedY];
        Console.WriteLine(new string('=', 50));
        sentC.Add(pred);
    }

    // Aggregate the predictions into the class-level person/sentiment structures.
    addToO();
    getNumbers();
    forPeople();

    // Per-person sentiment tallies.
    using(StreamWriter file = new StreamWriter(@"C:\Users\Rory\Desktop\Peop.csv"))
    {
        file.WriteLine("PersID,Name,Positive,Negative,Party");
        for(int i = 0; i < pl.Count; i++)
        {
            String line = pl[i].getID().ToString() + "," + pl[i].getName().ToString() + "," + pl[i].getPos().ToString() + "," + pl[i].getNeg().ToString() + "," + pl[i].getParty().ToString();
            //var json = JsonConvert.SerializeObject(pl[i]);
            file.WriteLine(line);
        }
    }

    // Positive speaker→target sentiment records.
    using (StreamWriter file = new StreamWriter(@"C:\Users\Rory\Desktop\PositiveSent.csv"))
    {
        file.WriteLine("SpeakerID,TargetID,Text,Sentiment");
        for (int i = 0; i < ps.Count; i++)
        {
            String line = ps[i].getSID().ToString() + "," + ps[i].getAID().ToString() + "," + ps[i].getText().ToString() + "," + ps[i].getSent().ToString();
            file.WriteLine(line);
        }
    }

    // Negative speaker→target sentiment records.
    using (StreamWriter file = new StreamWriter(@"C:\Users\Rory\Desktop\NegativeSent.csv"))
    {
        file.WriteLine("SpeakerID,TargetID,Text,Sentiment");
        for (int i = 0; i < ns.Count; i++)
        {
            String line = ns[i].getSID().ToString() + "," + ns[i].getAID().ToString() + "," + ns[i].getText().ToString() + "," + ns[i].getSent().ToString();
            file.WriteLine(line);
        }
    }

    // Merge positive records into the negative list, then emit a graph JSON
    // ({"nodes": people, "edges": nss}) built by string concatenation.
    ns.AddRange(ps);
    doSwap();
    var json = "{\"nodes\":";
    json += JsonConvert.SerializeObject(pl);
    json += ",";
    var edge = "\"edges\":";
    edge += JsonConvert.SerializeObject(nss);
    edge += "}";
    var fullJSON = json + edge;
    Debug.Write(fullJSON);
    using (StreamWriter file = new StreamWriter(@"C:\Users\Rory\Desktop\Nodes2.JSON"))
    {
        file.Write(fullJSON);
    }

    // Second graph variant: same nodes with relationship links sorted by source.
    getRels();
    List<Relationship> sl = rl.OrderBy(o=>o.source).ToList();
    var edges = "\"links\":";
    edges += JsonConvert.SerializeObject(sl);
    edges += "}";
    var fullJ = json + edges;
    using (StreamWriter file = new StreamWriter(@"C:\Users\Rory\Desktop\Rels2.JSON"))
    {
        file.Write(fullJ);
    }
}