public void runKNN()
{
    // k = 1: only the single nearest neighbour casts the decision, and
    // strings are compared using the Levenshtein edit distance.
    var knn = new KNearestNeighbors <string>(k: 1, distance: new Levenshtein());

    // Training inputs are the raw strings from the "Output" column; the
    // labels are the discrete symbols for "GeneratedByProgram".
    String[] trainingInput = trainingData.ToArray <String>("Output");
    DataTable trainingsymbols = convertStringDataToDiscreteSymbol();
    int[] trainingOutput = trainingsymbols.ToArray <int>("GeneratedByProgram");

    // We learn the algorithm:
    knn.Learn(trainingInput, trainingOutput);

    // After the algorithm has been created, we can use it:
    int answer = knn.Decide("Chars"); // answer should be 1.

    // Build a small hand-made evaluation set.
    DataTable testdata = new DataTable("Sample Data");
    // FIX: the original passed both names to a single Columns.Add call
    // (Columns.Add("Output", "GeneratedByProgram")), which binds to the
    // Add(string columnName, Type columnType) overload and does not create
    // two columns; each column must be added separately.
    testdata.Columns.Add("Output");
    testdata.Columns.Add("GeneratedByProgram");
    testdata.Rows.Add("a8", "Yes");
    testdata.Rows.Add("b5", "Yes");
    testdata.Rows.Add("This is real", "No");
    testdata.Rows.Add("a9", "Yes");
    testdata.Rows.Add("b15", "Yes");
    testdata.Rows.Add("b15", "Yes");
    testdata.Rows.Add("b18", "Yes");
    testdata.Rows.Add("b200", "Yes");
    testdata.Rows.Add("b17", "Yes");
    testdata.Rows.Add("b62", "Yes");
    testdata.Rows.Add("b90", "Yes");
    testdata.Rows.Add("b123", "Yes");
    testdata.Rows.Add("This is Ok", "Yes");
    testdata.Rows.Add("b1", "Yes");
    testdata.Rows.Add("b64", "Yes");
    testdata.Rows.Add("I am god", "No");
    testdata.Rows.Add("b14", "Yes");
    testdata.Rows.Add("b1", "Yes");
    testdata.Rows.Add("b64", "Yes");
    testdata.Rows.Add("b100000000000", "Yes");

    // Spot-check a single known instance.
    testForInstance(knn, "b15", "Yes");

    // Encode the expected labels with the same codebook used for training.
    DataTable testsymbols = codebook.Apply(testdata);
    String[] testInput = testdata.ToArray <String>("Output");
    int[] testOutput = testsymbols.ToArray <int>("GeneratedByProgram");

    int[] answers = knn.Decide(testInput);
    Console.WriteLine("\n Accuracy (Tested on 20 data set): " + calculateAccuracy(answers, testOutput));
}
public string CalculateKNN(double [] data)
{
    // Classify `data` with a 3-nearest-neighbours model trained on the
    // shared Property training set; returns "True"/"False", or "" when
    // classification fails (e.g. the vector has the wrong format).
    var knn = new KNearestNeighbors(k: 3);
    knn.Learn(Property.inputs, Property.outputs);

    string answer = "";
    try
    {
        // Class 1 maps to "False"; every other class maps to "True".
        answer = knn.Decide(data) == 1 ? "False" : "True";
    }
    catch
    {
        // Alert the user that the input data format should be checked.
        MessageBox.Show("데이터 포멧 확인");
    }
    return answer;
}
public void foo()
{
    // Locate the sample workbook next to the executable.
    String path = Environment.CurrentDirectory + "\\example.xlsx";

    // Load worksheet "T1" into a DataTable.
    DataTable table = new ExcelReader(path).GetWorksheet("T1");

    // The "Output" column holds the raw training strings.
    String[] trainingInputs = table.Columns["Output"].ToArray <String>();

    // Map the "GeneratedByProgram" labels onto discrete integer symbols.
    Codification codebook = new Codification(table, "GeneratedByProgram");
    DataTable symbols = codebook.Apply(table);
    int[] trainingOutputs = symbols.ToArray <int>("GeneratedByProgram");

    // 1-NN over the Levenshtein edit distance lets us classify raw strings.
    var knn = new KNearestNeighbors <string>(k: 1, distance: new Levenshtein());
    knn.Learn(trainingInputs, trainingOutputs);

    // Classify one sample instance.
    int answer = knn.Decide("Chars");
}
/// <summary>
/// Returns, for each room name appearing among the nearest neighbours of
/// <paramref name="coordinates"/>, the fraction of neighbours classified
/// into that room (values sum to 1).
/// </summary>
public static Dictionary <string, double> getOptions(double[] coordinates, KNearestNeighbors Knn, Dictionary <int, string> labelMap)
{
    int[] list = labelMap.Keys.ToArray();

    // FIX: the original called GetNearestNeighbors twice — once only to
    // read its Length and again to iterate. Compute the neighbour set once.
    // (The commented-out duplicate of GetRoom that preceded this method was
    // dead code and has been removed.)
    double[][] neighbors = Knn.GetNearestNeighbors(coordinates, out list);
    int length = neighbors.Length;

    // Vote counting: room name -> number of neighbours classified there.
    Dictionary <string, int> optionDict = new Dictionary <string, int>();
    foreach (double[] g in neighbors)
    {
        string roomname = getRoomname(Knn.Decide(g), labelMap);
        if (optionDict.ContainsKey(roomname))
        {
            optionDict[roomname] += 1;
        }
        else
        {
            optionDict.Add(roomname, 1);
        }
    }

    // Normalise the counts into fractions of the neighbour set.
    Dictionary <string, double> options = new Dictionary <string, double>();
    foreach (KeyValuePair <string, int> option in optionDict)
    {
        options.Add(option.Key, option.Value / (double)length);
    }
    return options;
}
public void learn_test()
{
    #region doc_learn_distance
    // Sample data: two points of class 0, four of class 1, three of class 2.
    double[][] inputs =
    {
        new double[] { -5, -2, -1 },  // class 0
        new double[] { -5, -5, -6 },  // class 0

        new double[] { 2, 1, 1 },     // class 1
        new double[] { 1, 1, 2 },     // class 1
        new double[] { 1, 2, 2 },     // class 1
        new double[] { 3, 1, 2 },     // class 1

        new double[] { 11, 5, 4 },    // class 2
        new double[] { 15, 5, 6 },    // class 2
        new double[] { 10, 5, 6 },    // class 2
    };

    int[] outputs =
    {
        0, 0,          // class 0
        1, 1, 1, 1,    // class 1
        2, 2, 2        // class 2
    };

    // k = 4: the four nearest neighbours (by squared Euclidean distance)
    // cast the decision for a query point.
    var knn = new KNearestNeighbors <double[]>(k: 4, distance: new SquareEuclidean());
    knn.Learn(inputs, outputs);

    // A point inside the class-2 cluster should be classified as 2.
    int answer = knn.Decide(new double[] { 11, 5, 4 });

    // On its own training set the classifier should be perfect.
    var cm = GeneralConfusionMatrix.Estimate(knn, inputs, outputs);
    double error = cm.Error;   // expected 0
    double acc = cm.Accuracy;  // expected 1
    double kappa = cm.Kappa;   // expected 1
    #endregion

    Assert.AreEqual(2, answer);
    Assert.AreEqual(0, error);
    Assert.AreEqual(1, acc);
    Assert.AreEqual(1, kappa);
}
public void testForInstance(KNearestNeighbors <string> knn, String input, String Output)
{
    // Classify the input string; the result is a numeric class symbol.
    int symbol = knn.Decide(input);

    // Translate the numeric symbol back into its "Yes"/"No" label via the codebook.
    string result = codebook.Revert("GeneratedByProgram", symbol);

    Console.WriteLine("Test Data Input : " + input + "\nExpectation: " + Output + "\nResult: " + result);
}
public void KNearestNeighbors(out int answer)
{
    // First ten training samples are class 0, the next ten are class 1.
    int[] y = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };

    // k comes from the UI text box; Convert.ToInt32 throws on non-numeric text.
    int kn = Convert.ToInt32(textBox1.Text);

    var knn = new KNearestNeighbors(k: kn);
    knn.Learn(train_emfcc, y);

    // Classify the current MFCC feature vector and append the class to the label.
    answer = knn.Decide(mfcc_earr);
    label6.Text += answer.ToString();
}
public static List <string> getOptions(double[] coordinates, KNearestNeighbors knn)
{
    // Map every nearest neighbour of the query point to its room name
    // (duplicates included — one entry per neighbour).
    Dictionary <int, string> labelMap = Fingerprinting.ReadLabelMap();
    int[] list = labelMap.Keys.ToArray();

    var options = new List <string>();
    foreach (double[] neighbor in knn.GetNearestNeighbors(coordinates, out list))
    {
        options.Add(getRoomname(knn.Decide(neighbor)));
    }
    return options;
}
public void learn_string()
{
    #region doc_learn_text
    // k-NN works on any data type given a suitable distance; here we
    // classify plain strings.
    string[] inputs =
    {
        "Car",    // class 0
        "Bar",    // class 0
        "Jar",    // class 0

        "Charm",  // class 1
        "Chair"   // class 1
    };

    int[] outputs =
    {
        0, 0, 0,  // class 0
        1, 1,     // class 1
    };

    // k = 1: only the single closest string (by Levenshtein edit
    // distance) decides the class.
    var knn = new KNearestNeighbors <string>(k: 1, distance: new Levenshtein());
    knn.Learn(inputs, outputs);

    // "Chars" is closest to "Charm"/"Chair", i.e. class 1.
    int answer = knn.Decide("Chars");

    // On the training set itself the classifier should be perfect.
    var cm = ConfusionMatrix.Estimate(knn, inputs, outputs);
    double error = cm.Error;   // expected 0
    double acc = cm.Accuracy;  // expected 1
    double kappa = cm.Kappa;   // expected 1
    #endregion

    Assert.AreEqual(1, answer);
    Assert.AreEqual(0, error);
    Assert.AreEqual(1, acc);
    Assert.AreEqual(1, kappa);
}
/// <summary>
/// Recommends an event for the logged-in user: trains a 1-NN model on
/// (user age -> event id) pairs from existing event registrations, then
/// classifies the current user's age. Returns null when no user is logged
/// in, there is not enough data, or the decision fails.
/// </summary>
private Events CalcRecommendedEventForUser()
{
    // Only the single nearest neighbour (squared Euclidean distance over
    // the 1-D age input) casts the decision.
    const int kNeighbors = 1;
    var knn = new KNearestNeighbors <double[]>(kNeighbors, distance: new Accord.Math.Distances.SquareEuclidean());

    // The user name is stored in the session as raw UTF-8 bytes.
    if (!HttpContext.Session.TryGetValue("UserName", out var userNameNotEncoded))
    {
        return(null);
    }
    var userName = System.Text.Encoding.UTF8.GetString(userNameNotEncoded);

    // Event registrations together with their event and user records.
    var usersEvents = _context.EventToUser.Include(evToUser => evToUser.Event).Include(evToUser => evToUser.EventUserNameNavigation).OrderBy(s => s.EventUserNameNavigation.BlogUserAge);

    // Build training pairs (user age -> event id), iterated in event-id order.
    LinkedList <double[]> usersAge = new LinkedList <double[]>();
    LinkedList <int> eventIds = new LinkedList <int>();
    foreach (var userEvent in usersEvents.OrderBy(userEv => userEv.EventId))
    {
        usersAge.AddLast(new double[] { Convert.ToDouble(userEvent.EventUserNameNavigation.BlogUserAge) });
        eventIds.AddLast(userEvent.EventId);
    }
    var inputs = usersAge.Select(user => user.ToArray()).ToArray();

    // A single sample (or none) is not enough to recommend anything.
    if (inputs.Length <= 1)
    {
        return(null);
    }
    var outputs = eventIds.ToArray();
    knn.Learn(inputs, outputs);

    // Classify the current user's age; Decide may throw, in which case
    // no recommendation is made.
    var currUserObj = _context.Users.First(users => users.BlogUserName == userName);
    var currUserAge = new double[] { currUserObj.BlogUserAge };
    int decision;
    try
    {
        decision = knn.Decide(currUserAge);
    }
    catch (Exception)
    {
        return(null);
    }

    // The decided class is the id of the recommended event.
    var decidedEvent = _context.Events.First(someEvent => someEvent.Id == decision);
    return(decidedEvent);
}
/// <summary>
/// Trains a k-nearest-neighbors classifier (k = 5), evaluates it on the
/// test set, prints the error rate, and saves the model to disk.
/// </summary>
/// <param name="train_data">Feature vectors used to train the classifier.</param>
/// <param name="test_data">Feature vectors used to test the classifier.</param>
/// <param name="train_label">Labels of the train data.</param>
/// <param name="test_label">Labels of the test data.</param>
/// <param name="Classifier_Path">Directory where the classifier is saved.</param>
/// <param name="Classifier_Name">File name of the saved classifier.</param>
public void Knn(double[][] train_data, double[][] test_data, int[] train_label, int[] test_label, String Classifier_Path, String Classifier_Name)
{
    KNearestNeighbors knn = new KNearestNeighbors(k: 5);
    knn.Learn(train_data, train_label);

    // FIX: removed the dead probe classification of a hard-coded point
    // ({117.07..., 119.91...}) whose result was never read.

    // Evaluate on the held-out test set and print the error rate.
    var cm = GeneralConfusionMatrix.Estimate(knn, test_data, test_label);
    double error = cm.Error;
    Console.WriteLine(error);

    // Persist the trained model.
    knn.Save(Path.Combine(Classifier_Path, Classifier_Name));
}
public string knn()
{
    // Three single-sample classes keyed on age: 10 -> 0, 30 -> 1, 50 -> 2.
    double[][] inputs =
    {
        new double[] { 10 },
        new double[] { 30 },
        new double[] { 50 },
    };
    int[] outputs = { 0, 1, 2 };

    // With k = 1 the nearest training age alone decides the class.
    var knn = new KNearestNeighbors(k: 1);
    knn.Learn(inputs, outputs);

    // Anonymous visitors get the default video.
    if (!User.Identity.IsAuthenticated)
    {
        return("instegram.mp4");
    }

    int customerId = int.Parse(User.Claims.FirstOrDefault(claim => claim.Type == ClaimTypes.Sid).Value);

    // NOTE(review): this selects the customer's birth *year*; the
    // subtraction below converts it to an approximate age.
    int age = _context.Customers.Where(a => a.ID == customerId).Select(a => a.BirthDate.Year).Single();

    // Classify the customer's age into one of the three buckets and pick
    // the matching video.
    int answer = knn.Decide(new double[] { (DateTime.Now.Year - age) });
    switch (answer)
    {
        case 0:
            return("bracletRing.mp4");
        case 1:
            return("instegram.mp4");
        default:
            return("bracletRing2.mp4");
    }
}
public void learn_test()
{
    #region doc_learn_distance
    // Training data: 2 points of class 0, 4 of class 1, 3 of class 2.
    double[][] inputs =
    {
        new double[] { -5, -2, -1 },  // class 0
        new double[] { -5, -5, -6 },  // class 0

        new double[] { 2, 1, 1 },     // class 1
        new double[] { 1, 1, 2 },     // class 1
        new double[] { 1, 2, 2 },     // class 1
        new double[] { 3, 1, 2 },     // class 1

        new double[] { 11, 5, 4 },    // class 2
        new double[] { 15, 5, 6 },    // class 2
        new double[] { 10, 5, 6 },    // class 2
    };

    int[] outputs =
    {
        0, 0,          // class 0
        1, 1, 1, 1,    // class 1
        2, 2, 2        // class 2
    };

    // k = 4 neighbours compared with the squared Euclidean distance.
    var knn = new KNearestNeighbors <double[]>(k: 4, distance: new SquareEuclidean());
    knn.Learn(inputs, outputs);

    // A point inside the class-2 cluster must come back as class 2.
    int answer = knn.Decide(new double[] { 11, 5, 4 });
    #endregion

    Assert.AreEqual(2, answer);
}
public string classifyGesture(double[] inputVector)
{
    // Predict the numeric gesture class for the feature vector.
    int label = classifier.Decide(inputVector);

    // While accuracy tracking is on, record the input; once we have as
    // many inputs as expected outputs, compute the confusion matrix.
    if (calculateAccuracy)
    {
        testInputs.Add(inputVector);
        bool allSamplesCollected = testInputs.Count == testOutputs.Count;
        if (allSamplesCollected)
        {
            calculateConfusionMatrix();
        }
    }

    // Translate the numeric class label into a gesture name.
    return dataService.classLabelToGesture(label);
}
public void learn_string()
{
    #region doc_learn_text
    // k-NN works on any data type given a suitable distance; here we
    // classify plain strings.
    string[] inputs =
    {
        "Car",    // class 0
        "Bar",    // class 0
        "Jar",    // class 0

        "Charm",  // class 1
        "Chair"   // class 1
    };

    int[] outputs =
    {
        0, 0, 0,  // class 0
        1, 1,     // class 1
    };

    // k = 1: only the single closest string (by Levenshtein edit
    // distance) decides the class.
    var knn = new KNearestNeighbors <string>(k: 1, distance: new Levenshtein());
    knn.Learn(inputs, outputs);

    // "Chars" is closest to "Charm"/"Chair", i.e. class 1.
    int answer = knn.Decide("Chars");
    #endregion

    Assert.AreEqual(1, answer);
}
private static void knn(double[][] inputs, int[] outputs)
{
    // Build a 3-NN classifier; Euclidean is the default distance but is
    // stated explicitly here.
    var knn = new KNearestNeighbors
    {
        K        = 3,
        Distance = new Euclidean()
    };

    knn = knn.Learn(inputs, outputs);

    // Predict the training set itself and measure the agreement.
    int[] predicted = knn.Decide(inputs);
    var cm = new ConfusionMatrix(predicted: predicted, expected: outputs);

    double accuracy = cm.Accuracy; // expected to be 1.0 (100%) on training data
}
public void Main()
{
    WriteLine("Execution begins...");
    var fn = @"c:\DEMO\Data\train.csv";
    var f = File.ReadLines(fn);

    // Parse each CSV row (after the header) into a label + pixel vector.
    var data = from z in f.Skip(1)
               let zz = z.Split(',').Select(int.Parse)
               select new Digit
    {
        Label = zz.First(),
        Image = zz.Skip(1).ToArray()
    };

    // FIX: materialize the query once. The original applied Take(10000)
    // and Skip(10000) to the lazy LINQ-to-file query separately, which
    // re-opened and re-read the CSV from disk for each partition.
    var all   = data.Take(11000).ToArray();
    var train = all.Take(10000).ToArray();
    var test  = all.Skip(10000).ToArray();

    // Train a 1-NN classifier on the raw pixel vectors.
    var classifier = new KNearestNeighbors(1);
    classifier.Learn(
        (from x in train select x.Image.Select(z => (double)z).ToArray()).ToArray(),
        (from x in train select x.Label).ToArray());

    // Score the held-out rows and report accuracy.
    int count = 0, correct = 0;
    foreach (var z in test)
    {
        var n = classifier.Decide(z.Image.Select(t => (double)t).ToArray());
        WriteLine("{0} => {1}", z.Label, n);
        if (z.Label == n)
        {
            correct++;
        }
        count++;
    }

    WriteLine("Done, {0} of {1} correct ({2}%)", correct, count, (double)correct / (double)count * 100);
    ReadKey();
}
public int DoSimplePrediction(double[][] inputs, int output)
{
    // NOTE(review): the `output` parameter is not used by this method; it
    // is kept only to preserve the public signature.

    // Train a 5-NN classifier on the pre-built dataset from disk.
    Dataset trained = new Dataset(Constants.Constants.SimpleTrainedDataFilePath);
    var knn = new KNearestNeighbors
    {
        K        = 5,
        Distance = new Euclidean()
    };
    knn = knn.Learn(trained.Instances, trained.ClassLabels);

    // Predict every row and return the most frequent label (majority vote).
    int[] predicted = knn.Decide(inputs);
    return predicted
           .GroupBy(label => label)
           .OrderByDescending(group => group.Count())
           .First()
           .Key;
}
/// <summary>
/// Trains a K-nearest-neighbours classifier on (inputs, outputs), scores
/// it against the test set (whose expected labels are in
/// <paramref name="answer"/>), and writes the accuracy to <paramref name="fw"/>.
/// <paramref name="testData"/> (raw CSV rows) is retained only to keep the
/// signature stable for per-song diagnostics.
/// </summary>
private static void KNNCompute(int K, double[][] inputs, int[] outputs, double[][] test, int[] answer, List <string> testData, System.IO.StreamWriter fw)
{
    var knn = new KNearestNeighbors(K);
    knn.Learn(inputs, outputs);

    // Count how many test rows are classified with their expected label.
    int correctCount = 0;
    for (int i = 0; i < test.Length; i++)
    {
        if (knn.Decide(test[i]) == answer[i])
        {
            correctCount++;
        }
    }

    // FIX: use the array's Length property instead of LINQ Count().
    double accuracy = (double)correctCount / test.Length;
    fw.WriteLine($"KNN(K={K})的正确率:" + accuracy);
}
/// <summary>
/// Recommends a checkup type for a patient of the given age: each past
/// checkup contributes one (patient age -> checkup-type index) training
/// sample for a 1-NN classifier with Manhattan distance.
/// </summary>
public string RecommendedCheckupbyAge(int age)
{
    List <double[]> inputs = new List <double[]>();
    List <int> outputs = new List <int>();
    Dictionary <int, string> docs = new Dictionary <int, string>();

    // FIX: snapshot the checkups once. The original re-enumerated
    // db.Checkups.ToList() inside the per-type loop (one DB query per
    // distinct type); the large blocks of commented-out dead code were
    // also removed.
    List <Checkup> checkups = db.Checkups.ToList();

    // Distinct checkup types, in first-seen order.
    List <string> types = new List <string>();
    foreach (Checkup checkup in checkups)
    {
        if (!types.Contains(checkup.Type))
        {
            types.Add(checkup.Type);
        }
    }

    // One class index per type; every checkup of that type contributes
    // its patient's age as a training sample.
    int classcounter = 0;
    foreach (string type in types)
    {
        foreach (Checkup checkup in checkups)
        {
            if (checkup.Type == type)
            {
                outputs.Add(classcounter);
                Patient p = db.Patients.Find(checkup.Patient_ID);
                inputs.Add(new double[] { (p.Age) });
            }
        }
        docs[classcounter] = type;
        classcounter++;
    }

    double[][] inputs1 = inputs.ToArray();
    int[] outputs1 = outputs.ToArray();

    // 1-nearest-neighbour with Manhattan distance over the 1-D age input.
    var knn = new KNearestNeighbors(k: 1, distance: new Manhattan());
    knn.Learn(inputs1, outputs1);

    // Classify the requested age and map the class index back to the type name.
    int answer = knn.Decide(new double[] { age });
    return docs[answer];
}
/// <summary>
/// Recommends a doctor for a patient of the given age: each past
/// appointment contributes one (patient age -> doctor index) training
/// sample for a 1-NN classifier with Manhattan distance.
/// </summary>
public string RecommendedDocbyAge(int age)
{
    List <double[]> inputs = new List <double[]>();
    List <int> outputs = new List <int>();
    Dictionary <int, string> docs = new Dictionary <int, string>();

    // FIX: snapshot the appointments once. The original re-enumerated
    // db.Appointments.ToList() inside the per-doctor loop (one DB query
    // per doctor); the unused countersdoctors list and the blocks of
    // commented-out dead code were also removed.
    List <Appointment> appointments = db.Appointments.ToList();

    // One class index per doctor; every appointment with that doctor
    // contributes its patient's age as a training sample.
    int classcounter = 0;
    foreach (Doctor doctor in db.Doctors.ToList())
    {
        foreach (Appointment app in appointments)
        {
            if (app.Doctor_ID == doctor.ID)
            {
                outputs.Add(classcounter);
                Patient p = db.Patients.Find(app.Patient_ID);
                inputs.Add(new double[] { (p.Age) });
            }
        }
        docs[classcounter] = doctor.FirstName + ' ' + doctor.LastName;
        classcounter++;
    }

    double[][] inputs1 = inputs.ToArray();
    int[] outputs1 = outputs.ToArray();

    // 1-nearest-neighbour with Manhattan distance over the 1-D age input.
    var knn = new KNearestNeighbors(k: 1, distance: new Manhattan());
    knn.Learn(inputs1, outputs1);

    // Classify the given age and map the class index back to the doctor's name.
    int answer = knn.Decide(new double[] { age });
    return docs[answer];
}
public void learn_string()
{
    string basePath = NUnit.Framework.TestContext.CurrentContext.TestDirectory;

    #region doc_learn_text
    // The k-Nearest Neighbors algorithm can be used with
    // any kind of data. In this example, we will see how
    // it can be used to compare, for example, Strings.

    string[] inputs =
    {
        "Car",    // class 0
        "Bar",    // class 0
        "Jar",    // class 0

        "Charm",  // class 1
        "Chair"   // class 1
    };

    int[] outputs =
    {
        0, 0, 0,  // First three are from class 0
        1, 1,     // And next two are from class 1
    };

    // Now we will create the K-Nearest Neighbors algorithm. For this
    // example, we will be choosing k = 1. This means that, for a given
    // instance, only its nearest neighbor will be used to cast a new
    // decision.

    // In order to compare strings, we will be using Levenshtein's string distance
    var knn = new KNearestNeighbors <string>(k: 1, distance: new Levenshtein());

    // We learn the algorithm:
    knn.Learn(inputs, outputs);

    // After the algorithm has been created, we can use it:
    int answer = knn.Decide("Chars"); // answer should be 1.

    // Let's say we would like to compute the error matrix for the classifier:
    var cm = ConfusionMatrix.Estimate(knn, inputs, outputs);

    // We can use it to estimate measures such as
    double error = cm.Error;  // should be 0
    double acc = cm.Accuracy; // should be 1
    double kappa = cm.Kappa;  // should be 1
    #endregion

    Assert.AreEqual(1, answer);
    Assert.AreEqual(0, error);
    Assert.AreEqual(1, acc);
    Assert.AreEqual(1, kappa);

#if !NO_BINARY_SERIALIZATION
    // Round-trip the trained model through binary serialization and
    // verify the loaded copy classifies and scores identically.
    knn.Save(Path.Combine(basePath, "string_knn.bin"));
    var loaded_knn = Serializer.Load <KNearestNeighbors <string> >(Path.Combine(basePath, "string_knn.bin"));

    Assert.AreEqual(1, loaded_knn.Decide("Chars"));
    cm = ConfusionMatrix.Estimate(loaded_knn, inputs, outputs);
    Assert.AreEqual(0, cm.Error);
    Assert.AreEqual(1, cm.Accuracy);
    Assert.AreEqual(1, cm.Kappa);

    // The loaded model must also preserve every public property.
    Assert.AreEqual(knn.ClassCount, loaded_knn.ClassCount);
    Assert.AreEqual(knn.Distance, loaded_knn.Distance);
    Assert.AreEqual(knn.K, loaded_knn.K);
    Assert.AreEqual(knn.NumberOfClasses, loaded_knn.NumberOfClasses);
    Assert.AreEqual(knn.NumberOfInputs, loaded_knn.NumberOfInputs);
    Assert.AreEqual(knn.NumberOfOutputs, loaded_knn.NumberOfOutputs);
    Assert.AreEqual(knn.Outputs, loaded_knn.Outputs);
    Assert.AreEqual(knn.Token, loaded_knn.Token);
#endif
}
/// <summary>
/// Classifies the coordinates with the given k-NN model, logs the
/// resulting room name to the console, and returns it.
/// </summary>
public static string GetRoom(KNearestNeighbors knn, double[] coordinates)
{
    // FIX: classify once and reuse the result; the original invoked
    // knn.Decide (and getRoomname) twice for the same coordinates.
    string room = getRoomname(knn.Decide(coordinates));
    Console.WriteLine("Room: " + room);
    return room;
}
public void learn_test1()
{
    string basePath = NUnit.Framework.TestContext.CurrentContext.TestDirectory;

    #region doc_learn
    // Create some sample learning data. In this data, ,
    // the first two instances belong to a class, the
    // four next belong to another class and the last
    // three to yet another.

    double[][] inputs =
    {
        // The first two are from class 0
        new double[] { -5, -2, -1 },
        new double[] { -5, -5, -6 },

        // The next four are from class 1
        new double[] { 2, 1, 1 },
        new double[] { 1, 1, 2 },
        new double[] { 1, 2, 2 },
        new double[] { 3, 1, 2 },

        // The last three are from class 2
        new double[] { 11, 5, 4 },
        new double[] { 15, 5, 6 },
        new double[] { 10, 5, 6 },
    };

    int[] outputs =
    {
        0, 0,        // First two from class 0
        1, 1, 1, 1,  // Next four from class 1
        2, 2, 2      // Last three from class 2
    };

    // Now we will create the K-Nearest Neighbors algorithm. For this
    // example, we will be choosing k = 4. This means that, for a given
    // instance, its nearest 4 neighbors will be used to cast a decision.
    var knn = new KNearestNeighbors(k: 4);

    // We learn the algorithm:
    knn.Learn(inputs, outputs);

    // After the algorithm has been created, we can classify a new instance:
    int answer = knn.Decide(new double[] { 11, 5, 4 }); // answer will be 2.

    // Let's say we would like to compute the error matrix for the classifier:
    var cm = GeneralConfusionMatrix.Estimate(knn, inputs, outputs);

    // We can use it to estimate measures such as
    double error = cm.Error;  // should be 0
    double acc = cm.Accuracy; // should be 1
    double kappa = cm.Kappa;  // should be 1
    #endregion

    Assert.AreEqual(2, answer);
    Assert.AreEqual(0, error);
    Assert.AreEqual(1, acc);
    Assert.AreEqual(1, kappa);

#if !NO_BINARY_SERIALIZATION
    #region doc_serialization
    // After we have created and learned our model, let's say we would
    // like to save it to disk. For this, we can import the Accord.IO
    // namespace at the top of our source file namespace, and then use
    // Serializer's extension method Save:

    // Save to a file called "knn.bin" in the basePath directory:
    knn.Save(Path.Combine(basePath, "knn.bin"));

    // To load it back from the disk, we might need to use the Serializer class directly:
    var loaded_knn = Serializer.Load <KNearestNeighbors>(Path.Combine(basePath, "knn.bin"));

    // At this point, knn and loaded_knn should be
    // two different instances of identical objects.
    #endregion

    // Make sure the loaded classifier is still working
    Assert.AreEqual(2, loaded_knn.Decide(new double[] { 11, 5, 4 }));
    cm = GeneralConfusionMatrix.Estimate(loaded_knn, inputs, outputs);
    Assert.AreEqual(0, cm.Error);
    Assert.AreEqual(1, cm.Accuracy);
    Assert.AreEqual(1, cm.Kappa);

    // The loaded model must also preserve every public property.
    Assert.AreEqual(knn.ClassCount, loaded_knn.ClassCount);
    Assert.AreEqual(knn.Distance, loaded_knn.Distance);
    Assert.AreEqual(knn.K, loaded_knn.K);
    Assert.AreEqual(knn.NumberOfClasses, loaded_knn.NumberOfClasses);
    Assert.AreEqual(knn.NumberOfInputs, loaded_knn.NumberOfInputs);
    Assert.AreEqual(knn.NumberOfOutputs, loaded_knn.NumberOfOutputs);
    Assert.AreEqual(knn.Outputs, loaded_knn.Outputs);
    Assert.AreEqual(knn.Token, loaded_knn.Token);
#endif
}
protected override SmartphonePositions ClassifyMove(double[] dataToClassify)
{
    // Delegate to the trained classifier and map the numeric class label
    // onto the SmartphonePositions enumeration.
    int label = classifer.Decide(dataToClassify);
    return (SmartphonePositions)label;
}
/// <summary>
/// Recommends a destination airport id for the logged-in user: trains a
/// 3-NN model on (buyer age -> destination airport id) pairs from past
/// ticket purchases, then classifies the current user's age. Returns null
/// when there is not enough data or no user is logged in.
/// </summary>
private int? getRecommendedDestinationIdForCurrentUser()
{
    // Now we will create the K-Nearest Neighbors algorithm. For a given
    // instance, its nearest 3 neighbors will be used to cast a decision.
    const int k = 3;
    var knn = new KNearestNeighbors <double[]>(k, distance: new SquareEuclidean());

    // All tickets that have an associated buyer.
    var boughtTickets = _context.Ticket.Join(_context.User, ticket => ticket.Buyer.ID, user => user.ID, (ticket, user) => ticket)
                        .Where(ticket => ticket.Buyer != null);

    // Join tickets -> flights -> airports, keeping (buyer, destination airport) pairs.
    var ticketAndDestinationAirportList = boughtTickets
                                          .Join(_context.Flight, ticket => ticket.Flight.Id, flight => flight.Id, (ticket, flight) => new { ticket, flight })
                                          .Join(_context.Airport, ticketAndFlight => ticketAndFlight.flight.DestAirport.ID, airport => airport.ID, (ticketAndFlight, destAirport) => new { ticketAndFlight.ticket, destAirport })
                                          .Select(ticketAndDestAirport => new { ticketAndDestAirport.ticket.Buyer, ticketAndDestAirport.destAirport });

    // Check that we have enough data (fewer than or equal to k samples would throw an exception).
    if (ticketAndDestinationAirportList.Count() <= k)
    {
        return(null);
    }

    // Build the training pairs: buyer age -> destination airport id.
    LinkedList <double[]> usersAge = new LinkedList <double[]>();
    LinkedList <int> destAirportsId = new LinkedList <int>();
    foreach (var item in ticketAndDestinationAirportList)
    {
        double[] userAgeArray = new double[] { item.Buyer.Age };
        usersAge.AddLast(userAgeArray);
        destAirportsId.AddLast(item.destAirport.ID);
    }
    double[][] inputs = usersAge.Select(a => a.ToArray()).ToArray();
    int[] outputs = destAirportsId.ToArray();

    // Learning given inputs of ages of users and outputs of destination flights of them
    knn.Learn(inputs, outputs);

    // Get the current login user info
    int?currentUserId = HttpContext.Session.GetInt32("UserId");
    if (currentUserId != null)
    {
        var currentUser = _context.User.FirstOrDefault(u => u.ID == currentUserId);
        if (currentUser != null)
        {
            // Decide where the current user would like to travel to
            double[] currentUserAge = new double[] { currentUser.Age };
            return(knn.Decide(currentUserAge));
        }
    }
    return(null);
}
/// <summary>
/// Predicts products the current user may want to buy: clusters all purchases
/// with KMedoids, trains a KNN on the cluster labels, then recommends
/// not-yet-bought products whose predicted cluster matches one of the user's
/// own purchase clusters.
/// </summary>
/// <returns>
/// JSON with the predicted products, or an error object
/// (errorCode 1 = invalid user, errorCode 2 = not enough data).
/// </returns>
public JsonResult PredictPossibleProducts()
{
    var userId = 0;
    int knnNum = 5;      // neighbors used by the KNN classifier
    int clusterNum = 4;  // number of KMedoids clusters
    var userIdString = "";

    // Validate the session user id.
    if (HttpContext.Session["userid"] == null)
    {
        return Json(new { errorCode = 1, errorMessage = "יוזר לא חוקי" });
    }
    userIdString = HttpContext.Session["userid"].ToString();
    var didParsed = Int32.TryParse(userIdString, out userId);
    if (!didParsed)
    {
        return Json(new { errorCode = 1, errorMessage = "יוזר לא חוקי" });
    }

    var userGender = _context.Users
        .Where(x => x.Id == userId)
        .Select(x => x.Gender)
        .SingleOrDefault();

    // Training rows: one per purchase, with product and buyer attributes.
    var trainData = _context.Purchases
        .OrderBy(x => x.UserId)
        .Where(x => x.Product != null)
        .Select(x => new { userId = x.UserId.Value, size = x.Product.Size, type = x.Product.ProductTypeId, gender = x.Product.ProductType.Gender, genderUser = x.User.Gender })
        .ToList();
    if (trainData.Count < knnNum || trainData.Count < clusterNum)
    {
        return Json(new { errorCode = 2, errorMessage = "אין מספיק מידע" });
    }

    // Feature vector: [product gender, buyer gender, product type, size].
    var inputs = trainData.Select(x =>
    {
        double[] res = new double[] { Convert.ToInt32(x.gender), Convert.ToInt32(x.genderUser), x.type.Value, x.size };
        return res;
    })
        .ToArray();

    // Codify the mixed categorical/discrete columns into continuous values.
    var codification = new Codification<double>()
    {
        CodificationVariable.Categorical,
        CodificationVariable.Categorical,
        CodificationVariable.Categorical,
        CodificationVariable.Discrete
    };

    // Learn the codification from observations.
    var model = codification.Learn(inputs);

    // Transform the mixed observations into only continuous:
    double[][] newInputs = model.ToDouble().Transform(inputs);

    // Cluster all purchases, then teach a KNN to reproduce the cluster labels.
    KMedoids kmeans = new KMedoids(k: clusterNum);
    var clusters = kmeans.Learn(newInputs);
    int[] labels = clusters.Decide(newInputs);
    var knn5 = new KNearestNeighbors(k: knnNum);
    knn5.Learn(newInputs, labels);

    // Per-user purchase groups, to find which clusters each user has bought from.
    var purchasesById = _context.Purchases
        .Where(x => x.Product != null)
        .Select(x => new { userId = x.UserId.Value, size = x.Product.Size, type = x.Product.ProductTypeId, gender = x.Product.ProductType.Gender, genderUser = x.User.Gender })
        .GroupBy(x => x.userId)
        .ToList();
    IList<Tuple<int, int[]>> labelsForUsers = new List<Tuple<int, int[]>>();
    for (int i = 0; i < purchasesById.Count; i++)
    {
        var userInputs = purchasesById[i].Select(x =>
        {
            double[] res = new double[] { Convert.ToInt32(x.gender), Convert.ToInt32(x.genderUser), x.type.Value, x.size };
            return res;
        })
            .ToArray();
        double[][] newUserInputs = model.ToDouble().Transform(userInputs);
        labelsForUsers.Add(new Tuple<int, int[]>(purchasesById[i].Key, clusters.Decide(newUserInputs).Distinct().ToArray()));
    }

    // Candidate products: not already bought, but of a type the user has bought before.
    var productIdsUserBought = _context.Purchases
        .Where(x => x.UserId == userId)
        .Select(x => x.ProductId)
        .Distinct()
        .ToList();
    var validProductTypeIds = _context.Purchases
        .Where(x => x.UserId == userId)
        .Select(x => x.Product.ProductTypeId)
        .Distinct()
        .ToList();
    var productsToPredict = _context.Products
        .Where(x => !productIdsUserBought.Contains(x.Id))
        .Where(x => validProductTypeIds.Contains(x.ProductTypeId))
        .Select(x => new { id = x.Id, size = x.Size, type = x.ProductTypeId, gender = x.ProductType.Gender, genderUser = userGender })
        .ToList();
    var predInputs = productsToPredict.Select(x =>
    {
        double[] res = new double[] { Convert.ToInt32(x.gender), Convert.ToInt32(x.genderUser), x.type.Value, x.size };
        return res;
    })
        .ToArray();
    double[][] newPredInputs = model.ToDouble().Transform(predInputs);
    int[] newLabels = knn5.Decide(newPredInputs);

    IList<int> productIdsPrediction = new List<int>();

    // Fix: evaluate FirstOrDefault ONCE. The original ran the same
    // Where(...).FirstOrDefault() query twice (null test + .Item2 access).
    var userLabelsEntry = labelsForUsers.FirstOrDefault(x => x.Item1 == userId);
    int[] userLabels = userLabelsEntry != null ? userLabelsEntry.Item2 : new int[0];

    // Keep candidates whose predicted cluster is one the user already buys from.
    for (int i = 0; i < newLabels.Length; i++)
    {
        if (userLabels.Contains(newLabels[i]))
        {
            productIdsPrediction.Add(productsToPredict[i].id);
        }
    }

    var predictedProduct = _context.Products
        .Where(x => productIdsPrediction.Contains(x.Id))
        .Select(x => new { Id = x.Id, Name = x.Name, Price = x.Price, Size = x.Size, PictureName = x.PictureName })
        .ToList();
    return Json(new { products = predictedProduct }, JsonRequestBehavior.AllowGet);
}
// Classifies every sample of 10 selected recordings with the trained KNN and
// plots the predicted class (output1) against the true recording index (output2)
// on OutChart. NOTE(review): relies on the form-level fields `data` (per-channel
// signal features) and `knn` (already-trained classifier) — confirm both are
// initialized before this handler runs.
private void button1_Click(object sender, EventArgs e)
{
    double threshold = 1.3d; // threshold value for the amplitude (RMS) ratio
    List <int> output1 = new List <int>(); // predicted class per sample (1-based)
    List <int> output2 = new List <int>(); // expected class per sample (1-based)
    //List<int> outputFilter = new List<int>();
    // Recording indices to evaluate; presumably every 6th recording starting at 1
    // (the commented alternative starts at 0) — TODO confirm selection criteria.
    int[] nums = new int[] { 1, 7, 13, 19, 25, 31, 37, 43, 49, 55 };
    //int[] nums = new int[] { 0, 6, 12, 18, 24, 30, 36, 42, 48, 54 };
    int k;
    for (int n = 0; n < nums.Length; n++)
    {
        k = nums[n];
        // Starting at i = 1000 presumably skips an initial transient — TODO confirm.
        for (int i = 1000; i < data[k, 0].X.Count; i++)
        {
            double r0 = data[k, 0].RMS[i], r1 = data[k, 1].RMS[i], dr;
            // dr encodes which channel dominates: positive ratio when channel 0
            // is at least `threshold` times louder, negative when channel 1 is,
            // and 0 when neither dominates.
            if ((r0 >= r1) && (r0 / r1 >= threshold))
            {
                dr = r0 / r1;
            }
            else if ((r0 < r1) && (r1 / r0 >= threshold))
            {
                dr = -r1 / r0;
            }
            else
            {
                dr = 0;
            }
            //dr = r0 / r1;
            // Feature vector: per-channel Turns and ZeroCrossings plus the ratio.
            double[] input = { data[k, 0].Turns[i], data[k, 1].Turns[i], data[k, 0].ZeroCrossings[i], data[k, 1].ZeroCrossings[i], dr };
            output1.Add(knn.Decide(input) + 1); // +1 shifts class labels to 1-based for plotting
            output2.Add(n + 1);
        }
    }
    // Commented-out experiment: sliding-window majority filter over predictions.
    //int start;
    //int end;
    //int width = 700;
    //int[] outs = new int[nums.Length];
    //for (int o = 0; o < outs.Length; o++)
    //    outs[o] = 0;
    //for (int i = 0; i < output1.Count; i++)
    //{
    //    if (i < width) start = 0;
    //    else start = i - width;
    //    end = i;
    //    for (int j = start; j < end; j++)
    //        outs[output1[j]]++;
    //    outputFilter.Add(Array.IndexOf(outs, outs.Max())+1);
    //    for (int o = 0; o < outs.Length; o++)
    //        outs[o] = 0;
    //}
    // Series names are user-facing Russian labels ("Output1"/"Output2"); kept as-is.
    Series out1 = new Series("Выход1")
    {
        ChartType = SeriesChartType.Line, ChartArea = "Plot"
    };
    Series out2 = new Series("Выход2")
    {
        ChartType = SeriesChartType.Line, ChartArea = "Plot"
    };
    for (int i = 0; i < output1.Count; i++)
    {
        //out1.Points.AddXY(i, outputFilter[i]);
        out1.Points.AddXY(i, output1[i]);
        out2.Points.AddXY(i, output2[i]);
    }
    // Fixed Y range [-1, 11] fits the 1..10 class labels with a margin.
    OutChart.ChartAreas.Add("Plot");
    OutChart.ChartAreas["Plot"].AxisX.Maximum = output1.Count;
    OutChart.ChartAreas["Plot"].AxisX.Minimum = 0;
    OutChart.ChartAreas["Plot"].AxisY.Maximum = 11;
    OutChart.ChartAreas["Plot"].AxisY.Minimum = -1;
    OutChart.Series.Add(out1);
    OutChart.Series.Add(out2);
}