/// <summary>
///   Persists a trained k-nearest neighbors model to a file named
///   "knn.bin" inside the basePath directory, via Accord.IO's
///   Serializer.Save extension method.
/// </summary>
/// <param name="knn">The trained classifier to serialize to disk.</param>
public static void SaveKnn(KNearestNeighbors knn)
{
    // Serializer's Save extension (from the Accord.IO namespace,
    // imported at the top of this file) performs the binary
    // serialization of the learned model.
    string destination = Path.Combine(basePath, "knn.bin");
    knn.Save(destination);
}
/// <summary>
/// Trains a k-nearest neighbors classifier (k = 5) on the training data,
/// estimates its generalization error on the test data with a confusion
/// matrix, writes the error rate to the console, and saves the trained
/// model to disk.
/// </summary>
/// <param name="train_data">Feature vectors used to train the classifier.</param>
/// <param name="test_data">Feature vectors used to evaluate the classifier.</param>
/// <param name="train_label">Labels of the training data.</param>
/// <param name="test_label">Labels of the test data.</param>
/// <param name="Classifier_Path">Directory where the classifier is saved on disk.</param>
/// <param name="Classifier_Name">File name under which the classifier is saved.</param>
public void Knn(double[][] train_data, double[][] test_data, int[] train_label,
    int[] test_label, String Classifier_Path, String Classifier_Name)
{
    var knn = new KNearestNeighbors(k: 5);
    knn.Learn(train_data, train_label);

    // NOTE(review): a leftover debug probe was removed here — it called
    // knn.Decide with a hard-coded 2-feature vector, discarded the result,
    // and would misbehave for inputs of any other dimensionality.

    // Estimate the classification error on the held-out test set.
    var cm = GeneralConfusionMatrix.Estimate(knn, test_data, test_label);
    double error = cm.Error;
    Console.WriteLine(error);

    // Persist the learned model; it can be reloaded later with Serializer.Load.
    knn.Save(Path.Combine(Classifier_Path, Classifier_Name));
}
/// <summary>
///   Serializes the trained KNN model to a ".bin" file named after the
///   current experiment, inside the configured base path.
/// </summary>
public void SaveLearnedModel()
{
    // The experiment name doubles as the model's file name on disk.
    var fileName = _experimentName + ".bin";
    KnnModel.Save(Path.Combine(Constants.BasePath, fileName));
}
/// <summary>
///   Writes the trained classifier to disk at the configured model path.
/// </summary>
private void saveModel() => classifier.Save(modelPath);
/// <summary>
///   Learns, evaluates, serializes and deserializes a k-NN classifier
///   over strings compared with the Levenshtein edit distance.
///   NOTE(review): the #region block feeds the library's published
///   example documentation, so its code is kept verbatim.
/// </summary>
public void learn_string()
{
    // Test artifacts are written next to the test assembly.
    string basePath = NUnit.Framework.TestContext.CurrentContext.TestDirectory;

    #region doc_learn_text
    // The k-Nearest Neighbors algorithm can be used with
    // any kind of data. In this example, we will see how
    // it can be used to compare, for example, Strings.

    string[] inputs =
    {
        "Car",    // class 0
        "Bar",    // class 0
        "Jar",    // class 0

        "Charm",  // class 1
        "Chair"   // class 1
    };

    int[] outputs =
    {
        0, 0, 0,  // First three are from class 0
        1, 1,     // And next two are from class 1
    };

    // Now we will create the K-Nearest Neighbors algorithm. For this
    // example, we will be choosing k = 1. This means that, for a given
    // instance, only its nearest neighbor will be used to cast a new
    // decision.

    // In order to compare strings, we will be using Levenshtein's string distance
    var knn = new KNearestNeighbors<string>(k: 1, distance: new Levenshtein());

    // We learn the algorithm:
    knn.Learn(inputs, outputs);

    // After the algorithm has been created, we can use it:
    int answer = knn.Decide("Chars"); // answer should be 1.

    // Let's say we would like to compute the error matrix for the classifier:
    var cm = ConfusionMatrix.Estimate(knn, inputs, outputs);

    // We can use it to estimate measures such as
    double error = cm.Error;  // should be 0
    double acc = cm.Accuracy; // should be 1
    double kappa = cm.Kappa;  // should be 1
    #endregion

    Assert.AreEqual(1, answer);
    Assert.AreEqual(0, error);
    Assert.AreEqual(1, acc);
    Assert.AreEqual(1, kappa);

#if !NO_BINARY_SERIALIZATION
    // Round-trip the model through disk and verify that the loaded copy
    // both classifies identically and carries identical configuration.
    knn.Save(Path.Combine(basePath, "string_knn.bin"));

    var loaded_knn = Serializer.Load<KNearestNeighbors<string>>(Path.Combine(basePath, "string_knn.bin"));

    Assert.AreEqual(1, loaded_knn.Decide("Chars"));
    cm = ConfusionMatrix.Estimate(loaded_knn, inputs, outputs);
    Assert.AreEqual(0, cm.Error);
    Assert.AreEqual(1, cm.Accuracy);
    Assert.AreEqual(1, cm.Kappa);

    // Structural equality of original and deserialized classifiers.
    Assert.AreEqual(knn.ClassCount, loaded_knn.ClassCount);
    Assert.AreEqual(knn.Distance, loaded_knn.Distance);
    Assert.AreEqual(knn.K, loaded_knn.K);
    Assert.AreEqual(knn.NumberOfClasses, loaded_knn.NumberOfClasses);
    Assert.AreEqual(knn.NumberOfInputs, loaded_knn.NumberOfInputs);
    Assert.AreEqual(knn.NumberOfOutputs, loaded_knn.NumberOfOutputs);
    Assert.AreEqual(knn.Outputs, loaded_knn.Outputs);
    Assert.AreEqual(knn.Token, loaded_knn.Token);
#endif
}
/// <summary>
///   Learns, evaluates, serializes and deserializes a numeric k-NN
///   classifier on three well-separated classes of 3-D points.
///   NOTE(review): the #region blocks feed the library's published
///   example documentation, so their code is kept verbatim.
/// </summary>
public void learn_test1()
{
    // Test artifacts are written next to the test assembly.
    string basePath = NUnit.Framework.TestContext.CurrentContext.TestDirectory;

    #region doc_learn
    // Create some sample learning data. In this data,
    // the first two instances belong to a class, the
    // four next belong to another class and the last
    // three to yet another.

    double[][] inputs =
    {
        // The first two are from class 0
        new double[] { -5, -2, -1 },
        new double[] { -5, -5, -6 },

        // The next four are from class 1
        new double[] { 2, 1, 1 },
        new double[] { 1, 1, 2 },
        new double[] { 1, 2, 2 },
        new double[] { 3, 1, 2 },

        // The last three are from class 2
        new double[] { 11, 5, 4 },
        new double[] { 15, 5, 6 },
        new double[] { 10, 5, 6 },
    };

    int[] outputs =
    {
        0, 0,        // First two from class 0
        1, 1, 1, 1,  // Next four from class 1
        2, 2, 2      // Last three from class 2
    };

    // Now we will create the K-Nearest Neighbors algorithm. For this
    // example, we will be choosing k = 4. This means that, for a given
    // instance, its nearest 4 neighbors will be used to cast a decision.
    var knn = new KNearestNeighbors(k: 4);

    // We learn the algorithm:
    knn.Learn(inputs, outputs);

    // After the algorithm has been created, we can classify a new instance:
    int answer = knn.Decide(new double[] { 11, 5, 4 }); // answer will be 2.

    // Let's say we would like to compute the error matrix for the classifier:
    var cm = GeneralConfusionMatrix.Estimate(knn, inputs, outputs);

    // We can use it to estimate measures such as
    double error = cm.Error;  // should be 0 (asserted below)
    double acc = cm.Accuracy; // should be 1 (asserted below)
    double kappa = cm.Kappa;  // should be 1 (asserted below)
    #endregion

    Assert.AreEqual(2, answer);
    Assert.AreEqual(0, error);
    Assert.AreEqual(1, acc);
    Assert.AreEqual(1, kappa);

#if !NO_BINARY_SERIALIZATION
    #region doc_serialization
    // After we have created and learned our model, let's say we would
    // like to save it to disk. For this, we can import the Accord.IO
    // namespace at the top of our source file namespace, and then use
    // Serializer's extension method Save:

    // Save to a file called "knn.bin" in the basePath directory:
    knn.Save(Path.Combine(basePath, "knn.bin"));

    // To load it back from the disk, we might need to use the
    // Serializer class directly:
    var loaded_knn = Serializer.Load<KNearestNeighbors>(Path.Combine(basePath, "knn.bin"));

    // At this point, knn and loaded_knn should be
    // two different instances of identical objects.
    #endregion

    // Make sure the loaded classifier is still working
    Assert.AreEqual(2, loaded_knn.Decide(new double[] { 11, 5, 4 }));
    cm = GeneralConfusionMatrix.Estimate(loaded_knn, inputs, outputs);
    Assert.AreEqual(0, cm.Error);
    Assert.AreEqual(1, cm.Accuracy);
    Assert.AreEqual(1, cm.Kappa);

    // Structural equality of original and deserialized classifiers.
    Assert.AreEqual(knn.ClassCount, loaded_knn.ClassCount);
    Assert.AreEqual(knn.Distance, loaded_knn.Distance);
    Assert.AreEqual(knn.K, loaded_knn.K);
    Assert.AreEqual(knn.NumberOfClasses, loaded_knn.NumberOfClasses);
    Assert.AreEqual(knn.NumberOfInputs, loaded_knn.NumberOfInputs);
    Assert.AreEqual(knn.NumberOfOutputs, loaded_knn.NumberOfOutputs);
    Assert.AreEqual(knn.Outputs, loaded_knn.Outputs);
    Assert.AreEqual(knn.Token, loaded_knn.Token);
#endif
}