/// <summary>
/// Trains a naive Bayes classifier on the training data and saves the model to disk.
/// (Note: "Bias" in the method name is a historical misspelling of "Bayes"; the name is
/// kept to avoid breaking existing callers.)
/// </summary>
/// <param name="train_data">Feature vectors used to train the classifier. Values are
/// truncated to integers, as required by Accord's discrete naive Bayes.</param>
/// <param name="test_data">Feature vectors used to test the classifier.</param>
/// <param name="train_label">Class labels of the training data.</param>
/// <param name="test_label">Class labels of the test data. Currently unused; kept for
/// interface compatibility with callers.</param>
/// <param name="Classifier_Path">Directory where the trained classifier is saved.</param>
/// <param name="Classifier_Name">File name of the saved classifier.</param>
/// <returns></returns>
public void Naive_Bias(double[][] train_data, double[][] test_data, int[] train_label, int[] test_label, String Classifier_Path, String Classifier_Name)
{
    // Fixed seed so training is deterministic across runs.
    Accord.Math.Random.Generator.Seed = 0;

    // Generalized: the original conversion hard-coded exactly 2 feature columns;
    // this preserves every column of each feature vector.
    int[][] tr_da = TruncateToIntFeatures(train_data);
    int[][] te_da = TruncateToIntFeatures(test_data);

    // Create a learning algorithm and teach a model on the training examples.
    var learner = new NaiveBayesLearning();
    NaiveBayes nb = learner.Learn(tr_da, train_label);

    // Probability scores on the held-out set. The scores are computed but not
    // otherwise consumed here; kept so the model is exercised on test data.
    double[] scores = nb.Probability(te_da);

    // Persist the trained model.
    nb.Save(Path.Combine(Classifier_Path, Classifier_Name));
}

/// <summary>
/// Converts each feature vector from doubles to ints by truncation,
/// preserving all columns (not just the first two).
/// </summary>
/// <param name="data">Jagged array of feature vectors.</param>
/// <returns>Jagged array with every value truncated to <see cref="int"/>.</returns>
private static int[][] TruncateToIntFeatures(double[][] data)
{
    int[][] result = new int[data.Length][];
    for (int i = 0; i < data.Length; i++)
    {
        double[] row = data[i];
        int[] converted = new int[row.Length];
        for (int j = 0; j < row.Length; j++)
        {
            converted[j] = (int)row[j];
        }
        result[i] = converted;
    }
    return result;
}
public void laplace_smoothing_missing_sample()
{
    #region doc_laplace
    // Check that the Laplace rule copes with a symbol value that never
    // appears in the training set: the second column may hold 0, 1 or 2,
    // but the examples below only ever contain 1 and 2.

    int[][] inputs =
    {
        //        input       output
        new [] { 0, 1 }, //     0
        new [] { 0, 2 }, //     0
        new [] { 0, 1 }, //     0
        new [] { 1, 2 }, //     1
        new [] { 0, 2 }, //     1
        new [] { 0, 2 }, //     1
        new [] { 1, 1 }, //     2
        new [] { 0, 1 }, //     2
        new [] { 1, 1 }, //     2
    };

    // Corresponding class labels for each row above.
    int[] outputs =
    {
        0, 0, 0, 1, 1, 1, 2, 2, 2,
    };

    // The data alone cannot reveal every symbol the model should expect,
    // so the model is specified by hand: the first column can assume 2
    // distinct values, and the second column can assume 3.
    var bayes = new NaiveBayes(classes: 3, symbols: new[] { 2, 3 });

    // Build a learning algorithm around the hand-specified model and
    // turn on the Laplace rule before training.
    var learning = new NaiveBayesLearning() { Model = bayes };
    learning.Options.InnerOption.UseLaplaceRule = true;

    // Train the naive Bayes model.
    learning.Learn(inputs, outputs);

    // Classify a sample whose second column holds a seen value.
    int answer = bayes.Decide(new int[] { 0, 1 });
    #endregion

    Assert.AreEqual(0, answer);

    double prob = bayes.Probability(new int[] { 0, 1 }, out answer);
    Assert.AreEqual(0, answer);
    Assert.AreEqual(0.52173913043478259, prob, 1e-10);

    double error = new ZeroOneLoss(outputs) { Mean = true }.Loss(bayes.Decide(inputs));
    Assert.AreEqual(2 / 9.0, error);
}
/// <summary>
/// Returns the model probability for the given feature vector, with each band
/// intensity scaled from the ushort range into [0, 1] before evaluation.
/// </summary>
/// <param name="feature">Feature vector whose band intensities are evaluated.</param>
/// <returns>The probability reported by the underlying Bayes model.</returns>
// NOTE(review): "Probabilty" is a misspelling of "Probability", but this is an
// override — renaming it would break the contract with the base class, so the
// name is kept as-is.
public override double Probabilty(FeatureVector feature)
{
    var intensities = feature.BandIntensities;
    var normalized = new double[intensities.Length];
    for (int i = 0; i < intensities.Length; i++)
    {
        normalized[i] = (double)intensities[i] / ushort.MaxValue;
    }

    return _bayes.Probability(normalized);
}