// Parses ARFF-style training text from the TextTrainingData box: loads the
// "@attribute" declarations into a fresh classifier, fills the class
// selector (defaulting to the last attribute), and exposes the "@data"
// rows as tokenized samples via the `data` field.
private void ButtonSettingTrainingData_Click(object sender, EventArgs e)
{
    classifier = new NaiveBayesClassifier();

    // Split on '\n', then strip the trailing '\r' left behind by Windows
    // CRLF line endings. BUG FIX: the original trimmed '\n', which can
    // never remain after splitting on '\n', so '\r' leaked into tokens.
    // Materialized once: the original deferred query was re-enumerated
    // three times below.
    var allLines = TextTrainingData.Text
        .Split(new char[] { '\n' }, StringSplitOptions.RemoveEmptyEntries)
        .Select(x => x.TrimEnd('\r'))
        .ToList();

    SelectClass.Enabled = true;

    // "@attribute <name> {a,b,c}" lines describe nominal attributes; the
    // option list is the text between the last '{' and its matching '}'.
    classifier.LoadAttribute(
        allLines.Where(x => x.StartsWith("@attribute"))
            .Select(x =>
            {
                var name = x.Split(' ')[1].Trim();
                int ia = x.LastIndexOf('{');
                int ib = x.IndexOf('}', ia);
                return new NaiveBayesClassifier.AttributeData
                {
                    AttributeName = name,
                    NominalOption = x.Substring(ia + 1, ib - ia - 1)
                        .Split(SplitChars, StringSplitOptions.RemoveEmptyEntries),
                };
            }));

    // Offer every attribute as a candidate class; pre-select the last one.
    foreach (var o in allLines.Where(x => x.StartsWith("@attribute")))
    {
        var name = o.Split(' ')[1].Trim();
        SelectClass.Items.Add(name);
    }
    SelectClass.SelectedIndex = SelectClass.Items.Count - 1;

    // Everything after the "@data" marker is a delimited sample row.
    data = allLines.SkipWhile(x => !x.StartsWith("@data")).Skip(1)
        .Select(x => x.Split(SplitChars, StringSplitOptions.RemoveEmptyEntries)
            .Select(y => y.Trim(SplitChars)));
}
// Trains a linear-kernel naive Bayes classifier on the iris data set,
// re-classifies the same samples, and logs per-sample results plus the
// overall hit ratio.
public void NaiveBayesIrisTest()
{
    var dataSetLoader = new DataSetLoader();
    Console.WriteLine(" Reading DataSet.. ");
    var irisis = dataSetLoader.SelectIrises();

    // Single-iteration loop kept from the original (runs exactly once).
    for (double i = 0; i < 1; i = i + 1)
    {
        var naiveBayes = new NaiveBayesClassifier(irisis, new LinearBayesKernel(irisis));
        var irisesTests = dataSetLoader.SelectIrises();
        var trueCounter = 0;
        var counter = 0;

        foreach (var item in irisesTests)
        {
            var outputValue = naiveBayes.Classify(item.Item1);
            if (outputValue == item.Item2)
            {
                trueCounter++;
            }
            Debug.WriteLine(string.Format("Value {0} - Predicted {1} = {2}",
                item.Item2, outputValue,
                (outputValue == item.Item2) ? "true" : "false"));
            counter++;
        }

        Debug.WriteLine(string.Format(" i = {0} Data {1} - True {2} Verhältnis: {3}",
            i, counter.ToString(), trueCounter.ToString(),
            (Convert.ToDouble(trueCounter) / Convert.ToDouble(counter)).ToString()));
    }
}
// Trains a naive Bayes classifier on the credit data set, re-classifies
// the same data, and logs each prediction plus the overall hit ratio.
public void CreditDataNaiveBayesTest()
{
    var dataSetLoader = new DataSetLoader();
    Console.WriteLine(" Reading DataSet.. ");
    var creditData = dataSetLoader.SelectCreditData();

    var naiveBayes = new NaiveBayesClassifier(creditData);
    var creditDataTest = dataSetLoader.SelectCreditData();
    var trueCounter = 0;
    var counter = 0;

    foreach (var item in creditDataTest)
    {
        var outputValue = naiveBayes.Classify(item.Item1);
        if (outputValue == item.Item2)
        {
            trueCounter++;
        }
        Debug.WriteLine(string.Format("Value {0} - Predicted {1} = {2}",
            item.Item2, outputValue,
            (outputValue == item.Item2) ? "true" : "false"));
        counter++;
    }

    Debug.WriteLine(string.Format("Data {0} - True {1} Verhältnis: {2}",
        counter.ToString(), trueCounter.ToString(),
        (Convert.ToDouble(trueCounter) / Convert.ToDouble(counter)).ToString()));
}
// Smoke-tests the old and new naive Bayes implementations on the same CSV
// table: classifies one sample row with both, then counts how many of the
// first 50 rows the old model misclassifies.
private static void TestNaiveBayes()
{
    var data = LoadDataFromfCSV("Data.csv");
    var fixedData = TableFixedData.FromTableData(data);
    var samples = TableFixedData.ToSample(fixedData);
    var columnsTypes = fixedData.ColumnDataTypes;

    var algorithm = new NaiveBayesClassifierOld(fixedData);
    var algorithm1 = new NaiveBayesClassifier(samples, fixedData.ClassesValue.Length, columnsTypes);

    // Materialize once — the original called data.ToList() on every loop
    // iteration, re-copying the whole table 50+ times.
    var rows = data.ToList();

    var dataRow = rows[2];
    var className = algorithm.Compute(dataRow);
    var classId = algorithm1.Compute(fixedData.GetSample(dataRow));
    var className1 = fixedData.ClassesValue[classId];

    int missed = 0;
    for (int index = 0; index < 50; index++)
    {
        var row = rows[index];
        var estimatedClassName = algorithm.Compute(row);
        if (estimatedClassName != row.Class)
        {
            missed++;
        }
    }
}
// Trains four classifier types (decision tree, naive Bayes, SVM, neural
// network) on a 50% split of the animal data set and runs the shared
// Classifier harness against the held-out portion.
public void AnimalClassifyMethod()
{
    var loader = new DataSetLoader();
    var animals = loader.SelectAnimals();
    var data = loader.CalculatePercent(50, animals);

    var decisionTreeClassifier = new DecisionTreeClassifier(data.Item1, new ShannonEntropySplitter());
    var naiveBayes = new NaiveBayesClassifier(data.Item1);

    Kernel kernel = new LinearKernel();
    var animalSVMClassifier = new SVMClassifier(animals, kernel, 0.001, 10.0);

    var neuronalAnimals = loader.SelectNeuronalNetworkAnimals();
    var neuronalNetworkClassifier = new NeuronalNetworkClassifier(neuronalAnimals, 16, 7, 16, 500, 0.1);

    var list = new List<NetML.Classification>
    {
        decisionTreeClassifier,
        naiveBayes,
        animalSVMClassifier,
        neuronalNetworkClassifier,
    };

    var classifier = new Classifier();
    classifier.Classify(list, data.Item2);
}
// Trains decision tree, naive Bayes, and SVM classifiers on the credit
// data set (the neural network is built but deliberately excluded) and
// evaluates them with the shared Classifier harness on the full set.
public void CreditDataClassifyMethod()
{
    var loader = new DataSetLoader();
    var creditData = loader.SelectCreditData();
    var data = loader.CalculatePercent(100, creditData);

    var decisionTreeClassifier = new DecisionTreeClassifier(data.Item1, new ShannonEntropySplitter());
    var naiveBayes = new NaiveBayesClassifier(data.Item1);

    Kernel kernel = new LinearKernel();
    var svmClassifier = new SVMClassifier(creditData, kernel, 0.001, 10.0);

    var neuronalCreditData = loader.SelectNeuronalNetworksCreditData();
    var neuronalNetworkClassifier = new NeuronalNetworkClassifier(neuronalCreditData, 20, 2, 20, 5000, 0.1);

    var list = new List<NetML.Classification>
    {
        decisionTreeClassifier,
        naiveBayes,
        svmClassifier,
    };
    //list.Add(neuronalNetworkClassifier);

    var classifier = new Classifier();
    classifier.Classify(list, creditData);
}
/// <summary>
/// Builds a language identifier backed by a naive Bayes classifier over
/// the n-gram features of the supplied language models.
/// </summary>
public NaiveBayesLanguageIdentifier(IEnumerable<LanguageModel<string>> languageModels, int maxNGramLength, int onlyReadFirstNLines)
{
    MaxNGramLength = maxNGramLength;
    OnlyReadFirstNLines = onlyReadFirstNLines;

    // Map each language to its feature table; smoothing constant is 1.
    var featuresByLanguage = languageModels.ToDictionary(lm => lm.Language, lm => lm.Features);
    _classifier = new NaiveBayesClassifier<IEnumerable<string>, string, LanguageInfo>(featuresByLanguage, 1);
}
/// <summary>
/// Builds a language identifier backed by a naive Bayes classifier over
/// ValueString n-gram features of the supplied language models.
/// </summary>
public NaiveBayesLanguageIdentifier(IEnumerable<LanguageModel<ValueString>> languageModels, int maxNGramLength, int onlyReadFirstNLines)
{
    MaxNGramLength = maxNGramLength;
    OnlyReadFirstNLines = onlyReadFirstNLines;

    // Map each language to its feature table; smoothing constant is 1.
    var featuresByLanguage = languageModels.ToDictionary(lm => lm.Language, lm => lm.Features);
    _classifier = new NaiveBayesClassifier<IEnumerable<ValueString>, ValueString, LanguageInfo>(featuresByLanguage, 1);
}
// Start is called before the first frame update: cache the Guard
// component and reset all prediction state for a fresh run.
void Start()
{
    guard = GetComponent<Guard>();
    nGramPredictor = new NGramPredictor(nValue);
    bayesClassifier = new NaiveBayesClassifier();
    roomsEntered = 0;
    lastRoomsVisited = new List<int>();
}
// Runs the classification check twice — once with the plain feature
// selection and once with the "better" variant — reusing the same
// per-feature probabilities loaded from file for the collection.
public void SelectFeaturesAndCheckClasification(double[,] correlationTable, int featureCount, List<List<double>> dataSet, string collectionName, string method)
{
    var bayes = new NaiveBayesClassifier();

    var selectedFeatures = SelectFeatures(correlationTable, featureCount, 0.5);
    var probabilityOfFeatureValues = bayes.GetProbabilityOfFeatureValuesFormFile(collectionName);
    CheckNewModel(selectedFeatures, dataSet, probabilityOfFeatureValues, collectionName, false, method);

    var selectedFeaturesBetter = SelectFeaturesBetter(correlationTable, featureCount, 0.5);
    CheckNewModel(selectedFeaturesBetter, dataSet, probabilityOfFeatureValues, collectionName, true, method);
}
// Classifies dataSet using only the selected features, compares each
// prediction against the class labels stored in dataSet[0], and writes a
// textual report (match list, agreement percentage, chosen columns) to
// disk. NOTE(review): the output path is hard-coded to a local drive
// ("E://cos//...") — confirm this is intentional for this environment.
public void CheckNewModel(List<IdNumber> selectedFeaturesWithCorrelation, List<List<double>> dataSet, List<ProbabilityOfFeatureValue> probabilityOfFeatureValues, string collectionName, bool better, string method)
{
    NaiveBayesClassifier naiveBayesClassifier = new NaiveBayesClassifier();
    var classProbabilty = naiveBayesClassifier.GetClassProbabilty(dataSet);
    Console.WriteLine("classificationResult start ");
    var classificationResult = naiveBayesClassifier.ClassifyTableWithSelectedFeatures(dataSet, classProbabilty, probabilityOfFeatureValues, selectedFeaturesWithCorrelation);
    Console.WriteLine("classificationResult end ");
    var csv = new StringBuilder();
    var newLine = string.Empty;
    csv.AppendLine("Wynik klasyfikacji:");
    var sumPercent = 0;
    Console.WriteLine("sprawdzanie korelacji");
    // The first row of dataSet holds the expected class: 1.00 maps to
    // true, anything else to false. NOTE(review): exact double equality —
    // assumes labels are stored as exactly 1.00; verify against the loader.
    List<bool> classToCheck = new List<bool>();
    foreach (var item in dataSet[0])
    {
        if (item.Equals(1.00))
        {
            classToCheck.Add(true);
        }
        else
        {
            classToCheck.Add(false);
        }
    }
    // Count matches and log every prediction next to the original label.
    for (int i = 0; i < classificationResult.Count; i++)
    {
        newLine = $"{classificationResult[i] == classToCheck[i]} -------> {(classificationResult[i]? 1 : -1)};{dataSet[0][i]}";
        if (classificationResult[i] == classToCheck[i])
        {
            sumPercent++;
        }
        csv.AppendLine(newLine);
    }
    // Summary: agreement percentage and the list of selected columns.
    csv.AppendLine("Zgodność wyników z ograinalnym zbiorem:");
    csv.AppendLine($"{(double)sumPercent/ (double)classificationResult.Count * 100} %");
    csv.AppendLine($"Wybrano {selectedFeaturesWithCorrelation.Count} cechy:");
    foreach (var item in selectedFeaturesWithCorrelation)
    {
        newLine = $"Column{item.Id}";
        csv.AppendLine(newLine);
    }
    File.WriteAllText($"E://cos//wynikiDobreDoMGR//Classification_After_FS_{(better? 2 : 1)}_{collectionName}_{method}.txt", csv.ToString());
}
// Method for initializing the AI: declares the observable attributes and
// builds a naive Bayes classifier over the possible player weapons.
private void InitAI()
{
    // Each attribute enumerates the names of its enum's values.
    enemyTypeAttrib = new Attrib("enemyType", Enum.GetNames(typeof(EnemyType)));
    speedAttrib = new Attrib("speed", Enum.GetNames(typeof(SpeedClassification)));

    // Labels are the weapon names; observations pair enemy type and speed.
    nbClassifier = new NaiveBayesClassifier(
        Enum.GetNames(typeof(PlayerWeapon)),
        new Attrib[] { enemyTypeAttrib, speedAttrib });
}
// Reads the configured Mongo collection into a list-of-lists data set,
// then generates (and times) the naive Bayes probability tables for it.
private void Button_Click_3(object sender, RoutedEventArgs e)
{
    string collectionName = CollectionName.Text;
    int featureCount = int.Parse(FeatureCount.Text);
    //int featureToSelectCount = int.Parse(FeatureToSelectCount.Text);

    var databaseService = new DatabaseService();
    var dataSet = databaseService.ConvertMongoColectionToListOfLists(featureCount, collectionName);

    var naiveBayesClassifier = new NaiveBayesClassifier();
    var stopWatch = new Stopwatch();
    stopWatch.Start();
    naiveBayesClassifier.GenerateProbabilites(dataSet, collectionName);
    stopWatch.Stop();
    //var result = naiveBayesClassifier.GetProbabilityOfFeatureValuesFormFile(collectionName);
}
// Exercises the Gaussian-kernel naive Bayes classifier on three synthetic
// samples (one per class) and several probe vectors. Smoke test only —
// the results are computed but not asserted, matching the original.
public void NaiveBayesGaussianTest()
{
    var movies = new List<Tuple<double[], double>>
    {
        new Tuple<double[], double>(new double[] { 5.0, 1.0, 1.0 }, 0),
        new Tuple<double[], double>(new double[] { 1.0, 5.0, 1.0 }, 1),
        new Tuple<double[], double>(new double[] { 1.0, 1.0, 5.0 }, 2),
    };
    var naiveBayes = new NaiveBayesClassifier(movies, new GaussianBayesKernel(movies));

    // First the three training points themselves, then unseen vectors.
    double[][] probes =
    {
        new double[] { 5.0, 1.0, 1.0 },
        new double[] { 1.0, 5.0, 1.0 },
        new double[] { 1.0, 1.0, 5.0 },
        new double[] { 0.5, 9.0, 0.1 },
        new double[] { 0.4, 0.2, 2.1 },
        new double[] { 0.3, 0.3, 2.5 },
    };
    foreach (var probe in probes)
    {
        var result = naiveBayes.Classify(probe);
    }
}
// Verifies that a classifier trained on two categories assigns new token
// bags to the category whose training vocabulary they overlap most.
public void ClassifyTest()
{
    var classifier = new NaiveBayesClassifier();

    var trainingData = new List<Tuple<string, IEnumerable<string>>>
    {
        new Tuple<string, IEnumerable<string>>("Food", new[] { "apple", "orange" }),
        new Tuple<string, IEnumerable<string>>("Food", new[] { "apple", "cake", "banana" }),
        new Tuple<string, IEnumerable<string>>("Animal", new[] { "cat", "dog" }),
        new Tuple<string, IEnumerable<string>>("Animal", new[] { "bird", "cat", "apple" }),
    };
    classifier.Train(trainingData);

    var category = classifier.Classify(new[] { "dog" });
    Assert.AreEqual("Animal", category);

    category = classifier.Classify(new[] { "apple", "banana" });
    Assert.AreEqual("Food", category);
}
// Trains a naive Bayes model on the data source's training set, then
// classifies every verification vector; predictions whose precision
// exceeds the threshold are folded into the returned statistics meter.
public ApplicationProtocolClassificationStatisticsMeter BayesianClassify(AppIdentDataSource appIdentDataSource, double trainingToVerificationRatio, double precisionTrashHold, AppIdentTestContext appIdentTestContext = null)
{
    var precMeasure = new ApplicationProtocolClassificationStatisticsMeter();
    //this.CreateDatasetAndTestset(appIdentDataSource, trainingToVerificationRatio, out var trainingSet, out var verificationSet);
    var classifier = new NaiveBayesClassifier(appIdentDataSource.TrainingSet);

    foreach (var featureVector in appIdentDataSource.VerificationSet)
    {
        // Remember the true label (separators stripped), then hide it
        // from the model before prediction.
        var appTag = featureVector.Label.Replace("_", "").Replace("-", "");
        featureVector.Label = "Unknown";

        classifier.Normalizator.Normalize(featureVector);
        var cl = classifier.ClassifierModel.Predict(featureVector, true);
        if (cl.Precision > precisionTrashHold)
        {
            precMeasure.UpdateStatistics(cl.Label, appTag);
        }
    }

    appIdentTestContext?.Save(precMeasure);
    return precMeasure;
}
// Click handler: classifies every test tweet for each language with both
// unigram and bigram naive Bayes models, accumulates two 6x6 confusion
// matrices (true language x predicted language) in class-level fields,
// and writes per-tweet predictions to two result files.
// NOTE(review): the training-count array must align with the order of
// the Languages field — confirm against where Languages is defined.
private void btn_testDatafromFiles_Click(object sender, EventArgs e)
{
    int[] countOfTrainingTweetforLanguage = { 374, 1493, 456, 12855, 971, 2169 };
    FileWriter uniGramResultWriter = new FileWriter("results-unigram");
    FileWriter biGramResultWriter = new FileWriter("results-bigram");
    StringBuilder builderUniResult = new StringBuilder();
    StringBuilder builderBiResult = new StringBuilder();
    builderUniResult.Append("TweetID" + " " + "Likely Language").Append("\n");
    builderBiResult.Append("TweetID" + " " + "Likely Language").Append("\n");
    // Reset both confusion matrices before this run.
    for (int i = 0; i < 6; i++)
        for (int j = 0; j < 6; j++)
        {
            labelingUniMatrixFrequency[i, j] = 0.0;
            labelingBiMatrixFrequency[i, j] = 0.0;
        }
    for (int i = 0; i < Languages.Length; i++)
    {
        // Load the cleaned test tweets for language i from the folder.
        FetchFromFolderFiles fetchFromFolder = new FetchFromFolderFiles("Testingnlp");
        Hashtable languageTweetsClean = new Hashtable();
        languageTweetsClean = fetchFromFolder.getTestingDataFor(Languages[i]);
        NaiveBayesClassifier NBC = new NaiveBayesClassifier(countOfTrainingTweetforLanguage, totalTweet, gramDictionary);
        foreach (DictionaryEntry entry in languageTweetsClean)
        {
            // Unigram model: predicted language = index of max confidence.
            Double[] uniConfidence = NBC.ApplyBayesOnUnigram(entry.Value.ToString());
            int IndexOfMaxUniConfidence = NBC.getMaxConfidence(uniConfidence);
            labelingUniMatrixFrequency[i, IndexOfMaxUniConfidence] = labelingUniMatrixFrequency[i, IndexOfMaxUniConfidence] + 1;
            builderUniResult.Append(entry.Key.ToString() + " " + Languages[IndexOfMaxUniConfidence]);
            builderUniResult.Append("\n");
            // Bigram model: same bookkeeping against the bigram matrix.
            Double[] biConfidence = NBC.ApplyBayesOnBigram(entry.Value.ToString());
            int IndexOfMaxBiiConfidence = NBC.getMaxConfidence(biConfidence);
            labelingBiMatrixFrequency[i, IndexOfMaxBiiConfidence] = labelingBiMatrixFrequency[i, IndexOfMaxBiiConfidence] + 1;
            builderBiResult.Append(entry.Key.ToString() + " " + Languages[IndexOfMaxBiiConfidence]);
            builderBiResult.Append("\n");
        }
    }
    // Flush the per-tweet predictions and close both writers.
    uniGramResultWriter.resultsWriter(builderUniResult.ToString());
    biGramResultWriter.resultsWriter(builderBiResult.ToString());
    uniGramResultWriter.closeAnalysisWriter();
    biGramResultWriter.closeAnalysisWriter();
    MessageBox.Show("Done");
}
/// <summary>Creates the wrapper around a fresh naive Bayes model.</summary>
public BowNaiveBayesModel()
{
    mModel = new NaiveBayesClassifier<LblT>();
}
/// <summary>Wraps an existing naive Bayes model; the model must be non-null.</summary>
public BowNaiveBayesModel(NaiveBayesClassifier<LblT> model)
{
    mModel = Preconditions.CheckNotNull(model);
}
// Experiment driver: discretizes continuous train/test data, compares four
// SVM kernels against naive Bayes on prediction accuracy, then estimates
// bias/variance per kernel from bootstrap samples of the training data.
static void Main(string[] args)
{
    List<double[]> continuousTrainData = DataWrangler.LoadContinuousDataAsync(TrainingCsv, _indexToIgnore).Result;
    List<double[]> continuousTestData = DataWrangler.LoadContinuousDataAsync(TestingCsv, _indexToIgnore).Result;
    // Print continuous columns for calculating elbows in an external tool
    // (https://bl.ocks.org/rpgove/0060ff3b656618e9136b).
    foreach (int i in _continuousIndexes)
    {
        using (StreamWriter sw = new StreamWriter($"{i}.txt"))
        {
            sw.WriteLine(string.Join(",", continuousTrainData.Select(array => array[i])));
        }
    }
    // Convert continuous columns to discrete cluster ids.
    Dictionary<int, GaussianClusterCollection> indexClusterMapping = DataWrangler.GetIndexClustersMap(continuousTrainData, _indexElbowMap);
    List<int[]> discreteTrainData = DataWrangler.ConvertContinuesToDiscrete(continuousTrainData, indexClusterMapping);
    List<int[]> discreteTestData = DataWrangler.ConvertContinuesToDiscrete(continuousTestData, indexClusterMapping);
    // NOTE(review): these projections mutate the arrays in-place while
    // building the libsvm problem (class column swapped to the front).
    var problem = ProblemHelper.ReadProblem(discreteTrainData.Select(arr =>
    {
        // Move class to front as it is expected by libsvm.
        int temp = arr[0];
        arr[SVMSupportedClassIndex] = arr[OriginalClassIndex];
        arr[OriginalClassIndex] = temp;
        return (arr.Select(i => (double)i).ToList());
    }).ToList());
    var test = ProblemHelper.ReadProblem(discreteTestData.Select(arr =>
    {
        // Move class to front as it is expected by libsvm.
        int temp = arr[0];
        arr[SVMSupportedClassIndex] = arr[OriginalClassIndex];
        arr[OriginalClassIndex] = temp;
        return (arr.Select(i => (double)i).ToList());
    }).ToList());
    // Defaults taken from documentation:
    // http://weka.sourceforge.net/doc.stable/weka/classifiers/functions/LibSVM.html
    double c = 1;                   // default C is 1
    double gamma = 1.0 / problem.l; // default gamma is 1/k
    double r = 0;                   // default coef0 is 0
    int degree = 3;                 // default degree is 3
    Dictionary<string, Kernel> nameKernelMap = new Dictionary<string, Kernel>(StringComparer.OrdinalIgnoreCase)
    {
        { "Linear", KernelHelper.LinearKernel() },
        { "Polynomial", KernelHelper.PolynomialKernel(degree, gamma, r) },
        { "Radial", KernelHelper.RadialBasisFunctionKernel(gamma) },
        { "Sigmoid", KernelHelper.SigmoidKernel(gamma, r) },
    };
    // Get accuracies for base comparison.
    // DON'T DO PARALLEL. We don't know if the underlying implementation is MT safe or not.
    //Parallel.ForEach(nameKernelMap.Keys, (kernelName) =>
    foreach (string kernelName in nameKernelMap.Keys)
    {
        Console.WriteLine($"{kernelName}: {GetSVMAccuracy(problem, test, nameKernelMap[kernelName], c)}");
    }
    ;
    // Get accuracy with naive Bayes (uniform class weights and priors).
    double[] classWeightPrior = new[] { 1.0, 1.0 };
    double[] classPriorProbability = new[] { 0.5, 0.5 };
    NaiveBayesClassifier naiveBayes = NaiveBayesClassifier.Load(discreteTrainData, SVMSupportedClassIndex, classWeightPrior, classPriorProbability);
    Console.WriteLine($"Naive Bayes: {naiveBayes.GetPredictionAccuracy(discreteTestData, SVMSupportedClassIndex)}");
    // Calculate SVM bias and variance: train one SVM per bootstrap sample
    // and record, for every test instance, each classifier's prediction in
    // kernel -> instance -> classifier -> prediction nested maps.
    List<List<int[]>> samples = Sampler.SampleData(discreteTrainData, BiasVarianceNumOfSamples);
    ConcurrentDictionary<string, ConcurrentDictionary<int, ConcurrentDictionary<int, int>>> kernelInstanceClassifierPredictionsMappings = new ConcurrentDictionary<string, ConcurrentDictionary<int, ConcurrentDictionary<int, int>>>(StringComparer.OrdinalIgnoreCase);
    foreach (string kernelName in nameKernelMap.Keys)
    {
        ConcurrentDictionary<int, ConcurrentDictionary<int, int>> instanceClassifierPredictionMappings = kernelInstanceClassifierPredictionsMappings.GetOrAdd(kernelName, new ConcurrentDictionary<int, ConcurrentDictionary<int, int>>());
        for (int classifierIndex = 0; classifierIndex < BiasVarianceNumOfSamples; classifierIndex++)
        {
            problem = ProblemHelper.ReadProblem(samples[classifierIndex].Select(arr => arr.Select(i => (double)i).ToList()).ToList());
            var svm = new C_SVC(problem, nameKernelMap[kernelName], c);
            for (int instanceIndex = 0; instanceIndex < discreteTestData.Count; instanceIndex++)
            {
                ConcurrentDictionary<int, int> classifierPredictionMappings = instanceClassifierPredictionMappings.GetOrAdd(instanceIndex, new ConcurrentDictionary<int, int>());
                // One-instance problem per test row, re-read each time.
                test = ProblemHelper.ReadProblem(new List<List<double>> { discreteTestData[instanceIndex].Select(i => (double)i).ToList() });
                for (int i = 0; i < test.l; i++)
                {
                    var x = test.x[i];
                    var y = test.y[i];
                    classifierPredictionMappings.GetOrAdd(classifierIndex, (int)svm.Predict(x));
                }
            }
        }
    }
    // Summarize bias/variance/accuracy per kernel.
    Console.WriteLine("Kernel, Bias, Variance, Accuracy");
    foreach (string kernelName in nameKernelMap.Keys)
    {
        ConcurrentDictionary<int, ConcurrentDictionary<int, int>> instanceClassifierPredictionMappings = kernelInstanceClassifierPredictionsMappings.GetOrAdd(kernelName, new ConcurrentDictionary<int, ConcurrentDictionary<int, int>>());
        Tuple<double, double, double> biasVarianceAccuracy = BiasVarianceHelper.GetBiasVarianceAccuracy(discreteTestData, SVMSupportedClassIndex, instanceClassifierPredictionMappings);
        Console.WriteLine($"{kernelName}, {biasVarianceAccuracy.Item1}, {biasVarianceAccuracy.Item2}, {biasVarianceAccuracy.Item3}");
    }
    Console.WriteLine("Press ENTER to continue...");
    Console.ReadLine();
}
// Builds a bag-of-words naive Bayes model from (text, classLabel) pairs.
// Implements simple negation handling: after a negation word (or a word
// ending in "n't"), subsequent words are prefixed with "not_" until the
// negation state flips again. Words starting with "@" (mentions) are
// skipped entirely. Only the first `count` items of trainingSet are used;
// the caller must ensure trainingSet yields at least `count` items, since
// samples[] is sized to exactly `count`.
public void Train(IEnumerable<Tuple<string, string>> trainingSet, int count)
{
    _wordDictionary = new Dictionary<string, int>();
    _classes = new Dictionary<string, int>();
    DataSample[] samples = new DataSample[count];
    int wordId = 0;
    int classId = 0;
    var trainingItemIndex = 0;
    trainingSet = trainingSet.Take(count);
    foreach (var trainingItem in trainingSet)
    {
        string[] sentences = { trainingItem.Item1 };
        var classValue = trainingItem.Item2;
        // Assign a dense id to each distinct class label on first sight.
        if (!_classes.ContainsKey(classValue))
        {
            _classes.Add(classValue, classId);
            classId++;
        }
        var dataSample = new DataSample
        {
            ClassId = _classes[classValue]
        };
        var sampleDataPoints = new List<DataPoint>();
        foreach (var sentence in sentences)
        {
            var sentenceWords = TextParser.SplitToWords(sentence);
            var isNegated = false;
            for (int index = 0; index < sentenceWords.Count; index++)
            {
                var currentWord = sentenceWords[index].ToLower();
                // Skip @mentions; they carry no sentiment signal here.
                if (currentWord.StartsWith("@"))
                {
                    continue;
                }
                if (_negationWords.Contains(currentWord))
                {
                    isNegated = !isNegated;
                }
                else
                {
                    // Contractions like "don't" also toggle negation but
                    // are not themselves emitted with the "not_" prefix.
                    if (currentWord.EndsWith("n't"))
                    {
                        isNegated = !isNegated;
                    }
                    else
                    {
                        currentWord = isNegated ? "not_" + currentWord : currentWord;
                    }
                    // Dense word ids, assigned on first occurrence.
                    if (!_wordDictionary.ContainsKey(currentWord))
                    {
                        _wordDictionary.Add(currentWord, wordId);
                        wordId++;
                    }
                    sampleDataPoints.Add(new DataPoint
                    {
                        ColumnId = _wordDictionary[currentWord],
                        Value = 1
                    });
                }
            }
        }
        dataSample.DataPoints = sampleDataPoints.ToArray();
        samples[trainingItemIndex] = dataSample;
        trainingItemIndex++;
    }
    // Every word column is a binary (present/absent) discrete feature.
    _columnsDataTypes = new ColumnDataType[wordId];
    for (var index = 0; index < wordId; index++)
    {
        _columnsDataTypes[index] = new ColumnDataType
        {
            IsDiscrete = true,
            NumberOfCategories = 2
        };
    }
    // Two classes are assumed by the final classifier construction.
    _naiveBayesClassifier = new NaiveBayesClassifier(samples, 2, _columnsDataTypes);
}
// Demo: trains a tiny naive Bayes "should I brake?" model from seven
// (label, distance, speed) observations and prints one prediction.
static void Main(string[] args)
{
    // Create two attributes and specify their possible values.
    Attrib distance = new Attrib("distance", new string[] { "near", "far" });
    Attrib speed = new Attrib("speed", new string[] { "slow", "fast" });

    // Create a naive Bayes classifier with a set of labels and a set of
    // attributes.
    NaiveBayesClassifier nbc = new NaiveBayesClassifier(
        new string[] { "Y", "N" },
        new Attrib[] { distance, speed });

    // Pass the observations to the classifier in the original order:
    // each row is { label, distance value, speed value }.
    string[][] observations =
    {
        new[] { "Y", "near", "slow" },
        new[] { "Y", "near", "fast" },
        new[] { "N", "far", "fast" },
        new[] { "Y", "far", "fast" },
        new[] { "N", "near", "slow" },
        new[] { "Y", "far", "slow" },
        new[] { "Y", "near", "fast" },
    };
    foreach (var o in observations)
    {
        nbc.Update(o[0], new Dictionary<Attrib, string>()
        {
            { distance, o[1] }, { speed, o[2] }
        });
    }

    // Make a prediction given a set of attribute-value pairs.
    string prediction = nbc.Predict(new Dictionary<Attrib, string>()
    {
        { distance, "far" }, { speed, "slow" }
    });

    // Show prediction.
    Console.WriteLine($"Brake? {prediction}");
}