// Verifies that a freshly constructed data source has no entry for an unseen word.
public void TestEmptySource()
{
    wordsDataSource = new SimpleWordsDataSource();

    var probability = wordsDataSource.GetWordProbability("myWord");

    Assert.IsNull(probability);
}
// Verifies that repeated AddNonMatch calls for the same word accumulate in NonMatchingCount.
public void TestAddMultipleNonMatches()
{
    const string word = "myWord";
    const int count = 10;
    wordsDataSource = new SimpleWordsDataSource();

    for (var i = 0; i < count; i++)
    {
        wordsDataSource.AddNonMatch(word);
    }

    var probability = wordsDataSource.GetWordProbability(word);
    Assert.IsNotNull(probability);
    Assert.AreEqual(count, probability.NonMatchingCount);
}
// Smoke test: performs many writes against the data source without asserting results.
public void TestMultipleWrites()
{
    wordsDataSource = new SimpleWordsDataSource();
    const string word = "myWord";
    const int count = 500;

    // NOTE(review): the argument is "word + count", which is loop-invariant, so the
    // single key "myWord500" is written 500 times — confirm "word + i" (500 distinct
    // keys) was not the intent.
    for (var iteration = 0; iteration < count; iteration++)
    {
        wordsDataSource.AddNonMatch(word + count);
    }
}
// Smoke test: exercises a large number of writes; no assertions — success means no exception.
public void TestMultipleWrites()
{
    wordsDataSource = new SimpleWordsDataSource();
    var word = "myWord";
    var count = 500;

    // NOTE(review): "word + count" never changes across iterations (always "myWord500");
    // verify that "word + index" was not intended here.
    var index = 0;
    while (index < count)
    {
        wordsDataSource.AddNonMatch(word + count);
        index++;
    }
}
// Verifies AddMatch increments MatchingCount (and leaves NonMatchingCount at zero),
// and that the WordProbability returned earlier reflects subsequent additions.
public void TestAddMatch()
{
    wordsDataSource = new SimpleWordsDataSource();

    wordsDataSource.AddMatch("myWord");
    var probability = wordsDataSource.GetWordProbability("myWord");
    Assert.IsNotNull(probability);
    Assert.AreEqual(1, probability.MatchingCount);
    Assert.AreEqual(0, probability.NonMatchingCount);

    // The previously fetched object is re-checked: the data source hands out a live
    // reference, so a second AddMatch must be visible through it.
    wordsDataSource.AddMatch("myWord");
    Assert.AreEqual(2, probability.MatchingCount);
    Assert.AreEqual(0, probability.NonMatchingCount);
}
/// <summary>
/// Creates a classifier backed by the given word store, tokenizer, and stop-word provider.
/// </summary>
/// <param name="wd">Source of per-word match statistics. Must not be null.</param>
/// <param name="tokenizer">Splits input text into words. Must not be null.</param>
/// <param name="swp">Identifies stop words to ignore. Must not be null.</param>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public BayesianClassifier(IWordsDataSource wd, ITokenizer tokenizer, IStopWordProvider swp)
{
    // BUG FIX: the single-string ArgumentNullException constructor takes the *parameter
    // name*, not a message, so the old descriptive strings were reported as bogus
    // parameter names. Use the (paramName, message) overload with nameof instead.
    if (wd == null)
    {
        throw new ArgumentNullException(nameof(wd), "IWordsDataSource cannot be null.");
    }
    _wordsData = wd;
    if (tokenizer == null)
    {
        throw new ArgumentNullException(nameof(tokenizer), "ITokenizer cannot be null.");
    }
    _tokenizer = tokenizer;
    if (swp == null)
    {
        throw new ArgumentNullException(nameof(swp), "IStopWordProvider cannot be null.");
    }
    _stopWordProvider = swp;
}
// Verifies that N calls to AddNonMatch for one word produce NonMatchingCount == N.
public void TestAddMultipleNonMatches()
{
    wordsDataSource = new SimpleWordsDataSource();
    var word = "myWord";
    var repetitions = 10;

    var added = 0;
    while (added < repetitions)
    {
        wordsDataSource.AddNonMatch(word);
        added++;
    }

    var wordProbability = wordsDataSource.GetWordProbability(word);
    Assert.IsNotNull(wordProbability);
    Assert.AreEqual(repetitions, wordProbability.NonMatchingCount);
}
// Verifies that N calls to AddMatch for one word produce MatchingCount == N.
public void TestAddMultipleMatches()
{
    const string word = "myWord";
    const int expectedMatches = 10;
    wordsDataSource = new SimpleWordsDataSource();

    for (var n = 0; n < expectedMatches; n++)
    {
        wordsDataSource.AddMatch(word);
    }

    var probability = wordsDataSource.GetWordProbability(word);
    Assert.IsNotNull(probability);
    Assert.AreEqual(expectedMatches, probability.MatchingCount);
}
/// <summary>
/// Creates a classifier from its three collaborators, rejecting null arguments.
/// </summary>
/// <param name="wd">Source of per-word match statistics. Must not be null.</param>
/// <param name="tokenizer">Splits input text into words. Must not be null.</param>
/// <param name="swp">Identifies stop words to ignore. Must not be null.</param>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public BayesianClassifier(IWordsDataSource wd, ITokenizer tokenizer, IStopWordProvider swp)
{
    // BUG FIX: ArgumentNullException(string) interprets its argument as the parameter
    // name, so the descriptive text was surfacing as a parameter name. Pass the real
    // parameter name via nameof and keep the description as the message.
    if (wd == null)
    {
        throw new ArgumentNullException(nameof(wd), "IWordsDataSource cannot be null.");
    }
    _wordsData = wd;
    if (tokenizer == null)
    {
        throw new ArgumentNullException(nameof(tokenizer), "ITokenizer cannot be null.");
    }
    _tokenizer = tokenizer;
    if (swp == null)
    {
        throw new ArgumentNullException(nameof(swp), "IStopWordProvider cannot be null.");
    }
    _stopWordProvider = swp;
}
/// <summary>
/// Creates a classifier from its three collaborators, rejecting null arguments.
/// </summary>
/// <param name="wordsDataSource">Source of per-word match statistics. Must not be null.</param>
/// <param name="tokenizer">Splits input text into words. Must not be null.</param>
/// <param name="stopWordProvider">Identifies stop words to ignore. Must not be null.</param>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public BayesianClassifier(IWordsDataSource wordsDataSource, ITokenizer tokenizer, IStopWordProvider stopWordProvider)
{
    // Idiom: nameof instead of hard-coded name strings, so renames stay refactor-safe.
    if (wordsDataSource == null)
    {
        throw new ArgumentNullException(nameof(wordsDataSource));
    }
    _wordsData = wordsDataSource;
    if (tokenizer == null)
    {
        throw new ArgumentNullException(nameof(tokenizer));
    }
    _tokenizer = tokenizer;
    if (stopWordProvider == null)
    {
        throw new ArgumentNullException(nameof(stopWordProvider));
    }
    _stopWordProvider = stopWordProvider;
}
/// <summary>
/// Convenience constructor: uses the supplied data source and tokenizer with the
/// default stop-word provider.
/// </summary>
/// <param name="wd">Source of per-word match statistics.</param>
/// <param name="tokenizer">Splits input text into words.</param>
public BayesianClassifier(IWordsDataSource wd, ITokenizer tokenizer)
    : this(wd, tokenizer, new DefaultStopWordProvider())
{
}
/// <summary>
/// Convenience constructor: uses the supplied data source with a default tokenizer
/// configured to break on word boundaries.
/// </summary>
/// <param name="wd">Source of per-word match statistics.</param>
public BayesianClassifier(IWordsDataSource wd)
    : this(wd, new DefaultTokenizer(DefaultTokenizer.BREAK_ON_WORD_BREAKS))
{
}
/// <summary>
/// Convenience constructor: uses the supplied data source with a default tokenizer
/// configured to break on word boundaries.
/// </summary>
/// <param name="wordsDataSource">Source of per-word match statistics.</param>
public BayesianClassifier(IWordsDataSource wordsDataSource)
    : this(wordsDataSource, new DefaultTokenizer(DefaultTokenizer.BREAK_ON_WORD_BREAKS))
{
}
/// <summary>
/// Convenience constructor: uses the supplied data source and tokenizer with the
/// default stop-word provider.
/// </summary>
/// <param name="wordsDataSource">Source of per-word match statistics.</param>
/// <param name="tokenizer">Splits input text into words.</param>
public BayesianClassifier(IWordsDataSource wordsDataSource, ITokenizer tokenizer)
    : this(wordsDataSource, tokenizer, new DefaultStopWordProvider())
{
}
// Smoke test: issues many writes; passing simply means no exception was thrown.
public void TestMultipleWrites()
{
    wordsDataSource = new SimpleWordsDataSource();
    string prefix = "myWord";
    int totalWrites = 500;

    // NOTE(review): "prefix + totalWrites" is constant across iterations ("myWord500"
    // every time) — confirm the loop index was not meant to be appended instead.
    for (int writeIndex = 0; writeIndex < totalWrites; writeIndex++)
    {
        wordsDataSource.AddNonMatch(prefix + totalWrites);
    }
}