private void Index(Trie<IndexedEntry> trie, string word, IndexedEntry entry)
{
    if (string.IsNullOrWhiteSpace(word))
    {
        return;
    }

    var parts = Tokenizer.Tokenize(word).ToArray();

    for (var i = 0; i < parts.Length; i++)
    {
        // Index the token itself, unless it is a stop word or too short (<= 2 chars).
        if (!_stopWords.Contains(parts[i]) && parts[i].Length > 2)
        {
            trie.Insert(parts[i], entry);
        }

        // PascalCase? Split compound identifiers and index each sub-word as well.
        var pascalCaseParts = Tokenizer.TokenizePascalCase(parts[i]).ToArray();
        if (pascalCaseParts.Length > 1)
        {
            foreach (var pascalCasePart in pascalCaseParts)
            {
                trie.Insert(pascalCasePart, entry);
            }
        }

        // Build the partial sentence from this token to the end, so a prefix
        // search can match a phrase that starts part-way through the text.
        trie.Insert(string.Join(" ", parts.Skip(i).Take(parts.Length - i)), entry);
    }
}
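// Note: Tokenizer.TokenizePascalCase is not shown in this snippet. The call above
// suggests it splits a compound identifier on upper-case boundaries, e.g.
// "OpenFileDialog" into "Open", "File", "Dialog". The sketch below is a hypothetical
// illustration of that idea, not the project's actual implementation.
private static IEnumerable<string> SplitPascalCaseSketch(string value)
{
    var start = 0;
    for (var i = 1; i <= value.Length; i++)
    {
        // Close the current part at the end of the string or just before an upper-case letter.
        if (i == value.Length || char.IsUpper(value[i]))
        {
            yield return value.Substring(start, i - start);
            start = i;
        }
    }
}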
private static float CalculateDistance(IndexedEntry entry, string query)
{
#if DEBUG
    // Raw edit distance between the query and the closer of title/description
    // (falling back to the title when there is no description).
    return Math.Min(
        LevenshteinScorer.Distance(entry.Title, query),
        LevenshteinScorer.Distance(entry.Description ?? entry.Title, query));
#else
    // Only computed in debug builds; release builds short-circuit to 0.
    return 0;
#endif
}
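// LevenshteinScorer is also not shown here. For reference, a textbook two-row
// edit-distance implementation, illustrating the kind of metric Distance is assumed
// to return (smaller means closer), could look like this sketch:
private static int LevenshteinDistanceSketch(string a, string b)
{
    var previous = new int[b.Length + 1];
    var current = new int[b.Length + 1];

    // Distance from the empty prefix of "a" to each prefix of "b".
    for (var j = 0; j <= b.Length; j++)
    {
        previous[j] = j;
    }

    for (var i = 1; i <= a.Length; i++)
    {
        current[0] = i;
        for (var j = 1; j <= b.Length; j++)
        {
            var substitution = a[i - 1] == b[j - 1] ? 0 : 1;
            current[j] = Math.Min(
                Math.Min(current[j - 1] + 1, previous[j] + 1), // insertion / deletion
                previous[j - 1] + substitution);               // substitution (or match)
        }

        // The row just computed becomes the "previous" row for the next iteration.
        (previous, current) = (current, previous);
    }

    // e.g. LevenshteinDistanceSketch("kitten", "sitting") == 3
    return previous[b.Length];
}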
private void Index(Trie<IndexedEntry> trie, string word, IndexedEntry entry)
{
    if (string.IsNullOrWhiteSpace(word))
    {
        return;
    }

    var parts = Tokenizer.Tokenize(word).ToArray();

    for (var i = 0; i < parts.Length; i++)
    {
        // Index the token itself, unless it is a stop word or too short (<= 2 chars).
        if (!_stopWords.Contains(parts[i]) && parts[i].Length > 2)
        {
            trie.Insert(parts[i], entry);
        }

        // Also index the partial sentence from this token to the end.
        trie.Insert(string.Join(" ", parts.Skip(i).Take(parts.Length - i)), entry);
    }
}
private static float CalculateScore(IndexedEntry entry, string query)
{
    // Score the query against both the title and the description (falling back to
    // the title when the description is null) and keep the smaller value; with
    // Levenshtein-style scoring, smaller means a closer match.
    return Math.Min(
        LevenshteinScorer.Score(entry.Title, query),
        LevenshteinScorer.Score(entry.Description ?? entry.Title, query));
}
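// The Score overload is not shown either. If it is a length-normalized variant of the
// raw edit distance (an assumption, not confirmed by the source), it might resemble
// this sketch, which reuses the LevenshteinDistanceSketch helper defined above:
private static float NormalizedScoreSketch(string text, string query)
{
    var longest = Math.Max(text.Length, query.Length);
    return longest == 0
        ? 0f
        : LevenshteinDistanceSketch(text, query) / (float)longest; // 0 = exact match, 1 = nothing in common
}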