/// <summary>
/// Runs the given pre-built <paramref name="input"/> tokens through a
/// <see cref="SlowSynonymFilter"/> configured with <paramref name="dict"/> and
/// asserts the resulting terms, start/end offsets, and position increments.
/// </summary>
internal static void AssertTokenizesTo(SlowSynonymMap dict, IList<Token> input, string[] expected, int[] startOffsets, int[] endOffsets, int[] posIncs)
{
    // Wrap the fixed token list in a stream, then apply the synonym filter.
    TokenStream source = new IterTokenStream(input);
    SlowSynonymFilter filtered = new SlowSynonymFilter(source, dict);
    AssertTokenStreamContents(filtered, expected, startOffsets, endOffsets, posIncs);
}
/// <summary>
/// Whitespace-tokenizes <paramref name="input"/>, applies a
/// <see cref="SlowSynonymFilter"/> built from <paramref name="dict"/>, and
/// asserts the expected terms and position increments.
/// </summary>
internal static void AssertTokenizesTo(SlowSynonymMap dict, string input, string[] expected, int[] posIncs)
{
    // MockTokenizer in WHITESPACE mode, no lowercasing.
    Tokenizer whitespaceTokenizer = new MockTokenizer(new StringReader(input), MockTokenizer.WHITESPACE, false);
    SlowSynonymFilter synonymStream = new SlowSynonymFilter(whitespaceTokenizer, dict);
    AssertTokenStreamContents(synonymStream, expected, posIncs);
}
/// <summary>
/// Tests a multi-word synonym rule ("a b c,d") against input that only
/// partially matches the rule ("a e"); the partial match must not consume
/// or rewrite the tokens.
/// </summary>
public virtual void TestMultiWordSynonymsOld()
{
    IList<string> rules = new JCG.List<string> { "a b c,d" };
    SlowSynonymMap synMap = new SlowSynonymMap(true);
    SlowSynonymFilterFactory.ParseRules(rules, synMap, "=>", ",", true, null);

    SlowSynonymFilter ts = new SlowSynonymFilter(
        new MockTokenizer(new StringReader("a e"), MockTokenizer.WHITESPACE, false),
        synMap);

    // This fails because ["e","e"] is the value of the token stream
    AssertTokenStreamContents(ts, new string[] { "a", "e" });
}
/// <summary>
/// Tests a multi-word synonym rule ("a b c,d") against input that only
/// partially matches the rule ("a e"); the partial match must not consume
/// or rewrite the tokens.
/// </summary>
// NOTE(review): a second TestMultiWordSynonymsOld definition (using JCG.List)
// appears elsewhere in this file — duplicate method names will not compile in
// the same class; confirm which variant is intended.
public virtual void TestMultiWordSynonymsOld()
{
    IList<string> rules = new List<string> { "a b c,d" };
    SlowSynonymMap synMap = new SlowSynonymMap(true);
    SlowSynonymFilterFactory.ParseRules(rules, synMap, "=>", ",", true, null);

    SlowSynonymFilter ts = new SlowSynonymFilter(
        new MockTokenizer(new StringReader("a e"), MockTokenizer.WHITESPACE, false),
        synMap);

    // This fails because ["e","e"] is the value of the token stream
    AssertTokenStreamContents(ts, new string[] { "a", "e" });
}
/// <summary>
/// Feeds the supplied <paramref name="input"/> token list through a
/// <see cref="SlowSynonymFilter"/> driven by <paramref name="dict"/> and checks
/// terms, offsets, and position increments against the expectations.
/// </summary>
// NOTE(review): an identical AssertTokenizesTo overload appears earlier in this
// file — duplicate definitions will not compile in the same class; verify.
internal static void AssertTokenizesTo(SlowSynonymMap dict, IList<Token> input, string[] expected, int[] startOffsets, int[] endOffsets, int[] posIncs)
{
    TokenStream tokenSource = new IterTokenStream(input);
    SlowSynonymFilter synonymFiltered = new SlowSynonymFilter(tokenSource, dict);
    AssertTokenStreamContents(synonymFiltered, expected, startOffsets, endOffsets, posIncs);
}
/// <summary>
/// Splits <paramref name="input"/> on whitespace, runs the tokens through a
/// <see cref="SlowSynonymFilter"/> built from <paramref name="dict"/>, and
/// checks the produced terms and position increments.
/// </summary>
// NOTE(review): an identical AssertTokenizesTo overload appears earlier in this
// file — duplicate definitions will not compile in the same class; verify.
internal static void AssertTokenizesTo(SlowSynonymMap dict, string input, string[] expected, int[] posIncs)
{
    // Whitespace tokenization without lowercasing.
    Tokenizer source = new MockTokenizer(new StringReader(input), MockTokenizer.WHITESPACE, false);
    SlowSynonymFilter filtered = new SlowSynonymFilter(source, dict);
    AssertTokenStreamContents(filtered, expected, posIncs);
}