/// <summary>
/// Builds the analysis chain under test: whitespace tokenization (with a
/// randomized MockTokenizer flag) wrapped in a lookahead filter that never
/// calls Peek().
/// </summary>
public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
{
    Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, Random().NextBoolean());
    TokenStream sink = new NeverPeeksLookaheadTokenFilter(source);
    return new TokenStreamComponents(source, sink);
}
/// <summary>
/// Sanity check: a LookaheadTokenFilter subclass that never invokes Peek()
/// must still stream tokens correctly over randomly generated data.
/// </summary>
public virtual void TestNeverCallingPeek()
{
    Analyzer analyzer = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
    {
        Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, Random.NextBoolean());
        TokenStream filtered = new NeverPeeksLookaheadTokenFilter(source);
        return new TokenStreamComponents(source, filtered);
    });
    CheckRandomData(Random, analyzer, 200 * RandomMultiplier, 8192);
}
/// <summary>
/// Verifies that a LookaheadTokenFilter subclass which never calls Peek()
/// still produces a correct token stream over random data.
/// </summary>
public void TestNeverCallingPeek()
{
    Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
    {
        // Fixed: was Random.nextBoolean() (Java casing); the .NET randomizer
        // API exposes NextBoolean(), as the sibling tests in this file use.
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, Random.NextBoolean());
        TokenStream output = new NeverPeeksLookaheadTokenFilter(tokenizer);
        return new TokenStreamComponents(tokenizer, output);
    });
    // Nightly runs exercise much longer inputs than the default quick run.
    // NOTE(review): TEST_NIGHTLY / RANDOM_MULTIPLIER retain Java naming; if this
    // framework version exposes TestNightly / RandomMultiplier instead (as the
    // later variant of this test does), rename here as well.
    int maxLength = TEST_NIGHTLY ? 8192 : 1024;
    CheckRandomData(Random, a, 50 * RANDOM_MULTIPLIER, maxLength);
}
/// <summary>
/// Random-data smoke test for a lookahead filter that never invokes Peek().
/// </summary>
public void TestNeverCallingPeek()
{
    Analyzer analyzer = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
    {
        Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, Random.NextBoolean());
        TokenStream filtered = new NeverPeeksLookaheadTokenFilter(source);
        return new TokenStreamComponents(source, filtered);
    });
    // LUCENENET specific: upstream Lucene uses 8192 for nightly runs; reduced
    // to 4096 to keep the suite under the 1 hour free limit of Azure DevOps.
    int maxLength = TestNightly ? 4096 : 1024;
    CheckRandomData(Random, analyzer, 50 * RandomMultiplier, maxLength);
}