/// <summary>
/// LUCENE-5153: verifies that wrapping the delegate analyzer's Reader via an
/// AnalyzerWrapper is allowed; for input "abc" the wrapped analyzer is
/// expected to produce the single term "aabc".
/// </summary>
public void TestWrapReader()
{
    Random random = Random;

    // The anonymous wrapper replaces the inner analyzer's Reader
    // (presumably transforming the input — confirmed only via the expected term below).
    Analyzer inner = new MockAnalyzer(random);
    Analyzer wrapped = new AnalyzerWrapperAnonymousHelper(inner);

    CheckOneTerm(wrapped, "abc", "aabc");
}
/// <summary>
/// Indexes one document whose value is analyzed several different ways (one
/// field per analysis variant, all with full term vectors), runs the
/// FastVectorHighlighter over a MUST-conjunction of <paramref name="queryClauses"/>,
/// and asserts the first best fragment equals <paramref name="expected"/>.
/// </summary>
/// <param name="useMatchedFields">If true, highlight "field" using matches gathered from all variant fields; otherwise from "field" alone.</param>
/// <param name="fieldMatch">Passed through to <c>FieldQuery</c> (field-name matching).</param>
/// <param name="fieldValue">Raw text indexed into every variant field.</param>
/// <param name="expected">Expected best fragment, including pre/post tags.</param>
/// <param name="queryClauses">Clauses combined with Occur.MUST into one BooleanQuery.</param>
private void matchedFieldsTestCase(bool useMatchedFields, bool fieldMatch, String fieldValue, String expected, params Query[] queryClauses)
{
    Document doc = new Document();

    // Stored field type with full term vectors (offsets + positions); the
    // highlighter needs vectors to locate match offsets.
    FieldType stored = new FieldType(TextField.TYPE_STORED);
    stored.StoreTermVectorOffsets = (true);
    stored.StoreTermVectorPositions = (true);
    stored.StoreTermVectors = (true);
    stored.Freeze();

    // Same vectors, but not stored — used for the matched-field variants.
    FieldType matched = new FieldType(TextField.TYPE_NOT_STORED);
    matched.StoreTermVectorOffsets = (true);
    matched.StoreTermVectorPositions = (true);
    matched.StoreTermVectors = (true);
    matched.Freeze();

    doc.Add(new Field("field", fieldValue, stored));             // Whitespace tokenized with English stop words
    doc.Add(new Field("field_exact", fieldValue, matched));      // Whitespace tokenized without stop words
    doc.Add(new Field("field_super_exact", fieldValue, matched)); // Whitespace tokenized without toLower
    doc.Add(new Field("field_characters", fieldValue, matched)); // Each letter is a token
    doc.Add(new Field("field_tripples", fieldValue, matched));   // Every three letters is a token
    // Sliced at 10 chars then analyzed just like field.
    // (The original had a trailing "- 0", dead residue of the Java substring port.)
    doc.Add(new Field("field_sliced", fieldValue.Substring(0,
        Math.Min(fieldValue.Length - 1, 10)), matched));
    doc.Add(new Field("field_der_red", new CannedTokenStream(    // Hacky field containing "der" and "red" at pos = 0
            token("der", 1, 0, 3),
            token("red", 0, 0, 3)
        ), matched));

    Analyzer analyzer = new AnalyzerWrapperAnonymousHelper();

    // using blocks guarantee disposal (reader, then writer, then dir) even if
    // the assertion below fails — the original leaked all three on failure.
    using (Directory dir = NewDirectory())
    using (IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)))
    {
        writer.AddDocument(doc);

        FastVectorHighlighter highlighter = new FastVectorHighlighter();
        IFragListBuilder fragListBuilder = new SimpleFragListBuilder();
        IFragmentsBuilder fragmentsBuilder = new ScoreOrderFragmentsBuilder();

        using (IndexReader reader = DirectoryReader.Open(writer, true))
        {
            String[] preTags = new String[] { "<b>" };
            String[] postTags = new String[] { "</b>" };
            IEncoder encoder = new DefaultEncoder();
            int docId = 0;

            BooleanQuery query = new BooleanQuery();
            foreach (Query clause in queryClauses)
            {
                query.Add(clause, Occur.MUST);
            }
            FieldQuery fieldQuery = new FieldQuery(query, reader, true, fieldMatch);

            String[] bestFragments;
            if (useMatchedFields)
            {
                // Gather highlight hits from every analyzed variant, but render
                // them against the stored content of "field".
                ISet<String> matchedFields = new HashSet<String>();
                matchedFields.Add("field");
                matchedFields.Add("field_exact");
                matchedFields.Add("field_super_exact");
                matchedFields.Add("field_characters");
                matchedFields.Add("field_tripples");
                matchedFields.Add("field_sliced");
                matchedFields.Add("field_der_red");
                bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, "field", matchedFields, 25, 1,
                    fragListBuilder, fragmentsBuilder, preTags, postTags, encoder);
            }
            else
            {
                bestFragments = highlighter.GetBestFragments(fieldQuery, reader, docId, "field", 25, 1,
                    fragListBuilder, fragmentsBuilder, preTags, postTags, encoder);
            }

            assertEquals(expected, bestFragments[0]);
        }
    }
}