public void TestWhichMTQMatched()
        {
            // Verifies that when MultiTermQueries (wildcards here) produce highlight hits,
            // a custom PassageFormatter can see WHICH rewritten term matched each span.
            //
            // NOTE(review): this method was previously declared twice in this file (CS0111
            // duplicate-member compile error); the stale second copy — which used the old
            // Random()/iw.Reader APIs and carried the commented-out Java original — has
            // been removed in favor of this ported version.
            Directory dir = NewDirectory();
            // use simpleanalyzer for more natural tokenization (else "test." is a token)
            Analyzer          analyzer = new MockAnalyzer(Random, MockTokenizer.SIMPLE, true);
            IndexWriterConfig iwc      = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);

            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter iw = new RandomIndexWriter(Random, dir, iwc);

            // Postings-based highlighting requires offsets to be indexed with the postings.
            FieldType offsetsType = new FieldType(TextField.TYPE_STORED);

            offsetsType.IndexOptions = (IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
            Field    body = new Field("body", "", offsetsType);
            Document doc  = new Document();

            doc.Add(body);

            body.SetStringValue("Test a one sentence document.");
            iw.AddDocument(doc);

            IndexReader ir = iw.GetReader();

            iw.Dispose();

            // Helper subclass overrides GetIndexAnalyzer(field) to return our analyzer.
            IndexSearcher          searcher    = NewSearcher(ir);
            ICUPostingsHighlighter highlighter = new PostingsHighlighterAnalyzerHelper(analyzer);
            BooleanQuery           query       = new BooleanQuery();

            query.Add(new WildcardQuery(new Term("body", "te*")), Occur.SHOULD);
            query.Add(new WildcardQuery(new Term("body", "one")), Occur.SHOULD);
            query.Add(new WildcardQuery(new Term("body", "se*")), Occur.SHOULD);
            TopDocs topDocs = searcher.Search(query, null, 10, Sort.INDEXORDER);

            assertEquals(1, topDocs.TotalHits);
            String[] snippets = highlighter.Highlight("body", query, searcher, topDocs);
            assertEquals(1, snippets.Length);

            // Default formatter just bolds each hit:
            assertEquals("<b>Test</b> a <b>one</b> <b>sentence</b> document.", snippets[0]);

            // Now use our own formatter, that also stuffs the
            // matching term's text into the result:
            highlighter = new PostingsHighlighterAnalyzerAndFormatterHelper(analyzer, new PassageFormatterHelper());

            assertEquals(1, topDocs.TotalHits);
            snippets = highlighter.Highlight("body", query, searcher, topDocs);
            assertEquals(1, snippets.Length);

            // Custom formatter appends "(field:term)" — the rewritten MTQ term — after each bolded hit:
            assertEquals("<b>Test(body:te*)</b> a <b>one(body:one)</b> <b>sentence(body:se*)</b> document.", snippets[0]);

            ir.Dispose();
            dir.Dispose();
        }