// search that reports the position of each match within the text
        /// <summary>
        /// Runs <paramref name="querystr"/> against the "LineText" field and shows,
        /// via message boxes, the term positions and character offsets of every hit.
        /// </summary>
        /// <param name="db">Not used by this method; kept for caller compatibility.</param>
        /// <param name="querystr">Query text; position reporting assumes a single term.</param>
        /// <param name="indexDirectory">Directory holding the Lucene index to search.</param>
        public void DoSearch(String db, String querystr, global::Lucene.Net.Store.Directory indexDirectory)
        {
            // 1. Specify the analyzer for tokenizing text.
            //    The same analyzer should be used as was used for indexing.
            StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_30, ListStopWords);

            // 2. Build the query against the "LineText" field.
            Query q = new QueryParser(Version.LUCENE_30, "LineText", analyzer).Parse(querystr);

            // 3. Search. Both IndexSearcher and IndexReader are disposable; the
            //    original leaked the reader (never closed) and leaked the searcher
            //    on any exception, so both now live in using blocks.
            int hitsPerPage = 10;

            using (IndexSearcher searcher = new IndexSearcher(indexDirectory, true))
            using (IndexReader reader = IndexReader.Open(indexDirectory, true))
            {
                searcher.SetDefaultFieldSortScoring(true, false);
                TopScoreDocCollector collector = TopScoreDocCollector.Create(hitsPerPage, true);

                searcher.Search(q, collector);
                ScoreDoc[] hits = collector.TopDocs().ScoreDocs;

                // 4. Display term positions and term indexes.
                MessageBox.Show("Found " + hits.Length + " hits.");

                for (int i = 0; i < hits.Length; ++i)
                {
                    int docId = hits[i].Doc;

                    // GetTermFreqVector returns null when the field was indexed
                    // without term vectors; the original cast would then throw.
                    ITermFreqVector tfvector = reader.GetTermFreqVector(docId, "LineText");
                    TermPositionVector tpvector = tfvector as TermPositionVector;
                    if (tpvector == null)
                    {
                        continue;
                    }

                    // This part works only if there is one term in the query string;
                    // otherwise iterate this section over the individual query terms.
                    int termidx = tfvector.IndexOf(querystr);
                    if (termidx < 0)
                    {
                        // Term not present in this document's vector; GetTermPositions(-1)
                        // would fail, so skip the document.
                        continue;
                    }

                    int[] termposx = tpvector.GetTermPositions(termidx);
                    TermVectorOffsetInfo[] tvoffsetinfo = tpvector.GetOffsets(termidx);

                    for (int j = 0; j < termposx.Length; j++)
                    {
                        MessageBox.Show("termpos : " + termposx[j]);
                    }
                    for (int j = 0; j < tvoffsetinfo.Length; j++)
                    {
                        int offsetStart = tvoffsetinfo[j].StartOffset;
                        int offsetEnd   = tvoffsetinfo[j].EndOffset;
                        MessageBox.Show("offsets : " + offsetStart + " " + offsetEnd);
                    }

                    // Print some info about where the hit was found.
                    Document d = searcher.Doc(docId);
                    MessageBox.Show((i + 1) + ". " + d.Get("path"));
                }
            }
        }
        /// <summary>
        /// Searches the "LineText" field for <paramref name="searchTerm"/> and returns
        /// the matching rows ordered by descending relevance score.
        /// </summary>
        /// <param name="searchTerm">Query text; a null/empty value yields an empty result.</param>
        /// <param name="precision">Fuzzy-match precision in percent: 1-99 maps to
        /// 0.01-0.99, 100 maps to 0.99 (Lucene forbids 1.0), anything else falls
        /// back to 0.8.</param>
        /// <param name="indexDirectory">Directory holding the Lucene index to search.</param>
        /// <returns>Matching rows sorted by score, highest first.</returns>
        public IEnumerable <SampleDataFileRow> _search(string searchTerm, int precision, global::Lucene.Net.Store.Directory indexDirectory)
        {
            Debug.Assert(!String.IsNullOrEmpty(searchTerm));

            List <SampleDataFileRow> results = new List <SampleDataFileRow>();

            if (String.IsNullOrEmpty(searchTerm))
            {
                return(results);
            }

            using (IndexSearcher searcher = new IndexSearcher(indexDirectory))
            {
                var analyzer = new StandardAnalyzer(Version.LUCENE_30, ListStopWords);

                QueryParser parser = new QueryParser(Version.LUCENE_30, "LineText", analyzer);

                // Map the percentage onto Lucene's fuzzy minimum similarity,
                // a float strictly between 0 and 1.
                if (precision > 0 && precision < 100)
                {
                    parser.FuzzyMinSim = ((float)precision) * 0.01f;
                }
                else if (precision == 100)
                {
                    parser.FuzzyMinSim = 0.99f;
                }
                else
                {
                    // Out-of-range input: use a sensible default rather than failing.
                    parser.FuzzyMinSim = 0.8f;
                }

                //parser.PhraseSlop = 5;

                var query = ParseQuery(searchTerm, parser);

                ScoreDoc[] hitsFound = searcher.Search(query, null, CountSearchResults).ScoreDocs;

                foreach (var t in hitsFound)
                {
                    int      docId = t.Doc;
                    Document doc   = searcher.Doc(docId);

                    var sampleDataFileRow = new SampleDataFileRow();
                    sampleDataFileRow.LineNumber = int.Parse(doc.Get("LineNumber"));
                    sampleDataFileRow.LineText   = doc.Get("LineText");
                    sampleDataFileRow.Score      = t.Score;

                    // Only the explanation of the last processed hit is retained
                    // (matches the original behaviour).
                    _explanationResult = searcher.Explain(query, docId).ToString();

                    results.Add(sampleDataFileRow);
                }

                analyzer.Close();
                // BUG FIX: the original also called searcher.Dispose() here, which
                // made the enclosing using block dispose the searcher a second time.
            }
            return(results.OrderByDescending(x => x.Score).ToList());
        }