public IEnumerable<Hit> Search(string query, int maxResults)
{
    var analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29);
    QueryParser qp = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "contents", analyzer);
    Query q = qp.Parse(query);

    TopDocs top = searcher.Search(q, maxResults);

    // totalHits can exceed the number of ScoreDocs actually returned (it is the total match
    // count, not the page size), so iterate over scoreDocs to avoid an out-of-range access.
    List<Hit> result = new List<Hit>(top.scoreDocs.Length);
    var scorer = new QueryScorer(q, searcher.GetIndexReader(), "contents");
    var highlighter = new Highlighter(scorer);

    for (int index = 0; index < top.scoreDocs.Length; index++)
    {
        var doc = searcher.Doc(top.scoreDocs[index].doc);
        string contents = doc.Get("contents");

        result.Add(new Hit()
        {
            Relevance = top.scoreDocs[index].score,
            Title = doc.Get("title"),
            Url = doc.Get("path"),
            Excerpt = highlighter.GetBestFragment(analyzer, "contents", contents)
        });
    }
    return result;
}
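// A minimal usage sketch for the Search method above (not part of the original example):
// it assumes the surrounding class with its "searcher" field and the Hit type shown there.
void PrintResults(string userQuery)
{
    foreach (Hit hit in Search(userQuery, 10))
    {
        // Excerpt already contains the fragment produced by the Highlighter.
        Console.WriteLine("{0} ({1:0.00})\n{2}\n{3}\n", hit.Title, hit.Relevance, hit.Url, hit.Excerpt);
    }
}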
public string HighlightContent(string text)
{
    QueryScorer scorer = new QueryScorer(GetQuery());
    Formatter formatter = new SimpleHTMLFormatter("<span style='color:maroon; font-weight:bold;'>", "</span>");
    Highlighter highlighter = new Highlighter(formatter, scorer);
    highlighter.SetTextFragmenter(new SimpleFragmenter(120));

    TokenStream stream = new StandardAnalyzer().TokenStream("Content", new StringReader(text));
    var fragments = highlighter.GetBestFragments(stream, text, 3);

    // Nothing matched: fall back to a plain, truncated excerpt.
    if (fragments == null || fragments.Length == 0)
        return text.Length > 120 ? text.Substring(0, 120) + "..." : text;

    string highlighted = "";
    foreach (var fragment in fragments)
    {
        if (text.StartsWith(fragment))
            highlighted += "<p>" + fragment + " ... </p>";
        else if (text.EndsWith(fragment))
            highlighted += "<p> ... " + fragment + "</p>";
        else
            highlighted += "<p> ... " + fragment + " ... </p>";
    }
    return highlighted;
}
public string GetHighlight(string value, IndexSearcher searcher, string highlightField, Query luceneQuery)
{
    var scorer = new QueryScorer(luceneQuery.Rewrite(searcher.GetIndexReader()));
    var highlighter = new Highlighter(HighlightFormatter, scorer);
    var tokenStream = HighlightAnalyzer.TokenStream(highlightField, new StringReader(value));
    return highlighter.GetBestFragments(tokenStream, value, MaxNumHighlights, Separator);
}
public string GetHighlight(string value, string highlightField, Searcher searcher, string luceneRawQuery)
{
    var query = GetQueryParser(highlightField).Parse(luceneRawQuery);
    var scorer = new QueryScorer(searcher.Rewrite(query));
    var highlighter = new Highlighter(HighlightFormatter, scorer);
    var tokenStream = HighlightAnalyzer.TokenStream(highlightField, new StringReader(value));
    string bestFragments = highlighter.GetBestFragments(tokenStream, value, MaxNumHighlights, Separator);
    return bestFragments;
}
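// Why the two methods above call Rewrite() before building the QueryScorer: multi-term queries
// such as wildcard or prefix queries only expose concrete terms to the highlighter after they
// are rewritten against the index. A minimal sketch of the same idea, assuming an open
// IndexSearcher ("searcher"), an analyzer ("analyzer"), and a "contents" field; these names
// are illustrative and not taken from the examples themselves.
string HighlightWildcard(string value)
{
    var parser = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "contents", analyzer);
    Query query = parser.Parse("lucen*");                       // wildcard query
    Query rewritten = query.Rewrite(searcher.GetIndexReader()); // expands to the matching terms

    var highlighter = new Highlighter(new SimpleHTMLFormatter("<em>", "</em>"), new QueryScorer(rewritten));
    TokenStream stream = analyzer.TokenStream("contents", new StringReader(value));
    return highlighter.GetBestFragments(stream, value, 3, "...");
}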
public string HighlightTitle(string text)
{
    QueryScorer scorer = new QueryScorer(GetQuery());
    Formatter formatter = new SimpleHTMLFormatter("<span style='color:maroon; font-weight:bold;'>", "</span>");
    Highlighter highlighter = new Highlighter(formatter, scorer);
    highlighter.SetTextFragmenter(new NullFragmenter());
    TokenStream stream = new StandardAnalyzer().TokenStream("Title", new StringReader(text));
    var title = highlighter.GetBestFragment(stream, text);
    return title ?? text;
}
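// A small, purely illustrative sketch tying HighlightTitle and HighlightContent together when
// rendering one search result. NullFragmenter keeps the whole title as a single fragment,
// while the content helper returns up to three 120-character fragments wrapped in <p> tags.
string RenderHit(string title, string content)
{
    return "<h3>" + HighlightTitle(title) + "</h3>" + HighlightContent(content);
}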
public IEnumerable<SearchResult> Search(string query)
{
    Analyzer analyzer = new SnowballAnalyzer("English");
    QueryParser parser = new QueryParser("text", analyzer);
    Query luceneQuery = parser.Parse(query);

    Directory directory = FSDirectory.GetDirectory(indexPath);
    IndexSearcher searcher = new IndexSearcher(directory);

    QueryScorer queryScorer = new QueryScorer(luceneQuery);
    Highlighter highlighter = new Highlighter(queryScorer);

    TopDocs topDocs = searcher.Search(luceneQuery, 100);
    var searchResults = new List<SearchResult>();
    foreach (ScoreDoc scoreDoc in topDocs.scoreDocs)
    {
        Document doc = searcher.Doc(scoreDoc.doc);
        searchResults.Add(new SearchResult
        {
            Path = doc.Get("path"),
            Score = scoreDoc.score,
            Title = doc.Get("title"),
            Preview = highlighter.GetBestFragment(analyzer, "text", doc.Get("text"))
        });
    }
    return searchResults;
}
public IEnumerable<SearchResult> Search(string luceneQuery, int maxResults = 500, string highlightOpenTag = null, string highlightCloseTag = null, params string[] fieldsToHighlight)
{
    var results = new List<SearchResult>();
    if (String.IsNullOrWhiteSpace(luceneQuery))
        return results;

    var parser = new MultiFieldQueryParser(LuceneVersion, searchFields, new StandardAnalyzer(LuceneVersion));
    Query query = parser.Parse(luceneQuery);

    TopDocs topDocs = searcher.Search(query, maxResults);
    foreach (ScoreDoc scoreDoc in topDocs.ScoreDocs)
    {
        Document document = reader.Document(scoreDoc.doc);
        var result = new SearchResult(document, scoreDoc.score);
        results.Add(result);
    }

    if (!String.IsNullOrEmpty(highlightOpenTag) && !String.IsNullOrEmpty(highlightCloseTag) && fieldsToHighlight.Length > 0)
    {
        var scorer = new QueryScorer(query);
        var formatter = new SimpleHTMLFormatter(highlightOpenTag, highlightCloseTag);
        var highlighter = new Highlighter(formatter, scorer);
        highlighter.SetTextFragmenter(new SimpleFragmenter());

        foreach (SearchResult result in results)
        {
            foreach (string highlightField in fieldsToHighlight)
            {
                if (!result.Fields.ContainsKey(highlightField))
                    continue;

                string fieldValue = result[highlightField];
                TokenStream stream = new StandardAnalyzer(LuceneVersion).TokenStream(highlightField, new StringReader(fieldValue));
                string highlightedFieldValue = highlighter.GetBestFragments(stream, fieldValue, 500, "...");
                if (!String.IsNullOrWhiteSpace(highlightedFieldValue))
                {
                    result.Fields[highlightField] = highlightedFieldValue;
                }
            }
        }
    }

    return results;
}
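// A possible call into the method above, assuming its containing class is already set up
// with an index ("searcher", "reader", "searchFields"); the query string and field names
// here are illustrative only.
void RunHighlightedSearch()
{
    var hits = Search("body:lucene", 20, "<strong>", "</strong>", "body", "title");
    foreach (var hit in hits)
    {
        // The highlighted value was written back into the result's Fields by the loop above.
        Console.WriteLine(hit["title"]);
    }
}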
private string GeneratePreviewText(Query q, string text)
{
    var scorer = new QueryScorer(q);
    var formatter = new SimpleHTMLFormatter("<em>", "</em>");
    var highlighter = new Highlighter(formatter, scorer);
    highlighter.SetTextFragmenter(new SimpleFragmenter(250));
    var stream = new StandardAnalyzer(Version.LUCENE_29).TokenStream("bodyText", new StringReader(text));
    return highlighter.GetBestFragments(stream, text, 3, "...");
}
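// One way the helper above might be fed a Query (a sketch; only Version.LUCENE_29 and the
// "bodyText" field come from the example itself, the parser setup and the documentBodyText
// variable are assumptions).
Query BuildPreviewQuery(string userInput)
{
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    var parser = new QueryParser(Version.LUCENE_29, "bodyText", analyzer);
    return parser.Parse(userInput);
}
// e.g. string preview = GeneratePreviewText(BuildPreviewQuery("highlighter fragments"), documentBodyText);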
private SearchResults Query(Query query, string cachePath, int offset, int resultAmount, bool includeContentSnippets, int limit)
{
    SearchResults results = new SearchResults();

    Lucene.Net.Store.FSDirectory directory = Lucene.Net.Store.FSDirectory.Open(new System.IO.DirectoryInfo(_indexPath));
    IndexReader reader = IndexReader.Open(directory, true);
    IndexSearcher searcher = new IndexSearcher(reader);

    // Request all results up to the page we actually need (this is quick)
    TopDocs topDocs = searcher.Search(query, limit);
    ScoreDoc[] hits = topDocs.scoreDocs;

    // Save num results
    results.NumResults = hits.Length;

    // Only loop through the hits that should be on the page
    for (int i = offset; i < hits.Length && i < offset + resultAmount; i++)
    {
        int docId = hits[i].doc;
        Document doc = searcher.Doc(docId);

        if (includeContentSnippets)
        {
            // Read the whole file from the cache to find the content snippet.
            string filepath = CacheManager.GetRelativeCacheFileName(doc.Get("uri"), "GET");
            string documentContent = Utils.ReadFileAsString(cachePath + filepath);

            // Remove unusable stuff.
            documentContent = HtmlUtils.RemoveHead(documentContent);
            documentContent = HtmlUtils.ExtractText(documentContent);

            // Find (and highlight) content snippets
            QueryScorer scorer = new QueryScorer(query);
            SimpleHTMLFormatter formatter = new SimpleHTMLFormatter("<b>", "</b>");
            Highlighter highlighter = new Highlighter(formatter, scorer);
            highlighter.SetTextFragmenter(new SentenceFragmenter());
            TokenStream stream = _analyzer.TokenStream("content", new StringReader(documentContent));

            // Get 1 fragment
            string contentSnippet = "";
            try
            {
                string[] fragments = highlighter.GetBestFragments(stream, documentContent, 1);
                if (fragments.Length > 0)
                {
                    contentSnippet = HtmlUtils.StripTagsCharArray(fragments[0], false);

                    // If the content snippet ends in the middle of a sentence, append "..."
                    if (!new char[] { '.', '!', '?' }.Contains(contentSnippet[contentSnippet.Length - 1]))
                    {
                        contentSnippet += "...";
                    }
                }
            }
            catch (Exception) { }

            results.AddLuceneDocument(doc, contentSnippet);
        }
        else
        {
            results.AddLuceneDocument(doc);
        }
    }

    searcher.Close();
    return results;
}
/// <summary>
/// Returns highlighted fragments of the given field text for a search query.
/// </summary>
/// <param name="IndexField">The stored field text to highlight.</param>
/// <param name="LuceneIndex">The Lucene index used to score fragments.</param>
/// <param name="searchQuery">The raw search query.</param>
/// <param name="highlightField">The name of the field being highlighted.</param>
/// <returns>Up to three highlighted fragments joined by "...", or an empty string if nothing matched.</returns>
public static string GetHighlight(string IndexField, string LuceneIndex, string searchQuery, string highlightField)
{
    string highlightText = string.Empty;
    var formatter = new SimpleHTMLFormatter("<span class=\"umbSearchHighlight\">", "</span>");
    var highlighter = new Highlighter(formatter, FragmentScorer(searchQuery, highlightField, LuceneIndex));
    var tokenStream = new SnowballAnalyzer("English").TokenStream(highlightField, new StringReader(IndexField));

    string tmp = highlighter.GetBestFragments(tokenStream, IndexField, 3, "...");
    if (tmp.Length > 0)
        highlightText = tmp + "...";
    return highlightText;
}
/// <summary>
/// Runs the site search for the current query and fills the results table.
/// </summary>
private void search()
{
    DateTime start = DateTime.Now;

    // Open the two index directories and search them in parallel.
    // A third, network-hosted index (\\192.168.1.130\index\1.5) can be added to the
    // Searchable array here when that directory exists.
    string indexDirectory1 = Server.MapPath("./") + "index\\1.5\\";
    IndexSearcher searcher1 = new IndexSearcher(indexDirectory1);
    string indexDirectory2 = Server.MapPath("./") + "index\\1.4\\";
    IndexSearcher searcher2 = new IndexSearcher(indexDirectory2);

    Lucene.Net.Search.ParallelMultiSearcher searcher =
        new Lucene.Net.Search.ParallelMultiSearcher(
            new Lucene.Net.Search.Searchable[] { searcher1, searcher2 });

    // Build a query across several fields, OR-ing the terms together.
    string[] strs = new string[] { "text", "path", "fullpath", "keywords", "description", "title" };
    Lucene.Net.QueryParsers.QueryParser parser =
        new Lucene.Net.QueryParsers.MultiFieldQueryParser(strs, new StandardAnalyzer());
    parser.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR);

    // Result table columns.
    this.Results.Columns.Add("link", typeof(string));
    this.Results.Columns.Add("title", typeof(string));
    this.Results.Columns.Add("sample", typeof(string));
    this.Results.Columns.Add("path", typeof(string));

    // Parse and run the query.
    Query query = parser.Parse(this.Query);
    Hits hits = searcher.Search(query);
    this.total = hits.Length();

    // Highlight matched keywords; the default markup is <b>..</b>, here a red <B> is used instead.
    SimpleHTMLFormatter simpleHTMLFormatter = new SimpleHTMLFormatter("<B style='color:Red;'>", "</B>");
    Highlighter highlighter = new Highlighter(simpleHTMLFormatter, new QueryScorer(query));

    // Limit the fragment size: too small shows only the start of the content,
    // too large wastes space in the result list.
    highlighter.SetTextFragmenter(new SimpleFragmenter(100));

    // initialize startAt
    this.startAt = initStartAt();

    // Number of results to show on this page.
    int resultsCount = smallerOf(total, this.maxResults + this.startAt);
    for (int i = startAt; i < resultsCount; i++)
    {
        // Fetch the matching document.
        Document doc = hits.Doc(i);

        // Trailing space so a keyword at the very end of the title is still highlighted.
        string title = doc.Get("title") + " ";
        // Document body, truncated.
        string text = Search.CutString(doc.Get("text"), 480);
        // Relative and full paths.
        string path = doc.Get("path");
        string orpath = doc.Get("fullpath");

        Lucene.Net.Analysis.TokenStream titleStream = analyzer.TokenStream("title", new System.IO.StringReader(title));
        Lucene.Net.Analysis.TokenStream tokenStream = analyzer.TokenStream("text", new System.IO.StringReader(text));
        Lucene.Net.Analysis.TokenStream pathStream = analyzer.TokenStream("path", new System.IO.StringReader(path));

        string result = highlighter.GetBestFragments(tokenStream, text, 2, "...");
        string tresult = highlighter.GetBestFragments(titleStream, title, 2, "..");
        string pathwords = highlighter.GetBestFragments(pathStream, path, 2, "..");

        // Build one result row.
        DataRow row = this.Results.NewRow();
        row["title"] = (tresult == "") ? title : tresult;

        if (getpath(row, System.IO.Path.GetFileName(path.Replace("\\", "/"))))
        {
            row["link"] = getFullpath(System.IO.Path.GetFileName(doc.Get("path")));
        }
        else
        {
            row["link"] = orpath;
            row["path"] = (pathwords == "") ? orpath : pathwords.Replace("\\", "/");
        }

        row["sample"] = (result == "") ? text : result;
        this.Results.Rows.Add(row);
    }

    searcher.Close();

    // Timing and paging info.
    this.duration = DateTime.Now - start;
    this.fromItem = startAt + 1;
    this.toItem = smallerOf(startAt + maxResults, total);
}
public static List<SearchRecord> SearchPage(out Query query, out Dictionary<string, int> statistics,
    List<string> filterList, int pageSize, int pageNum, bool fileInclude, bool highLight)
{
    List<SearchRecord> recordList = new List<SearchRecord>();
    query = GetQuery(fileInclude);
    statistics = new Dictionary<string, int>();
    try
    {
        #region Add Index Dir
        List<IndexSearcher> searcherList = new List<IndexSearcher>();
        if (searchIndexList.Count > 0)
        {
            foreach (IndexSet indexSet in searchIndexList)
            {
                if (indexSet.Type == IndexTypeEnum.Increment)
                    continue;
                searcherList.Add(new IndexSearcher(indexSet.Path));
            }
        }
        else
        {
            foreach (IndexSet indexSet in indexFieldsDict.Keys)
            {
                if (indexSet.Type == IndexTypeEnum.Increment)
                    continue;
                searcherList.Add(new IndexSearcher(indexSet.Path));
            }
        }
        if (fileInclude)
        {
            searcherList.Add(new IndexSearcher(fileSet.Path));
        }
        #endregion

        ParallelMultiSearcher searcher = new ParallelMultiSearcher(searcherList.ToArray());
        TopDocs topDocs = searcher.Search(query.Weight(searcher), null, searchSet.MaxMatches);
        ScoreDoc[] scoreDocs = topDocs.scoreDocs;

        Highlighter highlighter = new Highlighter(new QueryScorer(query));
        highlighter.SetTextFragmenter(new SimpleFragmenter(SupportClass.FRAGMENT_SIZE));

        #region Order by Score
        List<ScoreDoc> scoreDocList = new List<ScoreDoc>();
        for (int i = 0; i < scoreDocs.Length; i++)
        {
            float score = scoreDocs[i].score;
            if (score < searchSet.MinScore)
                continue;
            scoreDocList.Add(scoreDocs[i]);
        }
        // Sort by descending score.
        scoreDocList.Sort(delegate(ScoreDoc x, ScoreDoc y)
        {
            if (x.score > y.score) return -1;
            else if (x.score == y.score) return 0;
            else return 1;
        });
        #endregion

        #region Doc Statistic
        int start = 0, end = scoreDocList.Count;
        if (pageSize > 0 && pageNum >= 1)
        {
            start = pageSize * (pageNum - 1) + 1;
            end = pageNum * pageSize;
        }
        int current = 1;
        SpecialFieldSelector sfSelector = new SpecialFieldSelector(SupportClass.TableFileNameField);
        for (int recNum = 0; recNum < scoreDocList.Count; recNum++)
        {
            float score = scoreDocList[recNum].score;
            if (score < searchSet.MinScore)
                continue;
            Document fDoc = searcher.Doc(scoreDocList[recNum].doc, sfSelector);
            string caption = fDoc.Get(SupportClass.TableFileNameField);
            if (caption.Equals(SupportClass.TFNFieldValue) == false)
            {
                if (sfpDict.ContainsKey(caption) == false || nameIndexDict.ContainsKey(caption) == false)
                {
                    continue;
                }
            }
            if (statistics.ContainsKey(caption))
            {
                statistics[caption] = statistics[caption] + 1;
            }
            else
            {
                statistics.Add(caption, 1);
            }
            if (filterList != null && filterList.Count > 0)
            {
                if (!filterList.Contains(caption))
                    continue;
            }

            #region Add Page
            if (current >= start && current <= end)
            {
                Document doc = searcher.Doc(scoreDocList[recNum].doc);
                doc.RemoveField(SupportClass.TableFileNameField);
                Dictionary<string, IndexField> fpDict = sfpDict[caption];
                Field[] fields = new Field[doc.GetFields().Count];
                doc.GetFields().CopyTo(fields, 0);

                #region SearchField
                List<SearchField> sfList = new List<SearchField>();
                foreach (Field field in fields)
                {
                    string key = field.Name();
                    string value = field.StringValue();
                    if (highLight)
                    {
                        string output = SupportClass.String.DropHTML(value);
                        TokenStream tokenStream = analyzer.TokenStream(key, new System.IO.StringReader(output));
                        string result = highlighter.GetBestFragment(tokenStream, output);
                        if (result != null && string.IsNullOrEmpty(result.Trim()) == false)
                        {
                            if (fpDict.ContainsKey(key))
                                sfList.Add(new SearchField(key, fpDict[key].Caption, value, result, field.GetBoost(), fpDict[key].IsTitle, true, fpDict[key].Order));
                            else
                                sfList.Add(new SearchField(key, key, value, result, field.GetBoost(), false, false, 0));
                        }
                        else
                        {
                            // No highlight produced: fall back to the raw field value.
                            if (fpDict.ContainsKey(key))
                                sfList.Add(new SearchField(key, fpDict[key].Caption, value, value, field.GetBoost(), fpDict[key].IsTitle, true, fpDict[key].Order));
                            else
                                sfList.Add(new SearchField(key, key, value, value, field.GetBoost(), false, false, 0));
                        }
                    }
                    else
                    {
                        if (fpDict.ContainsKey(key))
                            sfList.Add(new SearchField(key, fpDict[key].Caption, value, value, field.GetBoost(), fpDict[key].IsTitle, true, fpDict[key].Order));
                        else
                            sfList.Add(new SearchField(key, key, value, value, field.GetBoost(), false, false, 0));
                    }
                }
                #endregion

                if (caption.Equals(SupportClass.TFNFieldValue) == false)
                {
                    IndexSet indexSet = nameIndexDict[caption];
                    recordList.Add(new SearchRecord(indexSet, sfList, indexDict[indexSet].PrimaryKey, score));
                }
                else
                {
                    // "文件" = file
                    recordList.Add(new SearchRecord("文件", "文件", "文件", score, sfList));
                }
            }
            #endregion
            current++;
        }
        #endregion
    }
    catch (Exception)
    {
        // Swallowed; consider logging via SupportClass.FileUtil.WriteToLog.
    }
    return recordList;
}
public static List<SearchRecord> HighLightSearchFile()
{
    List<SearchRecord> recordList = new List<SearchRecord>();
    try
    {
        Query query = GetFileQuery();
        IndexSearcher presearcher = new IndexSearcher(fileSet.Path);
        ParallelMultiSearcher searcher = new ParallelMultiSearcher(new IndexSearcher[] { presearcher });
#if DEBUG
        System.Console.WriteLine(query.ToString());
#endif
        Highlighter highlighter = new Highlighter(new QueryScorer(query));
        highlighter.SetTextFragmenter(new SimpleFragmenter(SupportClass.FRAGMENT_SIZE));

        TopDocs topDocs = searcher.Search(query.Weight(searcher), null, searchSet.MaxMatches);
        ScoreDoc[] scoreDocs = topDocs.scoreDocs;
        for (int i = 0; i < scoreDocs.Length; i++)
        {
            float score = scoreDocs[i].score;
            if (score < searchSet.MinScore)
                continue;
            Document doc = searcher.Doc(scoreDocs[i].doc);
            string name = doc.Get("Name");
            string path = doc.Get("Path");
            string content = doc.Get("Content");

            TokenStream nts = analyzer.TokenStream("Name", new System.IO.StringReader(name));
            TokenStream pts = analyzer.TokenStream("Path", new System.IO.StringReader(path));
            TokenStream cts = analyzer.TokenStream("Content", new System.IO.StringReader(content));
            string nr = highlighter.GetBestFragment(nts, name);
            string pr = highlighter.GetBestFragment(pts, path);
            string cr = highlighter.GetBestFragment(cts, content);

            // Fall back to the raw value when no highlighted fragment was produced.
            SearchField nf = (nr != null && string.IsNullOrEmpty(nr.Trim()) == false)
                ? new SearchField("文件名", "文件名", name, nr, 1.0f, true, true, 0)     // "文件名" = file name
                : new SearchField("文件名", "文件名", name, name, 1.0f, true, true, 0);
            SearchField pf = (pr != null && string.IsNullOrEmpty(pr.Trim()) == false)
                ? new SearchField("路径", "路径", path, pr, 1.0f, false, true, 0)        // "路径" = path
                : new SearchField("路径", "路径", path, path, 1.0f, false, true, 0);
            SearchField cf = (cr != null && string.IsNullOrEmpty(cr.Trim()) == false)
                ? new SearchField("内容", "内容", content, cr, 1.0f, false, true, 0)     // "内容" = content
                : new SearchField("内容", "内容", content, content, 1.0f, false, true, 0);

            // "文件" = file
            recordList.Add(new SearchRecord("文件", "文件", "文件", score, nf, pf, cf));
        }
    }
    catch (Exception e)
    {
        SupportClass.FileUtil.WriteToLog(SupportClass.LogPath, e.StackTrace.ToString());
    }
    return recordList;
}
public static List<SearchRecord> HighLightSearch(out Dictionary<string, List<int>> statistics)
{
    List<SearchRecord> recordList = new List<SearchRecord>();
    statistics = new Dictionary<string, List<int>>();
    try
    {
        // Search the explicitly selected index sets or, if none are selected, all configured index sets.
        IEnumerable<IndexSet> indexSets = searchIndexList.Count > 0
            ? (IEnumerable<IndexSet>)searchIndexList
            : indexFieldsDict.Keys;

        foreach (IndexSet indexSet in indexSets)
        {
            if (indexSet.Type == IndexTypeEnum.Increment)
                continue;
            Query query = GetQuery(indexSet);
            Source source = indexDict[indexSet];
            Dictionary<string, IndexField> fpDict = source.FieldDict;

            IndexSearcher presearcher = new IndexSearcher(indexSet.Path);
            ParallelMultiSearcher searcher = new ParallelMultiSearcher(new IndexSearcher[] { presearcher });
#if DEBUG
            System.Console.WriteLine(query.ToString());
#endif
            Highlighter highlighter = new Highlighter(new QueryScorer(query));
            highlighter.SetTextFragmenter(new SimpleFragmenter(SupportClass.FRAGMENT_SIZE));

            TopDocs topDocs = searcher.Search(query.Weight(searcher), null, searchSet.MaxMatches);
            ScoreDoc[] scoreDocs = topDocs.scoreDocs;
            List<int> posList = new List<int>();

            for (int i = 0; i < scoreDocs.Length; i++)
            {
                float score = scoreDocs[i].score;
                if (score < searchSet.MinScore)
                    continue;
                Document doc = searcher.Doc(scoreDocs[i].doc);
                Field[] fields = new Field[doc.GetFields().Count];
                doc.GetFields().CopyTo(fields, 0);

                List<SearchField> sfList = new List<SearchField>();
                foreach (Field field in fields)
                {
                    string key = field.Name();
                    string value = field.StringValue();
                    string output = SupportClass.String.DropHTML(value);
                    TokenStream tokenStream = analyzer.TokenStream(key, new System.IO.StringReader(output));
                    string result = highlighter.GetBestFragment(tokenStream, output);
                    if (result != null && string.IsNullOrEmpty(result.Trim()) == false)
                    {
                        if (fpDict.ContainsKey(key))
                            sfList.Add(new SearchField(key, fpDict[key].Caption, value, result, field.GetBoost(), fpDict[key].IsTitle, true, fpDict[key].Order));
                        else
                            sfList.Add(new SearchField(key, key, value, result, field.GetBoost(), false, false, 0));
                    }
                    else
                    {
                        // No highlight produced: fall back to the raw field value.
                        if (fpDict.ContainsKey(key))
                            sfList.Add(new SearchField(key, fpDict[key].Caption, value, value, field.GetBoost(), fpDict[key].IsTitle, true, fpDict[key].Order));
                        else
                            sfList.Add(new SearchField(key, key, value, value, field.GetBoost(), false, false, 0));
                    }
                }
                recordList.Add(new SearchRecord(indexSet, sfList, indexDict[indexSet].PrimaryKey, score));
                posList.Add(recordList.Count - 1);
            }

            // Record the positions of this index set's hits; disambiguate duplicate captions with a numeric suffix.
            if (!statistics.ContainsKey(indexSet.Caption))
            {
                statistics.Add(indexSet.Caption, posList);
            }
            else
            {
                int i = 2;
                while (statistics.ContainsKey(indexSet.Caption + i.ToString()))
                    i++;
                statistics.Add(indexSet.Caption + i.ToString(), posList);
            }
        }
    }
    catch (Exception e)
    {
        SupportClass.FileUtil.WriteToLog(SupportClass.LogPath, e.StackTrace.ToString());
    }
    return recordList;
}