public void SearchGenericWithCollector()
{
    const int NumObjects = 10;
    const int MinNumberInclusive = 0;
    const int MaxNumberExclusive = 8;

    WriteTestObjects(NumObjects, obj => obj.ToDocument());
    Assert.AreEqual(NumObjects, writer.NumDocs());

    using (Searcher searcher = new IndexSearcher(dir, true))
    {
        TopFieldCollector collector = TopFieldCollector.Create(
            new Sort(new SortField("Number", SortField.LONG, true)),
            NumObjects, false, false, false, false);
        searcher.Search<TestObject>(
            NumericRangeQuery.NewLongRange("Number", MinNumberInclusive, MaxNumberExclusive, true, false),
            collector);
        TopDocs topDocs = collector.TopDocs();
        VerifyTopDocsTestObjects(searcher, topDocs, MinNumberInclusive, MaxNumberExclusive, true);
    }
}
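// The positional boolean arguments to TopFieldCollector.Create are easy to
// misread, so here is a minimal annotated sketch (assuming the Lucene.Net
// 3.x overload used in the snippet above; the inline labels are just
// documentation, not named parameters):
Sort numberSort = new Sort(new SortField("Number", SortField.LONG, true));
TopFieldCollector annotated = TopFieldCollector.Create(
    numberSort,
    10,      // numHits: how many top documents to keep
    false,   // fillFields: populate the Fields array on each FieldDoc
    false,   // trackDocScores: compute a relevance score per hit
    false,   // trackMaxScore: track the maximum score across all hits
    false);  // docsScoredInOrder: caller promises docs arrive in order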
public void TestEarlyTerminationDifferentSorter()
{
    // test that the collector works correctly when the index was sorted by a
    // different sorter than the one specified in the ctor.
    CreateRandomIndexes(5);
    int numHits = TestUtil.NextInt(Random(), 1, numDocs / 10);
    Sort sort = new Sort(new SortField("ndv2", SortField.Type_e.LONG, false));
    bool fillFields = Random().nextBoolean();
    bool trackDocScores = Random().nextBoolean();
    bool trackMaxScore = Random().nextBoolean();
    bool inOrder = Random().nextBoolean();
    TopFieldCollector collector1 = Search.TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
    TopFieldCollector collector2 = Search.TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);

    IndexSearcher searcher = NewSearcher(reader);
    int iters = AtLeast(5);
    for (int i = 0; i < iters; ++i)
    {
        TermQuery query = new TermQuery(new Term("s", RandomInts.RandomFrom(Random(), terms)));
        searcher.Search(query, collector1);
        Sort different = new Sort(new SortField("ndv2", SortField.Type_e.LONG));
        searcher.Search(query, new EarlyTerminatingSortingCollectorHelper(collector2, different, numHits));
        assertTrue(collector1.TotalHits >= collector2.TotalHits);
        AssertTopDocsEquals(collector1.TopDocs().ScoreDocs, collector2.TopDocs().ScoreDocs);
    }
}
public IEnumerable<Movie> Search(string keywords, int? page, out int count, out string words)
{
    string indexPath = HttpContext.Current.Server.MapPath("~/IndexData");
    FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NoLockFactory());
    IndexReader reader = IndexReader.Open(directory, true);
    IndexSearcher searcher = new IndexSearcher(reader);

    //-------------------------------------- configure the search conditions here
    //PhraseQuery query = new PhraseQuery();
    //foreach (string word in Common.SplitContent.SplitWords(Request.QueryString["SearchKey"])) {
    //    query.Add(new Term("content", word)); // terms combined as AND
    //}
    //query.SetSlop(100);

    // combine the keywords with OR
    BooleanQuery queryOr = new BooleanQuery();
    TermQuery query = null;
    words = "";
    foreach (string word in SplitWords(keywords))
    {
        words += word + ",";
        query = new TermQuery(new Term("title", word));
        queryOr.Add(query, Occur.SHOULD); // SHOULD clauses give an OR relationship
    }
    //--------------------------------------

    SortField[] sortfield = new SortField[] { SortField.FIELD_SCORE, new SortField(null, SortField.DOC, true) };
    Sort sort = new Sort(sortfield);
    TopFieldCollector collector = TopFieldCollector.Create(sort, 1000, false, false, false, false);
    searcher.Search(queryOr, null, collector);

    int start = 0, pagesize = 24;
    start = (page ?? 1) - 1;
    count = collector.TotalHits;
    // take one page of hits starting at the page offset; this is how
    // Lucene.Net search-result paging is implemented
    ScoreDoc[] docs = collector.TopDocs(start * pagesize, pagesize).ScoreDocs;
    List<Movie> bookResult = new List<Movie>();
    for (int i = 0; i < docs.Length; i++)
    {
        int docId = docs[i].Doc;
        Document doc = searcher.Doc(docId);
        Movie book = new Movie();
        book.MovieTitle = HightLight(keywords, doc.Get("title"));
        book.MovieContent = doc.Get("content");
        book.MovieID = Convert.ToInt32(doc.Get("id"));
        book.Stars = doc.Get("stars");
        book.Director = doc.Get("director");
        book.PictureFile = doc.Get("picfile");
        book.Grade = double.Parse(doc.Get("grade"));
        bookResult.Add(book);
    }
    return bookResult;
}
// main search method
private static IEnumerable<IndexableEntity> _search(string searchQuery, string searchField = "")
{
    // validation
    if (string.IsNullOrWhiteSpace(searchQuery.Replace("*", string.Empty).Replace("?", string.Empty)))
    {
        return new List<IndexableEntity>();
    }

    // set up lucene searcher
    using (var searcher = new IndexSearcher(_directory, false))
    {
        var hits_limit = 1000;
        var analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);

        // search by single field
        if (!string.IsNullOrWhiteSpace(searchField))
        {
            var sort = new Sort(new SortField[] { SortField.FIELD_SCORE, new SortField(searchField, SortField.STRING) });
            TopFieldCollector topField = TopFieldCollector.Create(sort, hits_limit, true, true, true, true);
            var parser = new QueryParser(Lucene.Net.Util.Version.LUCENE_30, searchField, analyzer);
            var query = parseQuery(searchQuery, parser);
            //var hits = searcher.Search(query, hits_limit).ScoreDocs;
            searcher.Search(query, topField);
            var hits = topField.TopDocs().ScoreDocs;
            var results = _mapLuceneToDataList(hits, searcher);
            analyzer.Close();
            // note: the using block already disposes the searcher; this
            // explicit call is redundant but harmless
            searcher.Dispose();
            return results;
        }
        // search by multiple fields (ordered by RELEVANCE)
        else
        {
            var parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30,
                new[] { "Id", "TitleName", "SubtitleText" }, analyzer);
            var query = parseQuery(searchQuery, parser);
            var hits = searcher.Search(query, null, hits_limit, Sort.INDEXORDER).ScoreDocs;
            var results = _mapLuceneToDataList(hits, searcher);
            analyzer.Close();
            searcher.Dispose();
            return results;
        }
    }
}
private static IEnumerable<IndexableEntity> _searchMultiField(Dictionary<string, string> searchTerms)
{
    // validation
    if (searchTerms == null)
    {
        return new List<IndexableEntity>();
    }

    // set up lucene searcher
    using (var searcher = new IndexSearcher(_directory, false))
    {
        var hits_limit = 1000;
        var analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
        var mainQuery = new BooleanQuery();
        TopFieldCollector topField = null;

        // note: each iteration overwrites topField, so only the sort for the
        // last non-empty key is actually applied
        foreach (var pair in searchTerms)
        {
            if (!string.IsNullOrWhiteSpace(pair.Key))
            {
                var sort = new Sort(new SortField[] { SortField.FIELD_SCORE, new SortField(pair.Key, SortField.STRING) });
                topField = TopFieldCollector.Create(sort, hits_limit, true, true, true, true);
                var parser = new QueryParser(Lucene.Net.Util.Version.LUCENE_30, pair.Key, analyzer);
                var query = parseQuery(pair.Value, parser);
                mainQuery.Add(query, Occur.MUST);
            }
        }

        // guard against a dictionary with no usable keys, which would leave
        // topField null and throw below
        if (topField == null)
        {
            return new List<IndexableEntity>();
        }

        searcher.Search(mainQuery, topField);
        var hits = topField.TopDocs().ScoreDocs;
        var results = _mapLuceneToDataList(hits, searcher);
        analyzer.Close();
        searcher.Dispose();
        return results;
    }
}
public IEnumerable<LuceneDoc> Search(string searchTerm)
{
    IndexSearcher searcher = new IndexSearcher(luceneIndexDirectory);
    QueryParser parser = new QueryParser(Version.LUCENE_30, "ImageInfoList.Description", analyzer);
    TopFieldCollector fieldCollector = TopFieldCollector.Create(Sort.RELEVANCE, 1000, true, true, true, true);
    Query query = parser.Parse(searchTerm);
    searcher.Search(query, fieldCollector);
    var results = fieldCollector.TopDocs().ScoreDocs;
    foreach (ScoreDoc scoreDoc in results)
    {
        var doc = searcher.Doc(scoreDoc.Doc);
        var luceneDoc = doc.ToObject<LuceneDoc>();
        luceneDoc.Score = scoreDoc.Score;
        yield return luceneDoc;
    }
}
/// <summary>
/// Search, sorting by <see cref="Sort"/>, and computing
/// drill down and sideways counts.
/// </summary>
public virtual DrillSidewaysResult Search(DrillDownQuery query, Filter filter, FieldDoc after, int topN, Sort sort, bool doDocScores, bool doMaxScore)
{
    if (filter != null)
    {
        query = new DrillDownQuery(config, filter, query);
    }
    if (sort != null)
    {
        int limit = searcher.IndexReader.MaxDoc;
        if (limit == 0)
        {
            limit = 1; // the collector does not allow numHits = 0
        }
        topN = Math.Min(topN, limit);
        TopFieldCollector hitCollector = TopFieldCollector.Create(sort, topN, after, true, doDocScores, doMaxScore, true);
        DrillSidewaysResult r = Search(query, hitCollector);
        return new DrillSidewaysResult(r.Facets, hitCollector.TopDocs());
    }
    else
    {
        return Search(after, query, topN);
    }
}
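// A hedged usage sketch for the overload above (Lucene.Net 4.x-style
// faceting; the searcher/config/taxoReader setup is elided, and "Color" and
// "price" are example field names, not anything the snippet requires):
var ds = new DrillSideways(searcher, config, taxoReader);
var ddq = new DrillDownQuery(config);
ddq.Add("Color", "red"); // drill down on one dimension
Sort priceSort = new Sort(new SortField("price", SortField.Type_e.LONG));
DrillSidewaysResult result = ds.Search(ddq, null, null, 10, priceSort, false, false);
// result.Facets carries the sideways counts; the TopDocs holds the sorted top 10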
public void TestEarlyTermination_()
{
    CreateRandomIndexes(5);
    int numHits = TestUtil.NextInt(Random(), 1, numDocs / 10);
    Sort sort = new Sort(new SortField("ndv1", SortField.Type_e.LONG, false));
    bool fillFields = Random().nextBoolean();
    bool trackDocScores = Random().nextBoolean();
    bool trackMaxScore = Random().nextBoolean();
    bool inOrder = Random().nextBoolean();
    TopFieldCollector collector1 = Search.TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
    TopFieldCollector collector2 = Search.TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);

    IndexSearcher searcher = NewSearcher(reader);
    int iters = AtLeast(5);
    for (int i = 0; i < iters; ++i)
    {
        TermQuery query = new TermQuery(new Term("s", RandomInts.RandomFrom(Random(), terms)));
        searcher.Search(query, collector1);
        searcher.Search(query, new EarlyTerminatingSortingCollector(collector2, sort, numHits));
    }
    assertTrue(collector1.TotalHits >= collector2.TotalHits);
    AssertTopDocsEquals(collector1.TopDocs().ScoreDocs, collector2.TopDocs().ScoreDocs);
}
public override TopDocs TopDocs()
{
    return _wrapped.TopDocs();
}
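// The override above only shows the TopDocs() delegation; since it is marked
// override, the original wrapper evidently derives from a base that exposes a
// virtual TopDocs(). A wrapper of this shape presumably forwards the rest of
// the Collector contract as well. A minimal sketch, assuming a Lucene.Net 3.x
// Collector (the class name and _wrapped field are illustrative, not from the
// original source):
public class DelegatingTopFieldCollector : Collector
{
    private readonly TopFieldCollector _wrapped;

    public DelegatingTopFieldCollector(TopFieldCollector wrapped)
    {
        _wrapped = wrapped;
    }

    // forward every collection callback to the wrapped collector
    public override void SetScorer(Scorer scorer) { _wrapped.SetScorer(scorer); }
    public override void Collect(int doc) { _wrapped.Collect(doc); }
    public override void SetNextReader(IndexReader reader, int docBase) { _wrapped.SetNextReader(reader, docBase); }
    public override bool AcceptsDocsOutOfOrder { get { return _wrapped.AcceptsDocsOutOfOrder; } }

    // expose the wrapped collector's results
    public virtual TopDocs TopDocs() { return _wrapped.TopDocs(); }
}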
/// <summary>
/// Federated search across multiple index directories (with paging)
/// </summary>
/// <typeparam name="T">Result element type</typeparam>
/// <param name="indexPaths">Paths of the index directories</param>
/// <param name="query">Query</param>
/// <param name="sort">Sort order</param>
/// <param name="pageIndex">Current page number</param>
/// <param name="pageSize">Number of results per page</param>
/// <param name="count">Total number of hits</param>
/// <returns></returns>
public static List<T> Search<T>(string[] indexPaths, Query query, Sort sort, int pageIndex, int pageSize, out int count) where T : BaseIndexModel
{
    count = 0;
    if (null == query)
    {
        return null;
    }
    if (pageIndex < 1)
    {
        pageIndex = 1;
    }
    if (pageSize < 1)
    {
        pageSize = 1;
    }
    // starting offset of the search
    int start = (pageIndex - 1) * pageSize;
    if (null == indexPaths || indexPaths.Length < 1)
    {
        return null;
    }

    List<IndexSearcher> searchers = new List<IndexSearcher>();
    foreach (var indexPath in indexPaths)
    {
        if (string.IsNullOrWhiteSpace(indexPath))
        {
            continue;
        }
        // open the index directory
        FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NoLockFactory());
        if (null == directory)
        {
            continue;
        }
        // check that an index actually exists there
        bool isExist = IndexReader.IndexExists(directory);
        if (!isExist)
        {
            continue;
        }
        // create a read-only IndexReader
        IndexReader reader = IndexReader.Open(directory, true);
        // wrap it in an IndexSearcher
        IndexSearcher searcher = new IndexSearcher(reader);
        searchers.Add(searcher);
    }
    if (searchers.Count < 1)
    {
        return null;
    }

    MultiSearcher multiSearcher = new MultiSearcher(searchers.ToArray());
    // collector sized to cover every page up to and including the current one
    TopFieldCollector results = TopFieldCollector.Create(sort, start + pageSize, false, false, false, false);
    // run the search
    multiSearcher.Search(query, results);
    // exact total hit count
    count = results.TotalHits;
    // fetch the documents for the current page
    var docs = results.TopDocs(start, pageSize).ScoreDocs;
    // map each document on the current page to the requested result type
    List<T> list = new List<T>();
    foreach (var scoreDoc in docs)
    {
        Document doc = multiSearcher.Doc(scoreDoc.Doc);
        var data = new IndexFactory(doc).Result as T;
        if (null != data)
        {
            list.Add(data);
        }
    }
    return list;
}
public IList<T> Retrieve<T>(string keyword, out int totalCount, int pageIndex = 1, int pageSize = 10) where T : class, new()
{
    using (IndexSearcher searcher = new IndexSearcher(directory, true))
    {
        List<string> queries = new List<string>();
        List<string> fields = new List<string>();
        List<Occur> flags = new List<Occur>();
        List<SortField> sortFields = new List<SortField>();
        PropertyInfo[] properties = typeof(T).GetProperties();
        foreach (var property in properties)
        {
            if (property.IsDefined(typeof(OccurAttribute), false))
            {
                OccurAttribute attribute = property.GetCustomAttribute(typeof(OccurAttribute)) as OccurAttribute;
                Occur occur = attribute.Occur;
                if (!occur.Equals(Occur.MUST_NOT))
                {
                    // queries, fields and flags must line up one-to-one; see the
                    // documentation of MultiFieldQueryParser.Parse
                    queries.Add(keyword);
                    fields.Add(property.Name);
                    flags.Add(occur);
                }
            }
            if (property.IsDefined(typeof(SortAttribute), false))
            {
                SortAttribute attribute = property.GetCustomAttribute(typeof(SortAttribute)) as SortAttribute;
                int sortField = attribute.Type;
                bool reverse = attribute.Reverse;
                sortFields.Add(new SortField(property.Name, sortField, reverse));
            }
        }

        Query query = MultiFieldQueryParser.Parse(Lucene.Net.Util.Version.LUCENE_30, queries?.ToArray(), fields?.ToArray(), flags?.ToArray(), analyzer);
        //Query queryR = new TermRangeQuery()
        TopDocs tds;
        int startRowIndex = (pageIndex - 1) * pageSize;
        // paging
        if (sortFields.Count > 0)
        {
            Sort sort = new Sort(sortFields?.ToArray());
            TopFieldCollector collector = TopFieldCollector.Create(sort, pageIndex * pageSize, false, false, false, false);
            searcher.Search(query, collector);
            // fetch the current page of results
            tds = collector.TopDocs(startRowIndex, pageSize);
        }
        else
        {
            TopScoreDocCollector collector = TopScoreDocCollector.Create(pageIndex * pageSize, false);
            searcher.Search(query, collector);
            tds = collector.TopDocs(startRowIndex, pageSize);
        }
        totalCount = tds.TotalHits;

        IList<T> list = new List<T>();
        foreach (ScoreDoc sd in tds.ScoreDocs)
        {
            Document doc = searcher.Doc(sd.Doc);
            T searchResult = new T();
            foreach (var property in properties)
            {
                string value = doc.Get(property.Name);
                if (!string.IsNullOrEmpty(value))
                {
                    Action<object, object> setValue = ReappearMember.CreatePropertySetter(property);
                    if (property.IsDefined(typeof(OccurAttribute), false))
                    {
                        setValue(searchResult, Preview(value, keyword));
                    }
                    else
                    {
                        setValue(searchResult, value);
                    }
                }
            }
            list.Add(searchResult);
        }
        return list;
    }
}
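// A hypothetical model for the generic Retrieve<T> above. OccurAttribute and
// SortAttribute are the custom attributes the method reflects over; their
// constructor signatures here are assumptions, inferred only from the
// properties the snippet reads (Occur, Type, Reverse):
public class Article
{
    [Occur(Occur.SHOULD)] // searched field, OR'd into the multi-field query
    public string Title { get; set; }

    [Occur(Occur.SHOULD)]
    public string Body { get; set; }

    [Sort(SortField.LONG, Reverse = true)] // results sorted by this field, descending
    public string PublishTime { get; set; }
}

// usage (hypothetical service instance):
// int total;
// IList<Article> page1 = searchService.Retrieve<Article>("lucene", out total, pageIndex: 1, pageSize: 10);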
/// <summary>
/// Retrieve suggestions, specifying whether all terms
/// must match ({@code allTermsRequired}) and whether the hits
/// should be highlighted ({@code doHighlight}).
/// </summary>
public virtual IList<LookupResult> Lookup(string key, HashSet<BytesRef> contexts, int num, bool allTermsRequired, bool doHighlight)
{
    if (searcherMgr == null)
    {
        throw new InvalidOperationException("suggester was not built");
    }

    BooleanClause.Occur occur;
    if (allTermsRequired)
    {
        occur = BooleanClause.Occur.MUST;
    }
    else
    {
        occur = BooleanClause.Occur.SHOULD;
    }

    TokenStream ts = null;
    BooleanQuery query;
    var matchedTokens = new HashSet<string>();
    string prefixToken = null;
    try
    {
        ts = queryAnalyzer.TokenStream("", new StringReader(key));
        //long t0 = System.currentTimeMillis();
        ts.Reset();
        var termAtt = ts.AddAttribute<CharTermAttribute>();
        var offsetAtt = ts.AddAttribute<OffsetAttribute>();
        string lastToken = null;
        query = new BooleanQuery();
        int maxEndOffset = -1;
        matchedTokens = new HashSet<string>();
        while (ts.IncrementToken())
        {
            if (lastToken != null)
            {
                matchedTokens.Add(lastToken);
                query.Add(new TermQuery(new Term(TEXT_FIELD_NAME, lastToken)), occur);
            }
            lastToken = termAtt.ToString();
            if (lastToken != null)
            {
                maxEndOffset = Math.Max(maxEndOffset, offsetAtt.EndOffset());
            }
        }
        ts.End();

        if (lastToken != null)
        {
            Query lastQuery;
            if (maxEndOffset == offsetAtt.EndOffset())
            {
                // Use PrefixQuery (or the ngram equivalent) when
                // there were no trailing discarded chars in the
                // string (e.g. whitespace), so that if the query does
                // not end with a space we show prefix matches for
                // that token:
                lastQuery = GetLastTokenQuery(lastToken);
                prefixToken = lastToken;
            }
            else
            {
                // Use TermQuery for an exact match if there were
                // trailing discarded chars (e.g. whitespace), so
                // that if the query ends with a space we only show
                // exact matches for that term:
                matchedTokens.Add(lastToken);
                lastQuery = new TermQuery(new Term(TEXT_FIELD_NAME, lastToken));
            }
            if (lastQuery != null)
            {
                query.Add(lastQuery, occur);
            }
        }

        if (contexts != null)
        {
            BooleanQuery sub = new BooleanQuery();
            query.Add(sub, BooleanClause.Occur.MUST);
            foreach (BytesRef context in contexts)
            {
                // NOTE: we "should" wrap this in
                // ConstantScoreQuery, or maybe send this as a
                // Filter instead to search, but since all of
                // these are MUST'd, the change to the score won't
                // affect the overall ranking.  Since we indexed
                // as DOCS_ONLY, the perf should be the same
                // either way (no freq int[] blocks to decode):

                // TODO: if we had a BinaryTermField we could fix
                // this "must be valid utf8" limitation:
                sub.Add(new TermQuery(new Term(CONTEXTS_FIELD_NAME, context.Utf8ToString())), BooleanClause.Occur.SHOULD);
            }
        }
    }
    finally
    {
        IOUtils.CloseWhileHandlingException(ts);
    }

    // TODO: we could allow blended sort here, combining
    // weight w/ score.  Now we ignore score and sort only
    // by weight:
    Query finalQuery = FinishQuery(query, allTermsRequired);
    //System.out.println("finalQuery=" + query);

    // Sort by weight, descending:
    TopFieldCollector c = TopFieldCollector.Create(SORT, num, true, false, false, false);

    // We sorted postings by weight during indexing, so we
    // only retrieve the first num hits now:
    Collector c2 = new EarlyTerminatingSortingCollector(c, SORT, num);
    IndexSearcher searcher = searcherMgr.Acquire();
    IList<LookupResult> results = null;
    try
    {
        //System.out.println("got searcher=" + searcher);
        searcher.Search(finalQuery, c2);
        TopFieldDocs hits = (TopFieldDocs)c.TopDocs();

        // Slower way if postings are not pre-sorted by weight:
        // hits = searcher.search(query, null, num, SORT);
        results = createResults(searcher, hits, num, key, doHighlight, matchedTokens, prefixToken);
    }
    finally
    {
        searcherMgr.Release(searcher);
    }
    //System.out.println((System.currentTimeMillis() - t0) + " msec for infix suggest");
    //System.out.println(results);
    return results;
}
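// For context: in Lucene's AnalyzingInfixSuggester, which this Lookup method
// mirrors, the SORT constant used above is a descending sort on the stored
// weight field, roughly:
//
//     private static readonly Sort SORT = new Sort(new SortField("weight", SortField.Type_e.LONG, true));
//
// That pre-sorted weight order is what lets EarlyTerminatingSortingCollector
// stop collecting once the first num weight-ordered hits per segment are seen.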
/// <summary>
/// Per-field group counts (with paging support)
/// </summary>
/// <param name="indexSearcher"></param>
/// <param name="pageSize"></param>
/// <param name="pageIndex"></param>
/// <param name="query"></param>
/// <param name="recordCount"></param>
/// <param name="groupKeyValueList">grouping result</param>
/// <param name="filter"></param>
/// <param name="sortFields"></param>
/// <returns></returns>
public static Dictionary<Document, ScoreDoc> SelectGroup(IndexSearcher indexSearcher, int pageSize, int pageIndex, Query query, out int recordCount, out GroupKeyValueList groupKeyValueList, Filter filter = null, params SortField[] sortFields)
{
    recordCount = 0;
    groupKeyValueList = null;
    Dictionary<Document, ScoreDoc> dictPager = new Dictionary<Document, ScoreDoc>();
    int maxDoc = indexSearcher.IndexReader.MaxDoc;
    if (maxDoc == 0)
    { // MaxDoc is the largest doc id the index can hand out; 0 means an empty index
        return dictPager;
    }

    TopDocs docs = null;
    string key = string.Format(CACHE_KEY, query.ToString(), string.Join("_", sortFields.Select(item => item.ToString())), filter == null ? string.Empty : filter.ToString());
    string listKey = key + string.Format(",PAGE_INDEX:{0},PAGE_SIZE:{1}", pageIndex, pageSize);
    string groupKey = "GROUP:::" + key;
    docs = MemCache.MemoryCacheBus.Get(listKey) as TopDocs;
    groupKeyValueList = MemCache.MemoryCacheBus.Get(groupKey) as GroupKeyValueList;
    if (docs == null || groupKeyValueList == null)
    {
        //https://searchcode.com/codesearch/view/7233825/
        int start = pageIndex * pageSize;
        start = Math.Min(start, maxDoc);
        using (GroupCollectorField groupCollectorField = new GroupCollectorField("NameValue"))
        {
            if (sortFields.Length > 0)
            { // sort first, then page
                Sort sort = new Sort();
                sort.SetSort(sortFields);
                TopFieldCollector topFieldCollector = TopFieldCollector.Create(sort, start, true, false, false, !query.CreateWeight(indexSearcher).GetScoresDocsOutOfOrder());
                GroupCollectorWrapper groupCollectorWrapper = new GroupCollectorWrapper(start, topFieldCollector, groupCollectorField);
                indexSearcher.Search(query, filter, groupCollectorWrapper);
                start = start - pageSize;
                if (start < 0)
                {
                    start = 0;
                }
                // one page of hits, beginning at offset start
                docs = topFieldCollector.TopDocs(start, pageSize);
            }
            else
            { // no sorting, paging only
                TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.Create(start + 1, !query.CreateWeight(indexSearcher).GetScoresDocsOutOfOrder());
                GroupCollectorWrapper groupCollectorWrapper = new GroupCollectorWrapper(start, topScoreDocCollector, groupCollectorField);
                indexSearcher.Search(query, filter, groupCollectorWrapper);
                start = start - pageSize;
                if (start < 0)
                {
                    start = 0;
                }
                // one page of hits, beginning at offset start
                docs = topScoreDocCollector.TopDocs(start, pageSize);
            }

            groupCollectorField.GroupKeyValueDocCountList.Sort(); // sort the group counts
            groupKeyValueList = ObjectExtensions.Clone(groupCollectorField.GroupKeyValueDocCountList);
            if (docs != null && groupKeyValueList != null)
            {
                TimeSpan timeSpan = TimeSpan.FromMinutes(CACHE_TIME);
                MemCache.MemoryCacheBus.Insert(groupKey, groupKeyValueList, timeSpan);
                MemCache.MemoryCacheBus.Insert(listKey, docs, timeSpan);
            }
        }
    }

    #region return the result set of the search
    if (docs != null)
    {
        recordCount = docs.TotalHits;          // total number of hits
        ScoreDoc[] scoreDocs = docs.ScoreDocs; // the hits themselves
        if (scoreDocs != null)
        {
            foreach (ScoreDoc scoreDoc in scoreDocs)
            {
                if (scoreDoc.Doc != int.MaxValue && scoreDoc.Score != System.Single.NegativeInfinity)
                {
                    dictPager.Add(indexSearcher.Doc(scoreDoc.Doc), scoreDoc);
                }
            }
        }
    }
    if (dictPager.Count == 0)
    { // no matching results: drop the cached page. wyp
        MemCache.MemoryCacheBus.Delete(listKey);
    }
    if (groupKeyValueList != null && groupKeyValueList.Count == 0)
    { // no matching groups: drop the cached grouping. wyp
        MemCache.MemoryCacheBus.Delete(groupKey);
    }
    #endregion

    groupKeyValueList = groupKeyValueList ?? new GroupKeyValueList(0);
    return dictPager;
}
/// <summary>
/// Paged query (efficient). wyp
/// http://blog.csdn.net/smallearth/article/details/7980226
/// </summary>
/// <param name="indexSearcher"></param>
/// <param name="pageSize"></param>
/// <param name="pageIndex"></param>
/// <param name="query"></param>
/// <param name="recordCount"></param>
/// <param name="filter"></param>
/// <param name="sortFields"></param>
/// <returns></returns>
public static Dictionary<Document, ScoreDoc> SelectAfter(IndexSearcher indexSearcher, int pageSize, int pageIndex, Query query, out int recordCount, string culture = "", Filter filter = null, params SortField[] sortFields)
{
    recordCount = 0;
    Dictionary<Document, ScoreDoc> dictPager = new Dictionary<Document, ScoreDoc>(pageSize);
    int maxDoc = indexSearcher.IndexReader.MaxDoc;
    if (maxDoc == 0)
    { // an empty index has nothing to page through
        return dictPager;
    }

    TopDocs docs = null;
    string key = string.Format(CACHE_KEY, query.ToString(), string.Join("_", sortFields.Select(item => item.ToString())), filter == null ? string.Empty : filter.ToString(), culture.ToUpper());

#if LAST_SCORE_DOC
    #region fetch the last document of the previous page, then collect the next pageSize documents starting from it
    //http://blog.csdn.net/smallearth/article/details/7980226
    ScoreDoc lastScoreDoc = GetLastScoreDoc(indexSearcher, pageSize, pageIndex, query, filter, culture, sortFields);
    if (lastScoreDoc == null)
    { // lastScoreDoc == null means the first page is wanted. wyp
        key += string.Format(",PAGE_SIZE:{0}", pageSize);
        docs = MemCache.MemoryCacheBus.Get(key) as TopDocs;
        if (docs == null)
        {
            if (sortFields.Length > 0)
            { // with sorting
                Sort sort = new Sort();
                sort.SetSort(sortFields);
                docs = indexSearcher.Search(query, filter, pageSize, sort); // only the first pageSize records
            }
            else
            { // without sorting
                docs = indexSearcher.Search(query, filter, pageSize); // only the first pageSize records
            }
            if (docs != null)
            {
                MemCache.MemoryCacheBus.Insert(key, docs, TimeSpan.FromMinutes(CACHE_TIME));
            }
        }
    }
    else
    {
        if (lastScoreDoc.Doc < maxDoc)
        {
            key += string.Format(",DOC:{0},PAGE_INDEX:{1},PAGE_SIZE:{2}", lastScoreDoc.Doc, pageIndex, pageSize);
            docs = MemCache.MemoryCacheBus.Get(key) as TopDocs;
            if (docs == null)
            {
                if (sortFields.Length > 0)
                { // sort first, then page
                    int start = pageIndex * pageSize;
                    start = Math.Min(start, maxDoc);
                    Sort sort = new Sort();
                    sort.SetSort(sortFields);
                    TopFieldCollector topFieldCollector = TopFieldCollector.Create(sort, start, true, false, false, !query.CreateWeight(indexSearcher).GetScoresDocsOutOfOrder());
                    indexSearcher.Search(query, filter, topFieldCollector);
                    start = start - pageSize;
                    if (start < 0)
                    {
                        start = 0;
                    }
                    docs = topFieldCollector.TopDocs(start, pageSize); // one page of hits from offset start
                }
                else
                { // no sorting, paging only
                    //http://search-lucene.com/c/Lucene:core/src/java/org/apache/lucene/search/IndexSearcher.java || IndexSearcher line 482
                    TopScoreDocCollectorEx topScoreDocCollectorEx = TopScoreDocCollectorEx.Create(pageSize, lastScoreDoc, !query.CreateWeight(indexSearcher).GetScoresDocsOutOfOrder());
                    indexSearcher.Search(query, filter, topScoreDocCollectorEx);
                    docs = topScoreDocCollectorEx.TopDocs();
                }
                if (docs != null)
                {
                    MemCache.MemoryCacheBus.Insert(key, docs, TimeSpan.FromMinutes(CACHE_TIME));
                }
            }
        }
    }
    #endregion
#else
    #region fetch the first (pageIndex + 1) pages of documents, then take the last page out of that result
    key += string.Format(",PAGE_INDEX:{0},PAGE_SIZE:{1}", pageIndex, pageSize);
    docs = WebCache.DataCacheBus.Get(key) as TopDocs;
    if (docs == null)
    {
        //https://searchcode.com/codesearch/view/7233825/
        int start = pageIndex * pageSize;
        start = Math.Min(start, maxDoc);
        if (sortFields.Length > 0)
        { // sort first, then page
            Sort sort = new Sort();
            sort.SetSort(sortFields);
            TopFieldCollector topFieldCollector = TopFieldCollector.Create(sort, start, true, false, false, !query.CreateWeight(indexSearcher).GetScoresDocsOutOfOrder());
            indexSearcher.Search(query, filter, topFieldCollector);
            start = start - pageSize;
            if (start < 0)
            {
                start = 0;
            }
            docs = topFieldCollector.TopDocs(start, pageSize); // one page of hits from offset start
        }
        else
        { // no sorting, paging only
            TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.Create(start + 1, !query.CreateWeight(indexSearcher).GetScoresDocsOutOfOrder());
            indexSearcher.Search(query, filter, topScoreDocCollector);
            start = start - pageSize;
            if (start < 0)
            {
                start = 0;
            }
            docs = topScoreDocCollector.TopDocs(start, pageSize); // one page of hits from offset start
        }
        if (docs != null)
        {
            WebCache.DataCacheBus.Insert(key, docs, TimeSpan.FromMinutes(CACHE_TIME));
        }
    }
    #endregion
#endif

    #region return the result set of the search
    if (docs != null)
    {
        recordCount = docs.TotalHits;          // total number of hits
        ScoreDoc[] scoreDocs = docs.ScoreDocs; // the hits themselves
        if (scoreDocs != null)
        {
            foreach (ScoreDoc scoreDoc in scoreDocs)
            {
                if (scoreDoc.Doc != int.MaxValue && scoreDoc.Score != System.Single.NegativeInfinity)
                {
#if LAST_SCORE_DOC
                    lastScoreDoc = scoreDoc; // remember the last ScoreDoc of the current page
#endif
                    dictPager.Add(indexSearcher.Doc(scoreDoc.Doc), scoreDoc);
                }
            }
        }
    }
    if (dictPager.Count == 0)
    { // no matching results: drop the cache entry. wyp
        MemCache.MemoryCacheBus.Delete(key);
    }
#if LAST_SCORE_DOC
    else if (lastScoreDoc != null)
    {
        // Pre-cache the last ScoreDoc of this page so that a search for the
        // next page can pick it up as its starting point. wyp
        key = string.Format(CACHE_KEY, query.ToString(), string.Join("_", sortFields.Select(item => item.ToString())), filter == null ? string.Empty : filter.ToString(), culture.ToUpper())
              + string.Format(",PAGE_INDEX:{0},PAGE_SIZE:{1}", pageIndex + 1, pageSize);
        MemCache.MemoryCacheBus.Insert(key, lastScoreDoc, TimeSpan.FromMinutes(CACHE_TIME));
    }
#endif
    #endregion

    return dictPager;
}
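// For comparison: newer Lucene.Net versions (4.x and later) ship deep paging
// as IndexSearcher.SearchAfter, which does what the LAST_SCORE_DOC branch
// above hand-rolls. A minimal sketch (assumes an already-open searcher and
// query; not part of the original code):
TopDocs firstPage = searcher.Search(query, pageSize);
if (firstPage.ScoreDocs.Length > 0)
{
    // resume collecting right after the last hit of the previous page,
    // instead of re-collecting start + pageSize hits and slicing
    ScoreDoc last = firstPage.ScoreDocs[firstPage.ScoreDocs.Length - 1];
    TopDocs secondPage = searcher.SearchAfter(last, query, pageSize);
}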