/// <summary>
/// Lazily streams index query results, de-duplicating documents across pages and
/// over-fetching from the underlying Lucene index until the requested page size of
/// *unique, includable* results has been yielded (or the index is exhausted).
/// </summary>
/// <param name="token">Checked inside the inner fetch loop; cancellation surfaces as
/// <see cref="OperationCanceledException"/> at the point of enumeration (iterator method —
/// nothing runs until the caller enumerates).</param>
/// <returns>At most <c>indexQuery.PageSize</c> results, yielded one at a time.</returns>
public IEnumerable<IndexQueryResult> Query(CancellationToken token)
{
    parent.MarkQueried();
    // Lucene parsing/formatting is culture-sensitive; force invariant culture for the
    // duration of the query so results don't vary with the thread's current culture.
    using (IndexStorage.EnsureInvariantCulture())
    {
        AssertQueryDoesNotContainFieldsThatAreNotIndexed(indexQuery, parent.viewGenerator);
        IndexSearcher indexSearcher;
        // GetSearcher presumably pins the searcher for the lifetime of the enumeration — TODO confirm.
        using (parent.GetSearcher(out indexSearcher))
        {
            // NOTE(review): the parameterless Query() overload wraps this in
            // ApplyIndexTriggers(...) but this overload does not — confirm whether
            // that asymmetry is intentional.
            var luceneQuery = GetLuceneQuery();

            // Working copies of the paging window; both are inflated below when
            // duplicates/filtered results force us to read deeper into the index.
            int start = indexQuery.Start;
            int pageSize = indexQuery.PageSize;
            int returnedResults = 0;
            int skippedResultsInCurrentLoop = 0;
            bool readAll;
            bool adjustStart = true; // only the first fetch adjusts 'start' for docs already seen on previous pages
            var recorder = new DuplicateDocumentRecorder(indexSearcher,
                parent,
                documentsAlreadySeenInPreviousPage,
                alreadyReturned,
                fieldsToFetch,
                parent.IsMapReduce || fieldsToFetch.IsProjection);
            do
            {
                if (skippedResultsInCurrentLoop > 0)
                {
                    start = start + pageSize - (start - indexQuery.Start); // need to "undo" the index adjustment
                    // trying to guesstimate how many results we will need to read from the index
                    // to get enough unique documents to match the page size
                    pageSize = Math.Max(2, skippedResultsInCurrentLoop) * pageSize;
                    skippedResultsInCurrentLoop = 0;
                }

                TopDocs search;
                int moreRequired;
                do
                {
                    token.ThrowIfCancellationRequested();
                    search = ExecuteQuery(indexSearcher, luceneQuery, start, pageSize, indexQuery);
                    // Recorder tracks documents already returned (distinct queries) and may
                    // bump 'start' (by ref) and report how many extra hits we must fetch.
                    moreRequired = recorder.RecordResultsAlreadySeenForDistinctQuery(search, adjustStart, pageSize, ref start);
                    pageSize += moreRequired * 2; // over-fetch: doubling the shortfall to reduce round trips
                } while (moreRequired > 0);
                indexQuery.TotalSize.Value = search.TotalHits;
                adjustStart = false;

                // NOTE(review): highlighter setup runs once per outer paging iteration;
                // presumably idempotent — confirm.
                SetupHighlighter(luceneQuery);

                for (var i = start; (i - start) < pageSize && i < search.ScoreDocs.Length; i++)
                {
                    var scoreDoc = search.ScoreDocs[i];
                    var document = indexSearcher.Doc(scoreDoc.Doc);
                    var indexQueryResult = parent.RetrieveDocument(document, fieldsToFetch, scoreDoc);
                    if (ShouldIncludeInResults(indexQueryResult) == false)
                    {
                        // Filtered out (duplicate or excluded) — count it so the next outer
                        // iteration widens the fetch window accordingly.
                        indexQuery.SkippedResults.Value++;
                        skippedResultsInCurrentLoop++;
                        continue;
                    }

                    AddHighlighterResults(indexSearcher, scoreDoc, indexQueryResult);

                    returnedResults++;
                    yield return indexQueryResult;
                    if (returnedResults == indexQuery.PageSize)
                        yield break;
                }
                // If the last fetch returned every hit, there is nothing more to read
                // regardless of how many results we still owe the caller.
                readAll = search.TotalHits == search.ScoreDocs.Length;
            } while (returnedResults < indexQuery.PageSize && readAll == false);
        }
    }
}
/// <summary>
/// Lazily streams index query results with inline field-highlighting support,
/// de-duplicating documents across pages and over-fetching from the Lucene index
/// until the requested page size of unique, includable results has been yielded
/// (or the index is exhausted).
/// </summary>
/// <returns>At most <c>indexQuery.PageSize</c> results, yielded one at a time.</returns>
/// <remarks>
/// NOTE(review): near-duplicate of the <c>Query(CancellationToken)</c> overload but with
/// no cancellation support and with highlighting done inline instead of via
/// SetupHighlighter/AddHighlighterResults — consider consolidating.
/// </remarks>
public IEnumerable<IndexQueryResult> Query()
{
    parent.MarkQueried();
    // Lucene parsing/formatting is culture-sensitive; force invariant culture for the
    // duration of the query so results don't vary with the thread's current culture.
    using (IndexStorage.EnsureInvariantCulture())
    {
        AssertQueryDoesNotContainFieldsThatAreNotIndexed();
        IndexSearcher indexSearcher;
        using (parent.GetSearcher(out indexSearcher))
        {
            // Unlike the token-taking overload, this one runs the query through the
            // registered index triggers.
            var luceneQuery = ApplyIndexTriggers(GetLuceneQuery());

            // Working copies of the paging window; both are inflated below when
            // duplicates/filtered results force us to read deeper into the index.
            int start = indexQuery.Start;
            int pageSize = indexQuery.PageSize;
            int returnedResults = 0;
            int skippedResultsInCurrentLoop = 0;
            bool readAll;
            bool adjustStart = true; // only the first fetch adjusts 'start' for docs already seen on previous pages
            var recorder = new DuplicateDocumentRecorder(indexSearcher,
                parent,
                documentsAlreadySeenInPreviousPage,
                alreadyReturned,
                fieldsToFetch,
                parent.IsMapReduce || fieldsToFetch.IsProjection);
            do
            {
                if (skippedResultsInCurrentLoop > 0)
                {
                    start = start + pageSize - (start - indexQuery.Start); // need to "undo" the index adjustment
                    // trying to guesstimate how many results we will need to read from the index
                    // to get enough unique documents to match the page size
                    pageSize = Math.Max(2, skippedResultsInCurrentLoop) * pageSize;
                    skippedResultsInCurrentLoop = 0;
                }

                TopDocs search;
                int moreRequired;
                do
                {
                    search = ExecuteQuery(indexSearcher, luceneQuery, start, pageSize, indexQuery);
                    // Recorder tracks documents already returned (distinct queries) and may
                    // bump 'start' (by ref) and report how many extra hits we must fetch.
                    moreRequired = recorder.RecordResultsAlreadySeenForDistinctQuery(search, adjustStart, ref start);
                    pageSize += moreRequired * 2; // over-fetch: doubling the shortfall to reduce round trips
                } while (moreRequired > 0);
                indexQuery.TotalSize.Value = search.TotalHits;
                adjustStart = false;

                // Build the highlighter lazily — only when the query actually asks for
                // highlighted fields. Pre/post tags fall back to Lucene's colored defaults.
                FastVectorHighlighter highlighter = null;
                FieldQuery fieldQuery = null;
                if (indexQuery.HighlightedFields != null && indexQuery.HighlightedFields.Length > 0)
                {
                    highlighter = new FastVectorHighlighter(
                        FastVectorHighlighter.DEFAULT_PHRASE_HIGHLIGHT,
                        FastVectorHighlighter.DEFAULT_FIELD_MATCH,
                        new SimpleFragListBuilder(),
                        new SimpleFragmentsBuilder(
                            indexQuery.HighlighterPreTags != null && indexQuery.HighlighterPreTags.Any()
                                ? indexQuery.HighlighterPreTags
                                : BaseFragmentsBuilder.COLORED_PRE_TAGS,
                            indexQuery.HighlighterPostTags != null && indexQuery.HighlighterPostTags.Any()
                                ? indexQuery.HighlighterPostTags
                                : BaseFragmentsBuilder.COLORED_POST_TAGS));
                    fieldQuery = highlighter.GetFieldQuery(luceneQuery);
                }

                for (var i = start; (i - start) < pageSize && i < search.ScoreDocs.Length; i++)
                {
                    var scoreDoc = search.ScoreDocs[i];
                    var document = indexSearcher.Doc(scoreDoc.Doc);
                    var indexQueryResult = parent.RetrieveDocument(document, fieldsToFetch, scoreDoc.Score);
                    if (ShouldIncludeInResults(indexQueryResult) == false)
                    {
                        // Filtered out (duplicate or excluded) — count it so the next outer
                        // iteration widens the fetch window accordingly.
                        indexQuery.SkippedResults.Value++;
                        skippedResultsInCurrentLoop++;
                        continue;
                    }

                    if (highlighter != null)
                    {
                        // Collect best fragments per highlighted field, dropping fields
                        // that produced no fragments.
                        var highlightings =
                            from highlightedField in this.indexQuery.HighlightedFields
                            select new
                            {
                                highlightedField.Field,
                                highlightedField.FragmentsField,
                                Fragments = highlighter.GetBestFragments(
                                    fieldQuery,
                                    indexSearcher.IndexReader,
                                    scoreDoc.Doc,
                                    highlightedField.Field,
                                    highlightedField.FragmentLength,
                                    highlightedField.FragmentCount)
                            }
                            into fieldHighlitings
                            where fieldHighlitings.Fragments != null &&
                                  fieldHighlitings.Fragments.Length > 0
                            select fieldHighlitings;

                        if (fieldsToFetch.IsProjection || parent.IsMapReduce)
                        {
                            // Projections / map-reduce: fragments are written into the
                            // projection under the configured FragmentsField.
                            foreach (var highlighting in highlightings)
                                if (!string.IsNullOrEmpty(highlighting.FragmentsField))
                                    indexQueryResult.Projection[highlighting.FragmentsField] =
                                        new RavenJArray(highlighting.Fragments);
                        }
                        else
                            // Plain documents: fragments travel on the result itself,
                            // keyed by field name.
                            indexQueryResult.Highligtings = highlightings
                                .ToDictionary(x => x.Field, x => x.Fragments);
                    }

                    returnedResults++;
                    yield return indexQueryResult;
                    if (returnedResults == indexQuery.PageSize)
                        yield break;
                }
                // If the last fetch returned every hit, there is nothing more to read
                // regardless of how many results we still owe the caller.
                readAll = search.TotalHits == search.ScoreDocs.Length;
            } while (returnedResults < indexQuery.PageSize && readAll == false);
        }
    }
}