public abstract Scorer ( Lucene.Net.Index.IndexReader reader, bool scoreDocsInOrder, bool topScorer ) : Lucene.Net.Search.Scorer | ||
reader | Lucene.Net.Index.IndexReader |
/// the |
scoreDocsInOrder | bool | specifies whether in-order scoring of documents is required. Note
/// that if set to false (i.e., out-of-order scoring is required),
/// this method can return whatever scoring mode it supports, as every
/// in-order scorer is also an out-of-order one. However, an
/// out-of-order scorer may not support |
topScorer | bool |
/// if true, |
Returns | Lucene.Net.Search.Scorer |
// Filtered search core (group-limited variant): leapfrogs the filter's
// DocIdSet iterator against the query scorer. Each side repeatedly
// advances to the other's position until both agree on a doc id, which
// is then collected; the loop ends when the scorer is exhausted or the
// group limit has been reached.
// NOTE(review): 'collector' is hard-cast to GroupCollector to read
// GroupLimitReached — this method assumes it is only ever invoked with
// that collector type; confirm against callers.
// NOTE(review): the initial scorer.DocID() call's result is discarded —
// presumably only to assert the scorer is un-positioned; confirm.
private void SearchWithFilter(IndexReader reader, Weight weight, Filter filter, Collector collector) { DocIdSet docIdSet = filter.GetDocIdSet(reader); if (docIdSet == null) return; Scorer scorer = weight.Scorer(reader, true, false); if (scorer == null) return; scorer.DocID(); DocIdSetIterator docIdSetIterator = docIdSet.Iterator(); if (docIdSetIterator == null) return; int target = docIdSetIterator.NextDoc(); int num = scorer.Advance(target); collector.SetScorer(scorer); while (true) { while (num != target) { if (num > target) target = docIdSetIterator.Advance(num); else num = scorer.Advance(target); } if (num != DocIdSetIterator.NO_MORE_DOCS && !((GroupCollector)collector).GroupLimitReached) { collector.Collect(num); target = docIdSetIterator.NextDoc(); num = scorer.Advance(target); } else break; } }
/// <summary>
/// Runs the query against every sub-reader in sequence. For each segment the
/// collector is repositioned first; then either a per-segment scorer is
/// created (in-order only when the collector requires it) and handed to
/// SearchWithScorer, or — when a filter is present — the filtered path is
/// taken instead.
/// </summary>
public override void Search(Weight weight, Filter filter, Collector collector)
{
    bool unfiltered = (filter == null);
    for (int i = 0; i < this.subReaders.Length; i++)
    {
        collector.SetNextReader(this.subReaders[i], this.docStarts[i]);
        if (unfiltered)
        {
            Scorer scorer = weight.Scorer(this.subReaders[i], !collector.AcceptsDocsOutOfOrder, true);
            if (scorer != null)
            {
                this.SearchWithScorer(this.subReaders[i], weight, scorer, collector);
            }
        }
        else
        {
            this.SearchWithFilter(this.subReaders[i], weight, filter, collector);
        }
    }
}
/// <summary>
/// Creates the scorer used to score our associated DisjunctionMaxQuery.
/// Builds one sub-scorer per clause weight, keeping only those that match at
/// least one document; returns null when no clause matches anything.
/// </summary>
public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer)
{
    Scorer[] subScorers = new Scorer[weights.Count];
    int count = 0;
    foreach (Weight w in weights)
    {
        Scorer sub = w.Scorer(reader, true, false);
        // Discard sub-scorers with no documents at all.
        if (sub != null && sub.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
        {
            subScorers[count++] = sub;
        }
    }
    if (count == 0)
    {
        // all scorers did not have documents
        return null;
    }
    return new DisjunctionMaxScorer(Enclosing_Instance.tieBreakerMultiplier, similarity, subScorers, count);
}
// Test collector: for every collected doc it lazily creates a parallel
// scorer over the same query (first call per segment), then steps that
// scorer forward using the next operation from Order[] (either a plain
// NextDoc() or an Advance(DocID()+1) "skip"). It then verifies that the
// parallel scorer lands on the same doc, that Score() is stable across
// two consecutive calls, and that it agrees with the collected score
// within MaxDiff. Any mismatch raises an Exception carrying a detailed
// diagnostic dump (doc ids, scores, operation order).
// NOTE(review): IOExceptions are rewrapped as plain Exception, keeping
// the original as InnerException but losing its type for catch clauses.
public override void Collect(int doc) { float score = sc.Score(); LastDoc[0] = doc; try { if (scorer == null) { Weight w = s.CreateNormalizedWeight(q); AtomicReaderContext context = ReaderContextArray[leafPtr]; scorer = w.Scorer(context, (context.AtomicReader).LiveDocs); } int op = Order[(Opidx[0]++) % Order.Length]; // System.out.println(op==skip_op ? // "skip("+(sdoc[0]+1)+")":"next()"); bool more = op == Skip_op?scorer.Advance(scorer.DocID() + 1) != DocIdSetIterator.NO_MORE_DOCS : scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS; int scorerDoc = scorer.DocID(); float scorerScore = scorer.Score(); float scorerScore2 = scorer.Score(); float scoreDiff = Math.Abs(score - scorerScore); float scorerDiff = Math.Abs(scorerScore2 - scorerScore); if (!more || doc != scorerDoc || scoreDiff > MaxDiff || scorerDiff > MaxDiff) { StringBuilder sbord = new StringBuilder(); for (int i = 0; i < Order.Length; i++) { sbord.Append(Order[i] == Skip_op ? " skip()" : " next()"); } throw new Exception("ERROR matching docs:" + "\n\t" + (doc != scorerDoc ? "--> " : "") + "doc=" + doc + ", scorerDoc=" + scorerDoc + "\n\t" + (!more ? "--> " : "") + "tscorer.more=" + more + "\n\t" + (scoreDiff > MaxDiff ? "--> " : "") + "scorerScore=" + scorerScore + " scoreDiff=" + scoreDiff + " maxDiff=" + MaxDiff + "\n\t" + (scorerDiff > MaxDiff ? "--> " : "") + "scorerScore2=" + scorerScore2 + " scorerDiff=" + scorerDiff + "\n\thitCollector.Doc=" + doc + " score=" + score + "\n\t Scorer=" + scorer + "\n\t Query=" + q + " " + q.GetType().Name + "\n\t Searcher=" + s + "\n\t Order=" + sbord + "\n\t Op=" + (op == Skip_op ? " skip()" : " next()")); } } catch (IOException e) { throw new Exception(e.Message, e); } }
/// <summary>
/// Test collector: for each collected doc, verifies that Advance() from every
/// intermediate position (lastDoc+1 .. doc) reaches the same doc with a
/// stable score that matches the collected score within maxDiff. A fresh
/// weight/scorer pair is built for every advance target so each check starts
/// from an un-positioned scorer.
/// </summary>
public override void Collect(int doc, IState state)
{
    float collectedScore = this.scorer.Score(null);
    try
    {
        for (int target = lastDoc[0] + 1; target <= doc; target++)
        {
            Weight w = q.Weight(s, null);
            Scorer advancing = w.Scorer(reader, true, false, null);
            Assert.IsTrue(advancing.Advance(target, null) != DocIdSetIterator.NO_MORE_DOCS, "query collected " + doc + " but skipTo(" + target + ") says no more docs!");
            Assert.AreEqual(doc, advancing.DocID(), "query collected " + doc + " but skipTo(" + target + ") got to " + advancing.DocID());
            float skipToScore = advancing.Score(null);
            // Score() must be idempotent on the same position.
            Assert.AreEqual(skipToScore, advancing.Score(null), maxDiff, "unstable skipTo(" + target + ") score!");
            Assert.AreEqual(collectedScore, skipToScore, maxDiff, "query assigned doc " + doc + " a score of <" + collectedScore + "> but skipTo(" + target + ") has <" + skipToScore + ">!");
        }
        lastDoc[0] = doc;
    }
    catch (System.IO.IOException e)
    {
        throw new System.SystemException("", e);
    }
}
// inherit javadoc
public override TopFieldDocs Search(Weight weight, Filter filter, int nDocs, Sort sort)
{
    Scorer scorer = weight.Scorer(reader);
    if (scorer == null)
    {
        // Nothing matches at all.
        return new TopFieldDocs(0, new ScoreDoc[0], sort.fields, System.Single.NegativeInfinity);
    }
    System.Collections.BitArray bits = null;
    if (filter != null)
    {
        bits = filter.Bits(reader);
    }
    FieldSortedHitQueue queue = new FieldSortedHitQueue(reader, sort.fields, nDocs);
    int[] totalHits = new int[1];
    scorer.Score(new AnonymousClassHitCollector1(bits, totalHits, queue, this));
    int size = queue.Size();
    ScoreDoc[] scoreDocs = new ScoreDoc[size];
    // The queue pops worst-first, so fill the array from the back to get
    // a best-first ordering.
    for (int i = size - 1; i >= 0; i--)
    {
        scoreDocs[i] = queue.FillFields((FieldDoc) queue.Pop());
    }
    return new TopFieldDocs(totalHits[0], scoreDocs, queue.GetFields(), queue.GetMaxScore());
}
// inherit javadoc
public override TopDocs Search(Weight weight, Filter filter, int nDocs)
{
    if (nDocs <= 0)
    {
        // null might be returned from hq.top() below.
        throw new System.ArgumentException("nDocs must be > 0");
    }
    Scorer scorer = weight.Scorer(reader);
    if (scorer == null)
    {
        return new TopDocs(0, new ScoreDoc[0], System.Single.NegativeInfinity);
    }
    System.Collections.BitArray bits = null;
    if (filter != null)
    {
        bits = filter.Bits(reader);
    }
    HitQueue queue = new HitQueue(nDocs);
    int[] totalHits = new int[1];
    scorer.Score(new AnonymousClassHitCollector(bits, totalHits, queue, nDocs, this));
    int size = queue.Size();
    ScoreDoc[] scoreDocs = new ScoreDoc[size];
    // The queue pops worst-first; fill from the back for best-first order.
    for (int i = size - 1; i >= 0; i--)
    {
        scoreDocs[i] = (ScoreDoc) queue.Pop();
    }
    float maxScore = (totalHits[0] == 0) ? System.Single.NegativeInfinity : scoreDocs[0].score;
    return new TopDocs(totalHits[0], scoreDocs, maxScore);
}
// Second-pass rescoring: sorts the first-pass hits by doc id, then walks
// them in a merge with the searcher's leaves, creating one scorer per
// segment as the walk crosses segment boundaries. Hits the query matches
// receive Combine(firstPassScore, true, queryScore); non-matches receive
// Combine(firstPassScore, false, 0). Finally the hits are re-sorted by
// the combined score and truncated to topN.
// NOTE(review): hits[0].Score at the end assumes firstPassTopDocs is
// non-empty — an empty ScoreDocs array would throw; confirm callers
// guarantee at least one hit.
public override TopDocs Rescore(IndexSearcher searcher, TopDocs firstPassTopDocs, int topN) { ScoreDoc[] hits = (ScoreDoc[])firstPassTopDocs.ScoreDocs.Clone(); Array.Sort(hits, new ComparatorAnonymousInnerClassHelper(this)); IList <AtomicReaderContext> leaves = searcher.IndexReader.Leaves; Weight weight = searcher.CreateNormalizedWeight(Query); // Now merge sort docIDs from hits, with reader's leaves: int hitUpto = 0; int readerUpto = -1; int endDoc = 0; int docBase = 0; Scorer scorer = null; while (hitUpto < hits.Length) { ScoreDoc hit = hits[hitUpto]; int docID = hit.Doc; AtomicReaderContext readerContext = null; while (docID >= endDoc) { readerUpto++; readerContext = leaves[readerUpto]; endDoc = readerContext.DocBase + readerContext.Reader.MaxDoc; } if (readerContext != null) { // We advanced to another segment: docBase = readerContext.DocBase; scorer = weight.Scorer(readerContext, null); } int targetDoc = docID - docBase; int actualDoc = scorer.DocID(); if (actualDoc < targetDoc) { actualDoc = scorer.Advance(targetDoc); } if (actualDoc == targetDoc) { // Query did match this doc: hit.Score = Combine(hit.Score, true, scorer.Score()); } else { // Query did not match this doc: Debug.Assert(actualDoc > targetDoc); hit.Score = Combine(hit.Score, false, 0.0f); } hitUpto++; } // TODO: we should do a partial sort (of only topN) // instead, but typically the number of hits is // smallish: Array.Sort(hits, new ComparatorAnonymousInnerClassHelper2(this)); if (topN < hits.Length) { ScoreDoc[] subset = new ScoreDoc[topN]; Array.Copy(hits, 0, subset, 0, topN); hits = subset; } return(new TopDocs(firstPassTopDocs.TotalHits, hits, hits[0].Score)); }
/// <summary>
/// Scores the query against each sub-reader in turn, repositioning the
/// collector before every segment. Without a filter, a per-segment scorer is
/// created (in-order only when the collector requires it) and driven to
/// completion; with a filter, the filtered search path is used instead.
/// </summary>
public override void Search(Weight weight, Filter filter, Collector collector)
{
    if (filter != null)
    {
        for (int i = 0; i < subReaders.Length; i++)
        {
            // search each subreader
            collector.SetNextReader(subReaders[i], docStarts[i]);
            SearchWithFilter(subReaders[i], weight, filter, collector);
        }
        return;
    }

    for (int i = 0; i < subReaders.Length; i++)
    {
        // search each subreader
        collector.SetNextReader(subReaders[i], docStarts[i]);
        Scorer scorer = weight.Scorer(subReaders[i], !collector.AcceptsDocsOutOfOrder(), true);
        if (scorer != null)
        {
            scorer.Score(collector);
        }
    }
}
// Explains a BooleanQuery's score for one document: walks the clause
// weights and clauses in lockstep, accumulating sub-explanations.
// Required clauses with a null scorer or a non-matching explanation mark
// the whole result as a failure, as do matching prohibited clauses;
// SHOULD matches are counted against minimumNumberShouldMatch. On
// success the summed value is scaled by the coord factor (skipped when
// coord is disabled or the factor is exactly 1, in which case the inner
// explanation is returned unwrapped).
public override Explanation Explain(AtomicReaderContext context, int doc) { int minShouldMatch = OuterInstance.MinimumNumberShouldMatch; ComplexExplanation sumExpl = new ComplexExplanation(); sumExpl.Description = "sum of:"; int coord = 0; float sum = 0.0f; bool fail = false; int shouldMatchCount = 0; IEnumerator <BooleanClause> cIter = OuterInstance.clauses.GetEnumerator(); for (IEnumerator <Weight> wIter = Weights.GetEnumerator(); wIter.MoveNext();) { Weight w = wIter.Current; cIter.MoveNext(); BooleanClause c = cIter.Current; if (w.Scorer(context, context.AtomicReader.LiveDocs) == null) { if (c.Required) { fail = true; Explanation r = new Explanation(0.0f, "no match on required clause (" + c.Query.ToString() + ")"); sumExpl.AddDetail(r); } continue; } Explanation e = w.Explain(context, doc); if (e.IsMatch) { if (!c.Prohibited) { sumExpl.AddDetail(e); sum += e.Value; coord++; } else { Explanation r = new Explanation(0.0f, "match on prohibited clause (" + c.Query.ToString() + ")"); r.AddDetail(e); sumExpl.AddDetail(r); fail = true; } if (c.Occur_ == Occur_e.SHOULD) { shouldMatchCount++; } } else if (c.Required) { Explanation r = new Explanation(0.0f, "no match on required clause (" + c.Query.ToString() + ")"); r.AddDetail(e); sumExpl.AddDetail(r); fail = true; } } if (fail) { sumExpl.Match = false; sumExpl.Value = 0.0f; sumExpl.Description = "Failure to meet condition(s) of required/prohibited clause(s)"; return(sumExpl); } else if (shouldMatchCount < minShouldMatch) { sumExpl.Match = false; sumExpl.Value = 0.0f; sumExpl.Description = "Failure to match minimum number " + "of optional clauses: " + minShouldMatch; return(sumExpl); } sumExpl.Match = 0 < coord ? true : false; sumExpl.Value = sum; float coordFactor = DisableCoord ? 
1.0f : Coord(coord, maxCoord); if (coordFactor == 1.0f) { return(sumExpl); // eliminate wrapper } else { ComplexExplanation result = new ComplexExplanation(sumExpl.IsMatch, sum * coordFactor, "product of:"); result.AddDetail(sumExpl); result.AddDetail(new Explanation(coordFactor, "coord(" + coord + "/" + maxCoord + ")")); return(result); } }
/// <summary>
/// Verifies that a BooleanQuery of randomly chosen SHOULD term clauses yields
/// the same documents and scores whether the scorer is driven purely with
/// NextDoc() or with a random interleaving of NextDoc() and Advance().
/// </summary>
public virtual void TestBS2DisjunctionNextVsAdvance()
{
    Directory d = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
    int numDocs = AtLeast(300);
    for (int docUpto = 0; docUpto < numDocs; docUpto++)
    {
        // Terms b..f are added with decreasing probability so the index
        // contains a mix of common and rare terms.
        string contents = "a";
        if (Random().Next(20) <= 16)
        {
            contents += " b";
        }
        if (Random().Next(20) <= 8)
        {
            contents += " c";
        }
        if (Random().Next(20) <= 4)
        {
            contents += " d";
        }
        if (Random().Next(20) <= 2)
        {
            contents += " e";
        }
        if (Random().Next(20) <= 1)
        {
            contents += " f";
        }
        Document doc = new Document();
        doc.Add(new TextField("field", contents, Field.Store.NO));
        w.AddDocument(doc);
    }
    w.ForceMerge(1);
    IndexReader r = w.Reader;
    IndexSearcher s = NewSearcher(r);
    w.Dispose();
    for (int iter = 0; iter < 10 * RANDOM_MULTIPLIER; iter++)
    {
        if (VERBOSE)
        {
            Console.WriteLine("iter=" + iter);
        }
        IList<string> terms = new List<string>(Arrays.AsList("a", "b", "c", "d", "e", "f"));
        int numTerms = TestUtil.NextInt(Random(), 1, terms.Count);
        while (terms.Count > numTerms)
        {
            terms.RemoveAt(Random().Next(terms.Count));
        }
        if (VERBOSE)
        {
            Console.WriteLine(" terms=" + terms);
        }
        BooleanQuery q = new BooleanQuery();
        foreach (string term in terms)
        {
            q.Add(new BooleanClause(new TermQuery(new Term("field", term)), BooleanClause.Occur.SHOULD));
        }
        Weight weight = s.CreateNormalizedWeight(q);
        Scorer scorer = weight.Scorer(s.LeafContexts[0], null);

        // First pass: just use .NextDoc() to gather all hits
        IList<ScoreDoc> hits = new List<ScoreDoc>();
        while (scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
        {
            hits.Add(new ScoreDoc(scorer.DocID(), scorer.Score()));
        }
        if (VERBOSE)
        {
            Console.WriteLine(" " + hits.Count + " hits");
        }

        // Now, randomly next/advance through the list and
        // verify exact match:
        for (int iter2 = 0; iter2 < 10; iter2++)
        {
            weight = s.CreateNormalizedWeight(q);
            scorer = weight.Scorer(s.LeafContexts[0], null);
            if (VERBOSE)
            {
                Console.WriteLine(" iter2=" + iter2);
            }
            int upto = -1;
            while (upto < hits.Count)
            {
                int nextUpto;
                int nextDoc;
                int left = hits.Count - upto;
                // BUGFIX: was Random().nextBoolean() — Java-style casing that
                // does not exist in .NET; the extension method is NextBoolean().
                if (left == 1 || Random().NextBoolean())
                {
                    // next
                    nextUpto = 1 + upto;
                    nextDoc = scorer.NextDoc();
                }
                else
                {
                    // advance
                    int inc = TestUtil.NextInt(Random(), 1, left - 1);
                    nextUpto = inc + upto;
                    nextDoc = scorer.Advance(hits[nextUpto].Doc);
                }
                if (nextUpto == hits.Count)
                {
                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, nextDoc);
                }
                else
                {
                    ScoreDoc hit = hits[nextUpto];
                    Assert.AreEqual(hit.Doc, nextDoc);
                    // Test for precise float equality:
                    Assert.IsTrue(hit.Score == scorer.Score(), "doc " + hit.Doc + " has wrong score: expected=" + hit.Score + " actual=" + scorer.Score());
                }
                upto = nextUpto;
            }
        }
    }
    r.Dispose();
    d.Dispose();
}
// Facet search over a single top-level reader. Without a filter, the
// scorer is driven doc-by-doc: docs the FacetValidator accepts are
// collected, otherwise the scorer jumps ahead to validator.NextTarget.
// With a filter, a three-way leapfrog runs between the scorer, the
// filter's iterator and the validator until either side is exhausted.
// NOTE(review): filter.GetDocIdSet(reader) is dereferenced without a
// null check, unlike other filtered-search variants in this file —
// confirm filters used here never return a null DocIdSet.
public override void Search(Weight weight, Filter filter, Collector results) { IndexReader reader = IndexReader; Scorer scorer = weight.Scorer(reader, true, false); if (scorer == null) { return; } results.SetScorer(scorer); results.SetNextReader(reader, 0); FacetValidator validator = CreateFacetValidator(); int target = 0; bool more; if (filter == null) { more = scorer.NextDoc()!=DocIdSetIterator.NO_MORE_DOCS; while (more) { target = scorer.DocID(); if (validator.Validate(target)) { results.Collect(target); more = scorer.NextDoc()!=DocIdSetIterator.NO_MORE_DOCS; } else { target = validator.NextTarget; more = scorer.Advance(target) != DocIdSetIterator.NO_MORE_DOCS; } } return; } DocIdSetIterator filterDocIdIterator = filter.GetDocIdSet(reader).Iterator(); // CHECKME: use ConjunctionScorer here? target = filterDocIdIterator.NextDoc(); if (target == DocIdSetIterator.NO_MORE_DOCS) { return; } int doc = -1; while (true) { if (doc < target) { doc = scorer.Advance(target); if (doc == DocIdSetIterator.NO_MORE_DOCS) { break; } } if (doc == target) // permitted by filter { if (validator.Validate(doc)) { results.Collect(doc); target = filterDocIdIterator.NextDoc(); if (target == DocIdSetIterator.NO_MORE_DOCS) { break; } else { continue; } } else { // skip to the next possible docid target = validator.NextTarget; } } else // doc > target { target = doc; } target = filterDocIdIterator.Advance(target); if (target == DocIdSetIterator.NO_MORE_DOCS) { break; } } }
// Pre-2.9 (HitCollector / Next() / SkipTo() API) search. Without a
// filter the scorer is simply run to completion. With a filter, the
// scorer and the filter's DocIdSetIterator leapfrog each other: a doc is
// collected only when both land on the same id, and whichever iterator
// is behind skips forward to the other's position.
// NOTE(review): filter.GetDocIdSet(reader) is assumed non-null here —
// confirm filters used with this legacy path never return null.
// inherit javadoc public override void Search(Weight weight, Filter filter, HitCollector results) { Scorer scorer = weight.Scorer(reader); if (scorer == null) return; if (filter == null) { scorer.Score(results); return; } DocIdSetIterator filterDocIdIterator = filter.GetDocIdSet(reader).Iterator(); // CHECKME: use ConjunctionScorer here? bool more = filterDocIdIterator.Next() && scorer.SkipTo(filterDocIdIterator.Doc()); while (more) { int filterDocId = filterDocIdIterator.Doc(); if (filterDocId > scorer.Doc() && !scorer.SkipTo(filterDocId)) { more = false; } else { int scorerDocId = scorer.Doc(); if (scorerDocId == filterDocId) // permitted by filter { results.Collect(scorerDocId, scorer.Score()); more = filterDocIdIterator.Next(); } else { more = filterDocIdIterator.SkipTo(scorerDocId); } } } }
// inherit javadoc
public override void Search(Weight weight, Filter filter, HitCollector results)
{
    HitCollector sink = results;
    if (filter != null)
    {
        // Wrap the collector so that only docs set in the filter's bits
        // are forwarded to 'results'.
        System.Collections.BitArray bits = filter.Bits(reader);
        sink = new AnonymousClassHitCollector(bits, results, this);
    }
    Scorer scorer = weight.Scorer(reader);
    if (scorer == null)
    {
        return;
    }
    scorer.Score(sink);
}
/// <summary>
/// Builds a filtered scorer, choosing between random access and leapfrog.
/// Returns null when the filter matches nothing. When the filter exposes
/// random-access Bits and the heuristic approves, the inner scorer is
/// returned directly with the filter bits as acceptDocs; otherwise a
/// leapfrog scorer pre-advanced to the first filter doc is used.
/// </summary>
public override Scorer FilteredScorer(AtomicReaderContext context, Weight weight, DocIdSet docIdSet)
{
    DocIdSetIterator filterIterator = docIdSet.GetIterator();
    if (filterIterator == null)
    {
        // this means the filter does not accept any documents.
        return null;
    }

    int firstFilterDoc = filterIterator.NextDoc();
    if (firstFilterDoc == DocIdSetIterator.NO_MORE_DOCS)
    {
        return null;
    }

    Bits randomAccessBits = docIdSet.GetBits(); // force if RA is requested
    if (randomAccessBits != null && UseRandomAccess(randomAccessBits, firstFilterDoc))
    {
        // if we are using random access, we return the inner scorer, just with other acceptDocs
        return weight.Scorer(context, randomAccessBits);
    }

    Debug.Assert(firstFilterDoc > -1);
    // we are gonna advance() this scorer, so we set inorder=true/toplevel=false
    // we pass null as acceptDocs, as our filter has already respected acceptDocs, no need to do twice
    Scorer inner = weight.Scorer(context, null);
    if (inner == null)
    {
        return null;
    }
    // TODO once we have way to figure out if we use RA or LeapFrog we can remove this scorer
    return new PrimaryAdvancedLeapFrogScorer(weight, firstFilterDoc, filterIterator, inner);
}
/// <returns> A good old 1.4 Scorer
/// </returns>
public virtual Scorer Scorer(IndexReader reader)
{
    // First see if the (faster) ConjunctionScorer will work.  It applies only
    // when every clause is required and no clause is itself a BooleanQuery:
    // a BooleanScorer does not always deliver hits in document order, so it
    // cannot implement skipTo() correctly, which ConjunctionScorer requires
    // of its sub-scorers.
    bool conjunctionApplicable = true;
    for (int i = 0; i < weights.Count; i++)
    {
        BooleanClause clause = (BooleanClause)Enclosing_Instance.clauses[i];
        if (!clause.IsRequired() || clause.GetQuery() is BooleanQuery)
        {
            conjunctionApplicable = false;
            break;
        }
    }

    if (conjunctionApplicable)
    {
        // ConjunctionScorer is okay
        ConjunctionScorer conjunction = new ConjunctionScorer(similarity);
        for (int i = 0; i < weights.Count; i++)
        {
            Scorer sub = ((Weight)weights[i]).Scorer(reader);
            if (sub == null)
            {
                // A required clause with no matches means no hits at all.
                return null;
            }
            conjunction.Add(sub);
        }
        return conjunction;
    }

    // Use good-old BooleanScorer instead.
    BooleanScorer boolScorer = new BooleanScorer(similarity);
    for (int i = 0; i < weights.Count; i++)
    {
        BooleanClause clause = (BooleanClause)Enclosing_Instance.clauses[i];
        Scorer sub = ((Weight)weights[i]).Scorer(reader);
        if (sub != null)
        {
            boolScorer.Add(sub, clause.IsRequired(), clause.IsProhibited());
        }
        else if (clause.IsRequired())
        {
            return null;
        }
    }
    return boolScorer;
}
/// <summary>
/// Builds a leapfrog filtered scorer. Returns null when the filter matches
/// nothing or the query has no scorer for this segment; otherwise pairs the
/// filter iterator with the query scorer, leading with whichever side this
/// strategy prefers (ScorerFirst).
/// </summary>
public override Scorer FilteredScorer(AtomicReaderContext context, Weight weight, DocIdSet docIdSet)
{
    DocIdSetIterator filterIterator = docIdSet.GetIterator();
    if (filterIterator == null)
    {
        // this means the filter does not accept any documents.
        return null;
    }

    // we pass null as acceptDocs, as our filter has already respected acceptDocs, no need to do twice
    Scorer inner = weight.Scorer(context, null);
    if (inner == null)
    {
        return null;
    }

    return ScorerFirst
        ? new LeapFrogScorer(weight, inner, filterIterator, inner)
        : new LeapFrogScorer(weight, filterIterator, inner, inner);
}
// Legacy (IndexReader-based API) explanation of a BooleanQuery score:
// walks the clause weights and clauses in lockstep, summing matching
// non-prohibited sub-explanations, counting coord and SHOULD matches,
// and failing the result on matching prohibited clauses or unmatched
// required clauses. On success the sum is scaled by
// similarity.Coord(coord, maxCoord) unless that factor is exactly 1, in
// which case the inner explanation is returned unwrapped.
// NOTE(review): clauses whose Scorer(...) returns null are skipped
// entirely — even required ones — confirm that is intended.
public override Explanation Explain(IndexReader reader, int doc) { int minShouldMatch = Enclosing_Instance.GetMinimumNumberShouldMatch(); ComplexExplanation sumExpl = new ComplexExplanation(); sumExpl.SetDescription("sum of:"); int coord = 0; int maxCoord = 0; float sum = 0.0f; bool fail = false; int shouldMatchCount = 0; for (System.Collections.IEnumerator wIter = weights.GetEnumerator(), cIter = Enclosing_Instance.clauses.GetEnumerator(); wIter.MoveNext();) { cIter.MoveNext(); Weight w = (Weight)wIter.Current; BooleanClause c = (BooleanClause)cIter.Current; if (w.Scorer(reader, true, true) == null) { continue; } Explanation e = w.Explain(reader, doc); if (!c.IsProhibited()) { maxCoord++; } if (e.IsMatch()) { if (!c.IsProhibited()) { sumExpl.AddDetail(e); sum += e.GetValue(); coord++; } else { Explanation r = new Explanation(0.0f, "match on prohibited clause (" + c.GetQuery().ToString() + ")"); r.AddDetail(e); sumExpl.AddDetail(r); fail = true; } if (c.GetOccur() == Occur.SHOULD) { shouldMatchCount++; } } else if (c.IsRequired()) { Explanation r = new Explanation(0.0f, "no match on required clause (" + c.GetQuery().ToString() + ")"); r.AddDetail(e); sumExpl.AddDetail(r); fail = true; } } if (fail) { System.Boolean tempAux = false; sumExpl.SetMatch(tempAux); sumExpl.SetValue(0.0f); sumExpl.SetDescription("Failure to meet condition(s) of required/prohibited clause(s)"); return(sumExpl); } else if (shouldMatchCount < minShouldMatch) { System.Boolean tempAux2 = false; sumExpl.SetMatch(tempAux2); sumExpl.SetValue(0.0f); sumExpl.SetDescription("Failure to match minimum number " + "of optional clauses: " + minShouldMatch); return(sumExpl); } sumExpl.SetMatch(0 < coord?true:false); sumExpl.SetValue(sum); float coordFactor = similarity.Coord(coord, maxCoord); if (coordFactor == 1.0f) { // coord is no-op return(sumExpl); } // eliminate wrapper else { ComplexExplanation result = new ComplexExplanation(sumExpl.IsMatch(), sum * coordFactor, "product of:"); 
result.AddDetail(sumExpl); result.AddDetail(new Explanation(coordFactor, "coord(" + coord + "/" + maxCoord + ")")); return(result); } }
/// <summary>
/// Query-first filtered scorer: requires the filter to expose random-access
/// Bits. When it does not, falls back to the leapfrog query-first strategy;
/// otherwise wraps the inner scorer so each scored doc is checked against the
/// filter bits.
/// </summary>
public override Scorer FilteredScorer(AtomicReaderContext context, Weight weight, DocIdSet docIdSet)
{
    Bits randomAccessBits = docIdSet.GetBits();
    if (randomAccessBits == null)
    {
        // Filter does not provide random-access Bits; we
        // must fallback to leapfrog:
        return LEAP_FROG_QUERY_FIRST_STRATEGY.FilteredScorer(context, weight, docIdSet);
    }

    Scorer inner = weight.Scorer(context, null);
    if (inner == null)
    {
        return null;
    }
    return new QueryFirstScorer(weight, randomAccessBits, inner);
}
/// <summary> Just like <see cref="Search(Weight, Filter, int, Sort)" />, but you choose
/// whether or not the fields in the returned <see cref="FieldDoc" /> instances
/// should be set by specifying fillFields.<br/>
///
/// <p/>
/// NOTE: this does not compute scores by default. If you need scores, create
/// a <see cref="TopFieldCollector" /> instance by calling
/// <see cref="TopFieldCollector.create" /> and then pass that to
/// <see cref="Search(Weight, Filter, Collector)" />.
/// <p/>
/// </summary>
// Implementation notes:
// 1) SortField.AUTO fields are first resolved to their detected concrete
//    type (STRING gets a locale-aware SortField, other types a plain one).
// 2) If any sort field requests legacy search, the single top-level reader
//    is scored through a HitCollectorWrapper into a TopFieldDocCollector
//    (filtered or not).
// 3) Otherwise a TopFieldCollector is built (honoring the
//    fieldSortDoTrackScores / fieldSortDoMaxScore flags and the weight's
//    out-of-order capability) and handed to the Collector-based Search.
public virtual TopFieldDocs Search(Weight weight, Filter filter, int nDocs, Sort sort, bool fillFields) { nDocs = Math.Min(nDocs, reader.MaxDoc()); SortField[] fields = sort.fields; bool legacy = false; for (int i = 0; i < fields.Length; i++) { SortField field = fields[i]; System.String fieldname = field.GetField(); int type = field.GetType(); // Resolve AUTO into its true type if (type == SortField.AUTO) { int autotype = SortField.DetectFieldType(reader, fieldname); if (autotype == SortField.STRING) { fields[i] = new SortField(fieldname, field.GetLocale(), field.GetReverse()); } else { fields[i] = new SortField(fieldname, autotype, field.GetReverse()); } } if (field.GetUseLegacySearch()) { legacy = true; } } if (legacy) { // Search the single top-level reader TopDocCollector collector = new TopFieldDocCollector(reader, sort, nDocs); HitCollectorWrapper hcw = new HitCollectorWrapper(collector); hcw.SetNextReader(reader, 0); if (filter == null) { Scorer scorer = weight.Scorer(reader, true, true); if (scorer != null) { scorer.Score(hcw); } } else { SearchWithFilter(reader, weight, filter, hcw); } return (TopFieldDocs) collector.TopDocs(); } TopFieldCollector collector2 = TopFieldCollector.create(sort, nDocs, fillFields, fieldSortDoTrackScores, fieldSortDoMaxScore, !weight.ScoresDocsOutOfOrder()); Search(weight, filter, collector2); return (TopFieldDocs) collector2.TopDocs(); }
/// <summary>
/// Query-first filtered bulk scorer. Requires the filter to expose
/// random-access Bits; otherwise falls back to the leapfrog query-first
/// strategy. The scoreDocsInOrder flag is ignored because this strategy
/// always top-scores in order.
/// </summary>
public override BulkScorer FilteredBulkScorer(AtomicReaderContext context, Weight weight, bool scoreDocsInOrder, DocIdSet docIdSet) // ignored (we always top-score in order)
{
    Bits randomAccessBits = docIdSet.GetBits();
    if (randomAccessBits == null)
    {
        // Filter does not provide random-access Bits; we
        // must fallback to leapfrog:
        return LEAP_FROG_QUERY_FIRST_STRATEGY.FilteredBulkScorer(context, weight, scoreDocsInOrder, docIdSet);
    }

    Scorer inner = weight.Scorer(context, null);
    if (inner == null)
    {
        return null;
    }
    return new QueryFirstBulkScorer(inner, randomAccessBits);
}
// Filtered search core (2.9-style): classic leapfrog between the
// filter's iterator and the query scorer. Both sides advance toward each
// other until they land on the same doc, which is then collected; the
// exhaustion check (NO_MORE_DOCS) happens only when both agree, just
// before collecting. Returns early when the filter yields no DocIdSet or
// no iterator, or when the query has no scorer.
private void SearchWithFilter(IndexReader reader, Weight weight, Filter filter, Collector collector) { System.Diagnostics.Debug.Assert(filter != null); Scorer scorer = weight.Scorer(reader, true, false); if (scorer == null) { return ; } int docID = scorer.DocID(); System.Diagnostics.Debug.Assert(docID == - 1 || docID == DocIdSetIterator.NO_MORE_DOCS); // CHECKME: use ConjunctionScorer here? DocIdSet filterDocIdSet = filter.GetDocIdSet(reader); if (filterDocIdSet == null) { // this means the filter does not accept any documents. return ; } DocIdSetIterator filterIter = filterDocIdSet.Iterator(); if (filterIter == null) { // this means the filter does not accept any documents. return ; } int filterDoc = filterIter.NextDoc(); int scorerDoc = scorer.Advance(filterDoc); collector.SetScorer(scorer); while (true) { if (scorerDoc == filterDoc) { // Check if scorer has exhausted, only before collecting. if (scorerDoc == DocIdSetIterator.NO_MORE_DOCS) { break; } collector.Collect(scorerDoc); filterDoc = filterIter.NextDoc(); scorerDoc = scorer.Advance(filterDoc); } else if (scorerDoc > filterDoc) { filterDoc = filterIter.Advance(scorerDoc); } else { scorerDoc = scorer.Advance(filterDoc); } } }
// Facet search (array-snapshot variant): first snapshots the registered
// FacetHitCollectors and checks whether any of them requires
// post-collection validation. Without a filter, the scorer is iterated
// doc-by-doc and docs passing validateAndIncrement are collected. With a
// filter, the scorer and the filter iterator leapfrog each other and
// only docs both accept (and that pass validation) are collected.
// NOTE(review): filter.GetDocIdSet(reader) is dereferenced without a
// null check — confirm filters used here never return a null DocIdSet.
public override void Search(Weight weight, Filter filter, Collector results) { IndexReader reader = IndexReader; bool doValidate = false; FacetHitCollector[] facetCollectors = this.facetCollectors.ToArray(); foreach (FacetHitCollector facetCollector in facetCollectors) { if (facetCollector.PostDocIDSetIterator != null) { doValidate = true; break; } } Scorer scorer = weight.Scorer(reader, true, false); if (scorer == null) { return; } results.SetScorer(scorer); if (filter == null) { while (scorer.NextDoc()!=DocIdSetIterator.NO_MORE_DOCS) { int doc = scorer.DocID(); if (validateAndIncrement(doc, facetCollectors, doValidate)) { results.Collect(doc); } } return; } DocIdSetIterator filterDocIdIterator = filter.GetDocIdSet(reader).Iterator(); // CHECKME: use ConjunctionScorer here? bool more = filterDocIdIterator.NextDoc() != DocIdSetIterator.NO_MORE_DOCS && scorer.Advance(filterDocIdIterator.DocID()) != DocIdSetIterator.NO_MORE_DOCS; while (more) { int filterDocId = filterDocIdIterator.DocID(); if (filterDocId > scorer.DocID() && scorer.Advance(filterDocId)==DocIdSetIterator.NO_MORE_DOCS) { more = false; } else { int scorerDocId = scorer.DocID(); if (scorerDocId == filterDocId) // permitted by filter { if (validateAndIncrement(scorerDocId, facetCollectors, doValidate)) { results.Collect(scorerDocId); } more = filterDocIdIterator.NextDoc()!=DocIdSetIterator.NO_MORE_DOCS; } else { more = filterDocIdIterator.Advance(scorerDocId) != DocIdSetIterator.NO_MORE_DOCS; } } } }