/// <summary>
/// Scores and collects documents strictly below <paramref name="end"/>, reading
/// doc ids in buffered windows from <c>termDocs</c>. Relies on the fields
/// <c>doc</c>, <c>pointer</c>, <c>pointerMax</c>, <c>docs</c>, and <c>freqs</c>
/// holding the current buffer state between calls.
/// </summary>
/// <param name="c">Collector that receives each matching document.</param>
/// <param name="end">Exclusive upper bound; documents at or past this id are not collected.</param>
/// <param name="firstDocID">Unused here — see the comment below.</param>
/// <returns><c>true</c> if more matching documents may remain; <c>false</c> once the stream is exhausted.</returns>
// firstDocID is ignored since nextDoc() sets 'doc'
public /*protected internal*/ override bool Score(Collector c, int end, int firstDocID)
{
    c.SetScorer(this);
    while (doc < end)
    {
        // for docs in window
        c.Collect(doc); // collect score

        // Advance within the buffered window; refill when the window is used up.
        if (++pointer >= pointerMax)
        {
            pointerMax = termDocs.Read(docs, freqs); // refill buffers
            if (pointerMax != 0)
            {
                pointer = 0;
            }
            else
            {
                termDocs.Close(); // close stream
                doc = System.Int32.MaxValue; // set to sentinel value
                return false;
            }
        }
        doc = docs[pointer];
    }
    return true;
}
/// <summary>Scores and collects every matching document.</summary>
/// <param name="collector">The collector to which all matching documents are passed.
/// <br/>When this method is used the <see cref="Explain(int)"/> method should not be used.
/// </param>
public virtual void Score(Collector collector)
{
    collector.SetScorer(this);
    // Walk the iterator to exhaustion, handing every hit to the collector.
    for (int docId = NextDoc(); docId != NO_MORE_DOCS; docId = NextDoc())
    {
        collector.Collect(docId);
    }
}
/// <summary>Expert: Collects matching documents in a range. Hook for optimization.
/// Note that <see cref="NextDoc()" /> must be called once before this method is called
/// for the first time, so that <c>currentDoc</c> is already positioned.
/// </summary>
/// <param name="collector">The collector to which all matching documents are passed through.</param>
/// <param name="max">Do not score documents past this.</param>
/// <param name="firstDocID">Ignored; <c>currentDoc</c> already holds the position.</param>
/// <returns>true if more matching documents may remain.</returns>
public /*protected internal*/ override bool Score(Collector collector, int max, int firstDocID)
{
    collector.SetScorer(this);
    // Collect until we either reach 'max' or the underlying iterator runs dry.
    bool exhausted = false;
    while (!exhausted && currentDoc < max)
    {
        collector.Collect(currentDoc);
        exhausted = NextDoc() == NO_MORE_DOCS;
    }
    return !exhausted;
}
/// <summary>
/// Verifies that a wrapped MultiCollector forwards every Collector call
/// (AcceptsDocsOutOfOrder, Collect, SetNextReader, SetScorer) to each of
/// the wrapped sub-collectors.
/// </summary>
public virtual void TestCollector()
{
    DummyCollector[] delegates = new DummyCollector[] { new DummyCollector(), new DummyCollector() };

    Collector wrapped = MultiCollector.Wrap(delegates);
    Assert.IsTrue(wrapped.AcceptsDocsOutOfOrder());
    wrapped.Collect(1);
    wrapped.NextReader = null;
    wrapped.Scorer = null;

    for (int i = 0; i < delegates.Length; i++)
    {
        Assert.IsTrue(delegates[i].AcceptsDocsOutOfOrderCalled);
        Assert.IsTrue(delegates[i].CollectCalled);
        Assert.IsTrue(delegates[i].SetNextReaderCalled);
        Assert.IsTrue(delegates[i].SetScorerCalled);
    }
}
/// <summary>
/// Verifies MultiCollector.Wrap behavior with null inputs: wrapping only
/// nulls must throw ArgumentException, while a mix of real and null
/// collectors must be accepted and must not cause an NPE when used.
/// </summary>
public virtual void TestNullCollectors()
{
    // Tests that the collector rejects all null collectors.
    try
    {
        MultiCollector.Wrap(null, null);
        Assert.Fail("only null collectors should not be supported");
    }
    catch (System.ArgumentException)
    {
        // expected — the declared-but-unused exception variable was removed
        // to avoid the CS0168 "variable declared but never used" warning.
    }

    // Tests that the collector handles some null collectors well. If it
    // doesn't, an NPE would be thrown.
    Collector c = MultiCollector.Wrap(new DummyCollector(), null, new DummyCollector());
    Assert.IsTrue(c is MultiCollector);
    Assert.IsTrue(c.AcceptsDocsOutOfOrder());
    c.Collect(1);
    c.NextReader = null;
    c.Scorer = null;
}
/// <summary>Scores and collects all matching documents.</summary>
/// <param name="collector">The collector to which all matching documents are passed through.
/// <br/>When this method is used the <see cref="Explain(int)"/> method should not be used.
/// </param>
public override void Score(Collector collector)
{
    collector.SetScorer(this);
    // 'doc' is the scorer's position field; keep it updated as we iterate
    // so DocID() observers stay consistent.
    for (doc = countingSumScorer.NextDoc(); doc != NO_MORE_DOCS; doc = countingSumScorer.NextDoc())
    {
        collector.Collect(doc);
    }
}
/// <summary>
/// Test stub: hands the collector a fake scorer, reports a single hit at
/// doc 0, and signals that no further documents remain.
/// </summary>
/// <param name="collector">Receives the fake scorer and the single collect call.</param>
/// <param name="max">Ignored by this stub.</param>
/// <returns>Always false (no more matching documents).</returns>
public override bool Score(Collector collector, int max)
{
    FakeScorer stub = new FakeScorer();
    collector.Scorer = stub;
    collector.Collect(0);
    return false;
}
/// <summary>
/// Test stub: asserts it is invoked exactly once (doc must still be -1),
/// then emits a single fake hit at doc 3000 with score 1.0 and reports
/// exhaustion.
/// </summary>
/// <param name="c">Collector that receives the fake scorer and the single hit.</param>
/// <param name="maxDoc">Ignored by this stub.</param>
/// <returns>Always false (no more matching documents).</returns>
public override bool Score(Collector c, int maxDoc)
{
    // Guard against being scored twice: 'doc' starts at -1 and is only
    // ever advanced here.
    Debug.Assert(doc == -1);

    doc = 3000;
    FakeScorer fake = new FakeScorer();
    fake.SetDoc(doc);
    fake.SetScore(1.0f);

    c.Scorer = fake;
    c.Collect(doc);
    return false;
}
/// <summary>
/// Bucket-table boolean scoring loop: drains the queue of accumulated
/// buckets, collecting those that pass the prohibited/coord checks, and
/// refills the table from the sub-scorers one window (BucketTable.SIZE)
/// at a time. Buckets at or past 'max' are pushed back for a later call.
/// </summary>
/// <param name="collector">Receives each qualifying document; score/doc are staged on a FakeScorer first.</param>
/// <param name="max">Do not collect documents at or past this id.</param>
/// <returns>true if a bucket was deferred (more matches may remain); false once fully drained.</returns>
public override bool Score(Collector collector, int max)
{
    bool more;
    Bucket tmp;
    FakeScorer fs = new FakeScorer();

    // The internal loop will set the score and doc before calling collect.
    collector.Scorer = fs;
    do
    {
        bucketTable.First = null;

        while (Current != null) // more queued
        {
            // check prohibited & required
            if ((Current.Bits & PROHIBITED_MASK) == 0)
            {
                // TODO: re-enable this if BQ ever sends us required
                // clauses
                //&& (current.bits & requiredMask) == requiredMask) {

                // NOTE: Lucene always passes max =
                // Integer.MAX_VALUE today, because we never embed
                // a BooleanScorer inside another (even though
                // that should work)... but in theory an outside
                // app could pass a different max so we must check
                // it:
                if (Current.Doc >= max)
                {
                    // Past the window: push the bucket back onto the table
                    // so a later Score() call can deliver it.
                    tmp = Current;
                    Current = Current.Next;
                    tmp.Next = bucketTable.First;
                    bucketTable.First = tmp;
                    continue;
                }

                if (Current.Coord >= MinNrShouldMatch)
                {
                    // Stage score/doc/freq on the fake scorer before collect.
                    fs.score = (float)(Current.Score * CoordFactors[Current.Coord]);
                    fs.doc = Current.Doc;
                    fs.freq = Current.Coord;
                    collector.Collect(Current.Doc);
                }
            }

            Current = Current.Next; // pop the queue
        }

        if (bucketTable.First != null)
        {
            // Deferred buckets exist: resume from them next call.
            Current = bucketTable.First;
            bucketTable.First = Current.Next;
            return(true);
        }

        // refill the queue
        more = false;
        End += BucketTable.SIZE;
        for (SubScorer sub = Scorers; sub != null; sub = sub.Next)
        {
            if (sub.More)
            {
                sub.More = sub.Scorer.Score(sub.Collector, End);
                more |= sub.More;
            }
        }
        Current = bucketTable.First;
    }
    while (Current != null || more);

    return(false);
}
/// <summary>
/// Bucket-table boolean scoring loop: drains the queue of accumulated
/// buckets, collecting those that pass the prohibited/coord checks, and
/// refills the table from the sub-scorers one window (BucketTable.SIZE)
/// at a time. Buckets at or past 'max' are pushed back for a later call.
/// </summary>
/// <param name="collector">Receives each qualifying document; score/doc are staged on a FakeScorer first.</param>
/// <param name="max">Do not collect documents at or past this id.</param>
/// <returns>true if a bucket was deferred (more matches may remain); false once fully drained.</returns>
public override bool Score(Collector collector, int max)
{
    bool more;
    Bucket tmp;
    FakeScorer fs = new FakeScorer();

    // The internal loop will set the score and doc before calling collect.
    collector.Scorer = fs;
    do
    {
        bucketTable.First = null;

        while (Current != null) // more queued
        {
            // check prohibited & required
            if ((Current.Bits & PROHIBITED_MASK) == 0)
            {
                // TODO: re-enable this if BQ ever sends us required
                // clauses
                //&& (current.bits & requiredMask) == requiredMask) {

                // NOTE: Lucene always passes max =
                // Integer.MAX_VALUE today, because we never embed
                // a BooleanScorer inside another (even though
                // that should work)... but in theory an outside
                // app could pass a different max so we must check
                // it:
                if (Current.Doc >= max)
                {
                    // Past the window: push the bucket back onto the table
                    // so a later Score() call can deliver it.
                    tmp = Current;
                    Current = Current.Next;
                    tmp.Next = bucketTable.First;
                    bucketTable.First = tmp;
                    continue;
                }

                if (Current.Coord >= MinNrShouldMatch)
                {
                    // Stage score/doc/freq on the fake scorer before collect.
                    fs.score = (float)(Current.Score * CoordFactors[Current.Coord]);
                    fs.doc = Current.Doc;
                    fs.freq = Current.Coord;
                    collector.Collect(Current.Doc);
                }
            }

            Current = Current.Next; // pop the queue
        }

        if (bucketTable.First != null)
        {
            // Deferred buckets exist: resume from them next call.
            Current = bucketTable.First;
            bucketTable.First = Current.Next;
            return true;
        }

        // refill the queue
        more = false;
        End += BucketTable.SIZE;
        for (SubScorer sub = Scorers; sub != null; sub = sub.Next)
        {
            if (sub.More)
            {
                sub.More = sub.Scorer.Score(sub.Collector, End);
                more |= sub.More;
            }
        }
        Current = bucketTable.First;
    }
    while (Current != null || more);

    return false;
}
/// <summary>
/// Out-of-order scoring: walks documents via NextDocOutOfOrder(), staging
/// each hit's doc id and its precomputed score (looked up through the
/// outer instance's _ords/_scores tables) on a FakeScorer before collecting.
/// The field '_doc' carries the iteration position across calls.
/// </summary>
/// <param name="collector">Receives the fake scorer and each collected document.</param>
/// <param name="max">Do not collect documents at or past this id.</param>
/// <returns>true if more matching documents may remain.</returns>
public override bool Score(Collector collector, int max)
{
    FakeScorer fakeScorer = new FakeScorer();
    collector.Scorer = fakeScorer;
    if (_doc == -1)
    {
        // First call: position on the first hit.
        _doc = NextDocOutOfOrder();
    }
    while (_doc < max)
    {
        fakeScorer.doc = _doc;
        // _scoreUpto appears to track the parallel position into _ords;
        // NOTE(review): it is advanced elsewhere (presumably by
        // NextDocOutOfOrder) — confirm against the enclosing class.
        fakeScorer._score = outerInstance._scores[outerInstance._ords[_scoreUpto]];
        collector.Collect(_doc);
        _doc = NextDocOutOfOrder();
    }
    return _doc != DocIdSetIterator.NO_MORE_DOCS;
}
/// <summary> Expert: Collects matching documents in a range. Hook for optimization.
/// Note, <c>firstDocID</c> is added to ensure that <see cref="NextDoc()"/>
/// was called before this method.
/// </summary>
/// <param name="collector">The collector to which all matching documents are passed.</param>
/// <param name="max">Do not score documents past this.</param>
/// <param name="firstDocID">
/// The first document ID (ensures <see cref="NextDoc()"/> is called before this method).
/// </param>
/// <returns> true if more matching documents may remain.</returns>
public /*protected internal*/ virtual bool Score(Collector collector, int max, int firstDocID)
{
    collector.SetScorer(this);
    // Start from the caller-supplied position and advance until 'max'.
    int pending = firstDocID;
    for (; pending < max; pending = NextDoc())
    {
        collector.Collect(pending);
    }
    return pending != NO_MORE_DOCS;
}
/// <summary>Scores and collects all matching documents.</summary>
/// <param name="collector">The collector to which all matching documents are passed through.
/// <br/>When this method is used the <see cref="Explain(int)" /> method should not be used.
/// </param>
public override void Score(Collector collector)
{
    collector.SetScorer(this);
    // Advance first, then collect the position recorded in 'currentDoc';
    // stop as soon as the iterator is exhausted.
    for (;;)
    {
        if (NextDoc() == NO_MORE_DOCS)
        {
            break;
        }
        collector.Collect(currentDoc);
    }
}
/// <summary>
/// Drives the scorer over all of its documents, feeding each hit to the
/// collector, and stops early once the GroupCollector reports its group
/// limit has been reached.
/// </summary>
/// <param name="reader">Unused here; kept for signature compatibility with callers.</param>
/// <param name="weight">Unused here; kept for signature compatibility with callers.</param>
/// <param name="scorer">Source of matching documents; may be null (no matches).</param>
/// <param name="collector">Must be a GroupCollector; receives the scorer and each hit.</param>
private void SearchWithScorer(IndexReader reader, Weight weight, Scorer scorer, Collector collector)
{
    if (scorer == null)
    {
        return;
    }
    // Fixed: removed a stray empty statement ("; ;") and a no-op
    // scorer.DocID() call whose result was discarded.
    int num = scorer.NextDoc();
    collector.SetScorer(scorer);
    GroupCollector groupCollector = (GroupCollector)collector;
    // Restructured from while(true)+break into a plain condition loop.
    while (num != DocIdSetIterator.NO_MORE_DOCS && !groupCollector.GroupLimitReached)
    {
        collector.Collect(num);
        num = scorer.NextDoc();
    }
}
/// <summary>
/// Facet search driver: scores all documents from the weight's scorer,
/// filtering hits through a FacetValidator, and — when a Filter is given —
/// leapfrogs the scorer and the filter's doc-id iterator so only documents
/// accepted by both are collected.
/// </summary>
/// <param name="weight">Produces the scorer over this searcher's reader.</param>
/// <param name="filter">Optional filter; null means score everything.</param>
/// <param name="results">Receives the scorer, the reader, and each validated hit.</param>
public override void Search(Weight weight, Filter filter, Collector results)
{
    IndexReader reader = IndexReader;

    Scorer scorer = weight.Scorer(reader, true, false);
    if (scorer == null)
    {
        // No matching documents at all.
        return;
    }

    results.SetScorer(scorer);
    results.SetNextReader(reader, 0);

    FacetValidator validator = CreateFacetValidator();
    int target = 0;
    bool more;

    if (filter == null)
    {
        // Unfiltered path: iterate the scorer, skipping to the validator's
        // next candidate whenever validation fails.
        more = scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS;
        while (more)
        {
            target = scorer.DocID();
            if (validator.Validate(target))
            {
                results.Collect(target);
                more = scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS;
            }
            else
            {
                target = validator.NextTarget;
                more = scorer.Advance(target) != DocIdSetIterator.NO_MORE_DOCS;
            }
        }
        return;
    }

    DocIdSetIterator filterDocIdIterator = filter.GetDocIdSet(reader).Iterator(); // CHECKME: use ConjunctionScorer here?

    target = filterDocIdIterator.NextDoc();
    if (target == DocIdSetIterator.NO_MORE_DOCS)
    {
        // Filter admits no documents.
        return;
    }

    // Leapfrog: 'doc' tracks the scorer, 'target' tracks the filter;
    // collect only when both land on the same document.
    int doc = -1;
    while (true)
    {
        if (doc < target)
        {
            doc = scorer.Advance(target);
            if (doc == DocIdSetIterator.NO_MORE_DOCS)
            {
                break;
            }
        }

        if (doc == target) // permitted by filter
        {
            if (validator.Validate(doc))
            {
                results.Collect(doc);
                target = filterDocIdIterator.NextDoc();
                if (target == DocIdSetIterator.NO_MORE_DOCS)
                {
                    break;
                }
                else
                {
                    continue;
                }
            }
            else
            {
                // skip to the next possible docid
                target = validator.NextTarget;
            }
        }
        else // doc > target
        {
            target = doc;
        }

        target = filterDocIdIterator.Advance(target);
        if (target == DocIdSetIterator.NO_MORE_DOCS)
        {
            break;
        }
    }
}
/// <summary>
/// Scores documents accepted by both the scorer and the filter's doc-id
/// set, leapfrogging the two iterators until they agree on a document,
/// and stops early once the GroupCollector reports its group limit.
/// </summary>
/// <param name="reader">Reader the filter's doc-id set is built against.</param>
/// <param name="weight">Produces the scorer; a null scorer means no matches.</param>
/// <param name="filter">Supplies the doc-id set restricting which hits are collected.</param>
/// <param name="collector">Must be a GroupCollector; receives the scorer and each hit.</param>
private void SearchWithFilter(IndexReader reader, Weight weight, Filter filter, Collector collector)
{
    DocIdSet docIdSet = filter.GetDocIdSet(reader);
    if (docIdSet == null)
        return;
    Scorer scorer = weight.Scorer(reader, true, false);
    if (scorer == null)
        return;
    // NOTE(review): this DocID() result is discarded — looks like a
    // leftover positioning assert; confirm it has no required side effect.
    scorer.DocID();
    DocIdSetIterator docIdSetIterator = docIdSet.Iterator();
    if (docIdSetIterator == null)
        return;
    int target = docIdSetIterator.NextDoc();
    int num = scorer.Advance(target);
    collector.SetScorer(scorer);
    while (true)
    {
        // Leapfrog until scorer and filter agree (both hit the same doc,
        // or both reach NO_MORE_DOCS).
        while (num != target)
        {
            if (num > target)
                target = docIdSetIterator.Advance(num);
            else
                num = scorer.Advance(target);
        }
        if (num != DocIdSetIterator.NO_MORE_DOCS && !((GroupCollector)collector).GroupLimitReached)
        {
            collector.Collect(num);
            target = docIdSetIterator.NextDoc();
            num = scorer.Advance(target);
        }
        else
            break;
    }
}
/// <summary>
/// Replays the cached hits (and scores) into another collector, segment by
/// segment. Documents were cached in fixed-size chunks (CachedDocs /
/// CachedScores); curBase+curUpto is the absolute position, and a new chunk
/// is paged in whenever the current one is exhausted.
/// </summary>
/// <param name="other">Collector that receives the cached readers, scorer, and documents.</param>
public override void Replay(Collector other)
{
    ReplayInit(other);

    int curUpto = 0;     // position within the current chunk
    int curBase = 0;     // absolute index of the current chunk's first doc
    int chunkUpto = 0;   // next chunk to page in
    CurDocs = EMPTY_INT_ARRAY;
    foreach (SegStart seg in CachedSegs)
    {
        other.NextReader = seg.ReaderContext;
        other.Scorer = CachedScorer;
        // seg.End is the absolute index one past this segment's last doc.
        while (curBase + curUpto < seg.End)
        {
            if (curUpto == CurDocs.Length)
            {
                // Current chunk exhausted: advance to the next cached chunk.
                curBase += CurDocs.Length;
                CurDocs = CachedDocs[chunkUpto];
                CurScores = CachedScores[chunkUpto];
                chunkUpto++;
                curUpto = 0;
            }
            // Stage the cached score/doc on the scorer before collecting.
            CachedScorer.Score_Renamed = CurScores[curUpto];
            CachedScorer.Doc = CurDocs[curUpto];
            other.Collect(CurDocs[curUpto++]);
        }
    }
}
/// <summary>
/// Feeds every remaining document from the scorer into the collector,
/// iterating until the scorer is exhausted.
/// </summary>
/// <param name="collector">Receives each matching document id.</param>
/// <param name="scorer">Iterator over matching documents.</param>
internal static void ScoreAll(Collector collector, Scorer scorer)
{
    for (int docId = scorer.NextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = scorer.NextDoc())
    {
        collector.Collect(docId);
    }
}
/// <summary>
/// Collects matching documents in [firstDocID, max), driving the
/// underlying countingSumScorer and tracking the position in the 'doc'
/// field so it persists across calls.
/// </summary>
/// <param name="collector">Receives this scorer and each matching document.</param>
/// <param name="max">Do not score documents past this.</param>
/// <param name="firstDocID">Position to resume from.</param>
/// <returns>true if more matching documents may remain.</returns>
public /*protected internal*/ override bool Score(Collector collector, int max, int firstDocID)
{
    doc = firstDocID;
    collector.SetScorer(this);
    for (; doc < max; doc = countingSumScorer.NextDoc())
    {
        collector.Collect(doc);
    }
    return doc != NO_MORE_DOCS;
}
/// <summary>
/// Scores only documents accepted by both the weight's scorer and the
/// filter, using the classic leapfrog: whichever iterator is behind is
/// advanced to the other's position until they agree.
/// </summary>
/// <param name="reader">Reader the scorer and filter doc-id set are built against.</param>
/// <param name="weight">Produces the scorer; a null scorer means no matches.</param>
/// <param name="filter">Must be non-null; restricts which documents are collected.</param>
/// <param name="collector">Receives the scorer and each agreed-upon document.</param>
private void SearchWithFilter(IndexReader reader, Weight weight, Filter filter, Collector collector)
{
    System.Diagnostics.Debug.Assert(filter != null);

    Scorer scorer = weight.Scorer(reader, true, false);
    if (scorer == null)
    {
        return ;
    }

    // The scorer must be unpositioned (-1) or already exhausted here.
    int docID = scorer.DocID();
    System.Diagnostics.Debug.Assert(docID == - 1 || docID == DocIdSetIterator.NO_MORE_DOCS);

    // CHECKME: use ConjunctionScorer here?
    DocIdSet filterDocIdSet = filter.GetDocIdSet(reader);
    if (filterDocIdSet == null)
    {
        // this means the filter does not accept any documents.
        return ;
    }

    DocIdSetIterator filterIter = filterDocIdSet.Iterator();
    if (filterIter == null)
    {
        // this means the filter does not accept any documents.
        return ;
    }
    int filterDoc = filterIter.NextDoc();
    int scorerDoc = scorer.Advance(filterDoc);

    collector.SetScorer(scorer);
    while (true)
    {
        if (scorerDoc == filterDoc)
        {
            // Check if scorer has exhausted, only before collecting.
            if (scorerDoc == DocIdSetIterator.NO_MORE_DOCS)
            {
                break;
            }
            collector.Collect(scorerDoc);
            filterDoc = filterIter.NextDoc();
            scorerDoc = scorer.Advance(filterDoc);
        }
        else if (scorerDoc > filterDoc)
        {
            // Filter is behind: catch it up to the scorer.
            filterDoc = filterIter.Advance(scorerDoc);
        }
        else
        {
            // Scorer is behind: catch it up to the filter.
            scorerDoc = scorer.Advance(filterDoc);
        }
    }
}
/// <summary>
/// Bucket-table boolean scoring loop: drains queued buckets, collecting
/// those that satisfy the prohibited/required masks and the coord
/// minimum, and refills the table from the sub-scorers one window
/// (BucketTable.SIZE) at a time. Buckets at or past 'max' are pushed
/// back for a later call.
/// </summary>
/// <param name="collector">Receives each qualifying document; score/doc are staged on a BucketScorer first.</param>
/// <param name="max">Do not collect documents at or past this id.</param>
/// <param name="firstDocID">Unused here — see the comment below.</param>
/// <returns>true if a bucket was deferred (more matches may remain); false once fully drained.</returns>
// firstDocID is ignored since nextDoc() initializes 'current'
public /*protected internal*/ override bool Score(Collector collector, int max, int firstDocID)
{
    bool more;
    Bucket tmp;
    BucketScorer bs = new BucketScorer();
    // The internal loop will set the score and doc before calling collect.
    collector.SetScorer(bs);
    do
    {
        bucketTable.first = null;

        while (current != null)
        {
            // more queued

            // check prohibited & required
            if ((current.bits & prohibitedMask) == 0 && (current.bits & requiredMask) == requiredMask)
            {
                if (current.doc >= max)
                {
                    // Past the window: push the bucket back so a later
                    // Score() call can deliver it.
                    tmp = current;
                    current = current.next;
                    tmp.next = bucketTable.first;
                    bucketTable.first = tmp;
                    continue;
                }

                if (current.coord >= minNrShouldMatch)
                {
                    // Stage score and doc on the bucket scorer before collect.
                    bs.score = current.score * coordFactors[current.coord];
                    bs.doc = current.doc;
                    collector.Collect(current.doc);
                }
            }

            current = current.next; // pop the queue
        }

        if (bucketTable.first != null)
        {
            // Deferred buckets exist: resume from them next call.
            current = bucketTable.first;
            bucketTable.first = current.next;
            return true;
        }

        // refill the queue
        more = false;
        end += BucketTable.SIZE;
        for (SubScorer sub = scorers; sub != null; sub = sub.next)
        {
            int subScorerDocID = sub.scorer.DocID();
            if (subScorerDocID != NO_MORE_DOCS)
            {
                more |= sub.scorer.Score(sub.collector, end, subScorerDocID);
            }
        }
        current = bucketTable.first;
    }
    while (current != null || more);

    return false;
}
/// <summary>
/// Drives the scorer over all of its documents (the filter leapfrog that
/// once lived here was disabled), feeding each hit to the collector and
/// stopping early once the grouping collector reports its limit.
/// </summary>
/// <param name="reader">Unused here; kept for signature compatibility with callers.</param>
/// <param name="weight">Unused here; kept for signature compatibility with callers.</param>
/// <param name="scorer">Source of matching documents; may be null (no matches).</param>
/// <param name="collector">Must be a BloclGroupingCollector (sic — the project
/// class name appears misspelled; renaming it is out of scope for this method).</param>
private void SearchWithFilter(IndexReader reader, Weight weight, Scorer scorer, Collector collector)
{
    if (scorer == null)
    {
        return;
    }
    // Cleaned up: removed the large block of commented-out leapfrog code,
    // the redundant null check on a just-assigned non-null local, the
    // unused 'target' intermediate, and a no-op scorer.DocID() call.
    DocIdSetIterator iterator = scorer;
    int num = iterator.NextDoc();
    collector.SetScorer(scorer);
    while (num != DocIdSetIterator.NO_MORE_DOCS && !((BloclGroupingCollector)collector).GroupLimitReached)
    {
        collector.Collect(num);
        num = iterator.NextDoc();
    }
}
/// <summary>
/// Test stub: installs a fake scorer on the collector, reports one hit at
/// doc 0, and indicates that no documents remain.
/// </summary>
/// <param name="collector">Receives the fake scorer and the single collect call.</param>
/// <param name="max">Ignored by this stub.</param>
/// <returns>Always false (no more matching documents).</returns>
public override bool Score(Collector collector, int max)
{
    FakeScorer placeholder = new FakeScorer();
    collector.Scorer = placeholder;
    collector.Collect(0);
    return(false);
}
/// <summary>
/// Collects documents from 'currentDoc' (inclusive) up to 'end'
/// (exclusive), advancing the scorer after each collect.
/// </summary>
/// <param name="collector">Receives each document id in the range.</param>
/// <param name="scorer">Supplies the next document after each collect.</param>
/// <param name="currentDoc">Position to start collecting from.</param>
/// <param name="end">Exclusive upper bound for this scoring window.</param>
/// <returns>true if more matching documents may remain.</returns>
internal static bool ScoreRange(Collector collector, Scorer scorer, int currentDoc, int end)
{
    int docId = currentDoc;
    for (; docId < end; docId = scorer.NextDoc())
    {
        collector.Collect(docId);
    }
    return docId != DocIdSetIterator.NO_MORE_DOCS;
}
/// <summary>Expert: Collects matching documents in a range. Hook for optimization.
/// Note that <see cref="Next()" /> must be called once before this method is called
/// for the first time, so that <c>currentDoc</c> is already positioned.
/// </summary>
/// <param name="collector">The collector to which all matching documents are passed through.</param>
/// <param name="max">Do not score documents past this.</param>
/// <param name="firstDocID">Ignored; <c>currentDoc</c> already holds the position.</param>
/// <returns> true if more matching documents may remain.</returns>
public /*protected internal*/ override bool Score(Collector collector, int max, int firstDocID)
{
    collector.SetScorer(this);
    // Loop until either the window boundary is reached (more may remain)
    // or the iterator is exhausted.
    bool moreRemain = true;
    while (moreRemain && currentDoc < max)
    {
        collector.Collect(currentDoc);
        moreRemain = NextDoc() != NO_MORE_DOCS;
    }
    return moreRemain;
}
/// <summary>
/// Scores the wrapped scorer's documents below 'maxDoc', collecting only
/// those accepted by FilterBits. The wrapped scorer is handed to the
/// collector directly because the normalization trick already folds this
/// query's boost into it.
/// </summary>
/// <param name="collector">Receives the wrapped scorer and each filtered hit.</param>
/// <param name="maxDoc">Do not score documents at or past this id.</param>
/// <returns>true if more matching documents may remain.</returns>
public override bool Score(Collector collector, int maxDoc)
{
    collector.Scorer = Scorer;
    if (Scorer.DocID() == -1)
    {
        // Unpositioned: advance to the first candidate.
        Scorer.NextDoc();
    }
    for (int current = Scorer.DocID(); current < maxDoc; current = Scorer.DocID())
    {
        if (FilterBits.Get(current))
        {
            collector.Collect(current);
        }
        Scorer.NextDoc();
    }
    return Scorer.DocID() != Scorer.NO_MORE_DOCS;
}
/// <summary>
/// Bucket-table boolean scoring loop: drains queued buckets, collecting
/// those that satisfy the prohibited/required masks and the coord
/// minimum, and refills the table from the sub-scorers one window
/// (BucketTable.SIZE) at a time. Buckets at or past 'max' are pushed
/// back for a later call.
/// </summary>
/// <param name="collector">Receives each qualifying document; score/doc are staged on a BucketScorer first.</param>
/// <param name="max">Do not collect documents at or past this id.</param>
/// <param name="firstDocID">Unused here — see the comment below.</param>
/// <returns>true if a bucket was deferred (more matches may remain); false once fully drained.</returns>
// firstDocID is ignored since nextDoc() initializes 'current'
public /*protected internal*/ override bool Score(Collector collector, int max, int firstDocID)
{
    bool more;
    Bucket tmp;
    BucketScorer bs = new BucketScorer();
    // The internal loop will set the score and doc before calling collect.
    collector.SetScorer(bs);
    do
    {
        bucketTable.first = null;

        while (current != null)
        {
            // more queued

            // check prohibited & required
            if ((current.bits & prohibitedMask) == 0 && (current.bits & requiredMask) == requiredMask)
            {
                if (current.doc >= max)
                {
                    // Past the window: push the bucket back so a later
                    // Score() call can deliver it.
                    tmp = current;
                    current = current.next;
                    tmp.next = bucketTable.first;
                    bucketTable.first = tmp;
                    continue;
                }

                if (current.coord >= minNrShouldMatch)
                {
                    // Stage score and doc on the bucket scorer before collect.
                    bs.score = current.score * coordFactors[current.coord];
                    bs.doc = current.doc;
                    collector.Collect(current.doc);
                }
            }

            current = current.next; // pop the queue
        }

        if (bucketTable.first != null)
        {
            // Deferred buckets exist: resume from them next call.
            current = bucketTable.first;
            bucketTable.first = current.next;
            return(true);
        }

        // refill the queue
        more = false;
        end += BucketTable.SIZE;
        for (SubScorer sub = scorers; sub != null; sub = sub.next)
        {
            int subScorerDocID = sub.scorer.DocID();
            if (subScorerDocID != NO_MORE_DOCS)
            {
                more |= sub.scorer.Score(sub.collector, end, subScorerDocID);
            }
        }
        current = bucketTable.first;
    }
    while (current != null || more);

    return(false);
}
/// <summary>
/// Facet search driver: determines up front whether any facet collector
/// needs post-validation, then scores all documents from the weight's
/// scorer — leapfrogging against the filter's doc-id iterator when a
/// Filter is supplied — collecting each hit that passes
/// validateAndIncrement.
/// </summary>
/// <param name="weight">Produces the scorer over this searcher's reader.</param>
/// <param name="filter">Optional filter; null means score everything.</param>
/// <param name="results">Receives the scorer and each accepted document.</param>
public override void Search(Weight weight, Filter filter, Collector results)
{
    IndexReader reader = IndexReader;

    // Post-validation is only needed if at least one facet collector
    // carries a PostDocIDSetIterator.
    bool doValidate = false;
    FacetHitCollector[] facetCollectors = this.facetCollectors.ToArray();
    foreach (FacetHitCollector facetCollector in facetCollectors)
    {
        if (facetCollector.PostDocIDSetIterator != null)
        {
            doValidate = true;
            break;
        }
    }

    Scorer scorer = weight.Scorer(reader, true, false);
    if (scorer == null)
    {
        // No matching documents at all.
        return;
    }
    results.SetScorer(scorer);

    if (filter == null)
    {
        // Unfiltered path: collect every validated hit.
        while (scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
        {
            int doc = scorer.DocID();
            if (validateAndIncrement(doc, facetCollectors, doValidate))
            {
                results.Collect(doc);
            }
        }
        return;
    }

    DocIdSetIterator filterDocIdIterator = filter.GetDocIdSet(reader).Iterator(); // CHECKME: use ConjunctionScorer here?

    // Prime both iterators; 'more' stays true while either side can advance.
    bool more = filterDocIdIterator.NextDoc() != DocIdSetIterator.NO_MORE_DOCS
        && scorer.Advance(filterDocIdIterator.DocID()) != DocIdSetIterator.NO_MORE_DOCS;

    while (more)
    {
        int filterDocId = filterDocIdIterator.DocID();
        if (filterDocId > scorer.DocID() && scorer.Advance(filterDocId) == DocIdSetIterator.NO_MORE_DOCS)
        {
            more = false;
        }
        else
        {
            int scorerDocId = scorer.DocID();
            if (scorerDocId == filterDocId) // permitted by filter
            {
                if (validateAndIncrement(scorerDocId, facetCollectors, doValidate))
                {
                    results.Collect(scorerDocId);
                }
                more = filterDocIdIterator.NextDoc() != DocIdSetIterator.NO_MORE_DOCS;
            }
            else
            {
                // Filter is behind the scorer: catch it up.
                more = filterDocIdIterator.Advance(scorerDocId) != DocIdSetIterator.NO_MORE_DOCS;
            }
        }
    }
}