public override bool Score(ICollector collector, int maxDoc)
{
    // The normalization trick already applies the boost of this query,
    // so the wrapped scorer can be handed to the collector directly.
    collector.SetScorer(scorer);

    // Position the scorer on its first document if it has not been advanced yet.
    if (scorer.DocID == -1)
    {
        scorer.NextDoc();
    }

    // Walk every candidate below maxDoc, collecting only docs that pass the filter.
    for (int candidate = scorer.DocID; candidate < maxDoc; candidate = scorer.DocID)
    {
        if (filterBits.Get(candidate))
        {
            collector.Collect(candidate);
        }
        scorer.NextDoc();
    }

    // More documents remain iff the scorer is not exhausted.
    return scorer.DocID != Scorer.NO_MORE_DOCS;
}
/// <summary>Scores and collects all matching documents.</summary>
/// <param name="collector">The collector to which all matching documents are passed through.
/// <br/>When this method is used the {@link #Explain(int)} method should not be used.
/// </param>
public override void Score(Collector collector)
{
    collector.SetScorer(this);
    // Drain the counting sum scorer, remembering the current doc in the field.
    for (doc = countingSumScorer.NextDoc(); doc != NO_MORE_DOCS; doc = countingSumScorer.NextDoc())
    {
        collector.Collect(doc);
    }
}
public virtual void TestNext()
{
    // A query matching every document via the "all" term.
    TermQuery termQuery = new TermQuery(new Term(FIELD, "all"));
    Weight weight = IndexSearcher.CreateNormalizedWeight(termQuery);
    Assert.IsTrue(IndexSearcher.TopReaderContext is AtomicReaderContext);
    AtomicReaderContext context = (AtomicReaderContext)IndexSearcher.TopReaderContext;
    Scorer ts = weight.GetScorer(context, context.AtomicReader.LiveDocs);

    // Expect exactly two hits, each with the same fixed score, then exhaustion.
    Assert.IsTrue(ts.NextDoc() != DocIdSetIterator.NO_MORE_DOCS, "next did not return a doc");
    Assert.IsTrue(ts.GetScore() == 1.6931472f, "score is not correct");
    Assert.IsTrue(ts.NextDoc() != DocIdSetIterator.NO_MORE_DOCS, "next did not return a doc");
    Assert.IsTrue(ts.GetScore() == 1.6931472f, "score is not correct");
    Assert.IsTrue(ts.NextDoc() == DocIdSetIterator.NO_MORE_DOCS, "next returned a doc and it should not have");
}
/// <summary>Feeds every remaining document produced by <paramref name="scorer"/> to the collector.</summary>
internal static void ScoreAll(ICollector collector, Scorer scorer)
{
    for (int doc = scorer.NextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.NextDoc())
    {
        collector.Collect(doc);
    }
}
/// <summary>Steps both scorers in lock-step via NextDoc and asserts matching doc ids, freqs and scores.</summary>
private void AssertNext(Scorer expected, Scorer actual)
{
    // A null actual scorer is only legal when the expected scorer matches nothing.
    if (actual == null)
    {
        Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, expected.NextDoc());
        return;
    }

    for (int doc = expected.NextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = expected.NextDoc())
    {
        Assert.AreEqual(doc, actual.NextDoc());
        Assert.AreEqual(expected.Freq, actual.Freq);
        float wantScore = expected.GetScore();
        float gotScore = actual.GetScore();
        Assert.AreEqual(wantScore, gotScore, CheckHits.ExplainToleranceDelta(wantScore, gotScore));
    }
    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, actual.NextDoc());
}
/// <summary>
/// Advances to the next document that matches the required scorer but is
/// not matched by the exclusion iterator (when one is present).
/// </summary>
public override int NextDoc()
{
    // Required scorer was exhausted on a previous call: stay at the sentinel doc.
    if (reqScorer == null)
    {
        return(doc);
    }
    doc = reqScorer.NextDoc();
    if (doc == NO_MORE_DOCS)
    {
        reqScorer = null; // exhausted, nothing left
        return(doc);
    }
    // No exclusion iterator: every required match is a hit.
    if (exclDisi == null)
    {
        return(doc);
    }
    // Skip past docs that the exclusion iterator also matches.
    return(doc = ToNonExcluded());
}
/// <summary>Collects docs in [currentDoc, end); returns true if the scorer still has documents afterwards.</summary>
internal static bool ScoreRange(ICollector collector, Scorer scorer, int currentDoc, int end)
{
    int doc = currentDoc;
    while (doc < end)
    {
        collector.Collect(doc);
        doc = scorer.NextDoc();
    }
    return doc != DocIdSetIterator.NO_MORE_DOCS;
}
/// <summary>Advances the wrapped scorer until a document passes the filter (or it runs out).</summary>
public override int NextDoc()
{
    while (true)
    {
        int candidate = scorer.NextDoc();
        if (candidate == Scorer.NO_MORE_DOCS || filterBits.Get(candidate))
        {
            scorerDoc = candidate;
            return scorerDoc;
        }
    }
}
/// <summary>Called the first time next() or skipTo() is called to
/// initialize <code>scorerDocQueue</code> with every sub-scorer that
/// produced at least one document.
/// </summary>
private void InitScorerDocQueue()
{
    scorerDocQueue = new ScorerDocQueue(nrScorers);
    foreach (Scorer sub in subScorers)
    {
        if (sub.NextDoc() != NO_MORE_DOCS)
        {
            // doc() method will be used in scorerDocQueue.
            scorerDocQueue.Insert(sub);
        }
    }
}
/// <summary>
/// For each collected hit, replays the same position on a parallel scorer
/// (advancing via the skip()/next() pattern given by <c>order</c>) and
/// verifies that doc ids agree and scores agree within <c>maxDiff</c>.
/// </summary>
public override void Collect(int doc, IState state)
{
    float score = sc.Score(null);
    lastDoc[0] = doc;
    try
    {
        // Lazily create the parallel scorer on the first collected hit.
        if (scorer == null)
        {
            Weight w = q.Weight(s, null);
            scorer = w.Scorer(reader, true, false, null);
        }
        int op = order[(opidx[0]++) % order.Length];
        // System.out.println(op==skip_op ?
        // "skip("+(sdoc[0]+1)+")":"next()");
        bool more = op == skip_op
            ? scorer.Advance(scorer.DocID() + 1, null) != DocIdSetIterator.NO_MORE_DOCS
            : scorer.NextDoc(null) != DocIdSetIterator.NO_MORE_DOCS;
        int scorerDoc = scorer.DocID();
        // Score twice to verify the scorer returns a stable value across calls.
        float scorerScore = scorer.Score(null);
        float scorerScore2 = scorer.Score(null);
        float scoreDiff = System.Math.Abs(score - scorerScore);
        float scorerDiff = System.Math.Abs(scorerScore2 - scorerScore);
        if (!more || doc != scorerDoc || scoreDiff > maxDiff || scorerDiff > maxDiff)
        {
            System.Text.StringBuilder sbord = new System.Text.StringBuilder();
            for (int i = 0; i < order.Length; i++)
            {
                sbord.Append(order[i] == skip_op ? " skip()" : " next()");
            }
            // Include the collected doc alongside scorerDoc so a mismatch is
            // diagnosable from the message (consistent with the ICollector variant).
            throw new System.SystemException("ERROR matching docs:" + "\n\t" + (doc != scorerDoc ? "--> " : "") + "doc=" + doc + ", scorerDoc=" + scorerDoc + "\n\t" + (!more ? "--> " : "") + "tscorer.more=" + more + "\n\t" + (scoreDiff > maxDiff ? "--> " : "") + "scorerScore=" + scorerScore + " scoreDiff=" + scoreDiff + " maxDiff=" + maxDiff + "\n\t" + (scorerDiff > maxDiff ? "--> " : "") + "scorerScore2=" + scorerScore2 + " scorerDiff=" + scorerDiff + "\n\thitCollector.doc=" + doc + " score=" + score + "\n\t Scorer=" + scorer + "\n\t Query=" + q + " " + q.GetType().FullName + "\n\t Searcher=" + s + "\n\t Order=" + sbord + "\n\t Op=" + (op == skip_op ? " skip()" : " next()"));
        }
    }
    catch (System.IO.IOException e)
    {
        // Preserve the original failure text instead of rethrowing with an empty message.
        throw new System.SystemException(e.ToString(), e);
    }
}
/// <summary>
/// Builds a BooleanScorer over optional and prohibited sub-scorers. Each
/// sub-scorer is primed with an initial NextDoc(); scorers with no documents
/// are dropped up-front. Every prohibited scorer receives its own bit in
/// <c>prohibitedMask</c> so its hits can be vetoed during bucket scoring.
/// </summary>
public /*internal*/ BooleanScorer(Similarity similarity, int minNrShouldMatch, System.Collections.IList optionalScorers, System.Collections.IList prohibitedScorers)
    : base(similarity)
{
    InitBlock();
    this.minNrShouldMatch = minNrShouldMatch;
    // Prime each optional scorer; maxCoord counts them all, even exhausted ones.
    if (optionalScorers != null && optionalScorers.Count > 0)
    {
        for (System.Collections.IEnumerator si = optionalScorers.GetEnumerator(); si.MoveNext();)
        {
            Scorer scorer = (Scorer)si.Current;
            maxCoord++;
            if (scorer.NextDoc() != NO_MORE_DOCS)
            {
                // Prepend to the singly-linked SubScorer list.
                scorers = new SubScorer(scorer, false, false, bucketTable.NewCollector(0), scorers);
            }
        }
    }
    // Assign each prohibited scorer a distinct mask bit.
    if (prohibitedScorers != null && prohibitedScorers.Count > 0)
    {
        for (System.Collections.IEnumerator si = prohibitedScorers.GetEnumerator(); si.MoveNext();)
        {
            Scorer scorer = (Scorer)si.Current;
            int mask = nextMask;
            nextMask = nextMask << 1;
            prohibitedMask |= mask; // update prohibited mask
            if (scorer.NextDoc() != NO_MORE_DOCS)
            {
                scorers = new SubScorer(scorer, false, true, bucketTable.NewCollector(mask), scorers);
            }
        }
    }
    // Precompute coordination factors for every possible matching-clause count.
    coordFactors = new float[maxCoord];
    Similarity sim = GetSimilarity();
    for (int i = 0; i < maxCoord; i++)
    {
        coordFactors[i] = sim.Coord(i, maxCoord - 1);
    }
}
/// <summary>Advances both scorers by the same targets and asserts matching doc ids, freqs and scores.</summary>
private void AssertAdvance(Scorer expected, Scorer actual, int amount)
{
    // A null actual scorer is only legal when the expected scorer matches nothing.
    if (actual == null)
    {
        Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, expected.NextDoc());
        return;
    }

    int prevDoc = 0;
    for (int doc = expected.Advance(prevDoc + amount); doc != DocIdSetIterator.NO_MORE_DOCS; doc = expected.Advance(prevDoc + amount))
    {
        Assert.AreEqual(doc, actual.Advance(prevDoc + amount));
        Assert.AreEqual(expected.Freq(), actual.Freq());
        float wantScore = expected.Score();
        float gotScore = actual.Score();
        Assert.AreEqual(wantScore, gotScore, CheckHits.ExplainToleranceDelta(wantScore, gotScore));
        prevDoc = doc;
    }
    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, actual.Advance(prevDoc + amount));
}
/* Create the scorer used to score our associated DisjunctionMaxQuery */
public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer, IState state)
{
    Scorer[] primed = new Scorer[weights.Count];
    int count = 0;
    foreach (Weight w in weights)
    {
        // Keep only sub-scorers that produced at least one document.
        Scorer sub = w.Scorer(reader, true, false, state);
        if (sub != null && sub.NextDoc(state) != DocIdSetIterator.NO_MORE_DOCS)
        {
            primed[count++] = sub;
        }
    }
    if (count == 0)
    {
        return null; // all scorers did not have documents
    }
    return new DisjunctionMaxScorer(Enclosing_Instance.tieBreakerMultiplier, similarity, primed, count);
}
/// <summary>alternate scorer skipTo(),skipTo(),next(),next(),skipTo(),skipTo(), etc
/// and ensure a hitcollector receives same docs and scores
/// </summary>
public static void CheckSkipTo(Query q, IndexSearcher s)
{
    //System.out.println("Checking "+q);
    if (BooleanQuery.GetAllowDocsOutOfOrder())
    {
        return; // in this case order of skipTo() might differ from that of next().
    }
    int skip_op = 0;
    int next_op = 1;
    // Each inner array is a repeating pattern of skip()/next() operations
    // that the collector cycles through while replaying hits.
    int[][] orders = new int[][] { new int[] { next_op }, new int[] { skip_op }, new int[] { skip_op, next_op }, new int[] { next_op, skip_op }, new int[] { skip_op, skip_op, next_op, next_op }, new int[] { next_op, next_op, skip_op, skip_op }, new int[] { skip_op, skip_op, skip_op, next_op, next_op } };
    for (int k = 0; k < orders.Length; k++)
    {
        int[] order = orders[k];
        // System.out.print("Order:");for (int i = 0; i < order.length; i++)
        // System.out.print(order[i]==skip_op ? " skip()":" next()");
        // System.out.println();
        // Single-element arrays act as mutable cells shared with the collector.
        int[] opidx = new int[] { 0 };
        Weight w = q.Weight(s);
        Scorer scorer = w.Scorer(s.GetIndexReader(), true, false);
        if (scorer == null)
        {
            continue;
        }
        // FUTURE: ensure scorer.doc()==-1
        int[] sdoc = new int[] { -1 };
        float maxDiff = 1e-5f;
        s.Search(q, new AnonymousClassCollector(order, opidx, skip_op, scorer, sdoc, maxDiff, q, s));
        // make sure next call to scorer is false.
        int op = order[(opidx[0]++) % order.Length];
        // System.out.println(op==skip_op ? "last: skip()":"last: next()");
        bool more = (op == skip_op?scorer.Advance(sdoc[0] + 1):scorer.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS;
        Assert.IsFalse(more);
    }
}
/* Create the scorer used to score our associated DisjunctionMaxQuery */
public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer)
{
    Scorer[] primed = new Scorer[weights.Count];
    int count = 0;
    foreach (Weight w in weights)
    {
        // Keep only sub-scorers that produced at least one document.
        Scorer sub = w.Scorer(reader, true, false);
        if (sub != null && sub.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
        {
            primed[count++] = sub;
        }
    }
    if (count == 0)
    {
        return null; // all scorers did not have documents
    }
    return new DisjunctionMaxScorer(Enclosing_Instance.tieBreakerMultiplier, similarity, primed, count);
}
/// <summary>
/// Pops the next qualifying document from the bucket queue, refilling the
/// queue a window (BucketTable.SIZE docs) at a time from the sub-scorers.
/// </summary>
public override int NextDoc()
{
    bool more;
    do
    {
        // Drain queued buckets first: each bucket is a candidate document
        // accumulated during a previous refill pass.
        while (bucketTable.first != null)
        {
            // more queued
            current = bucketTable.first;
            bucketTable.first = current.next; // pop the queue

            // check prohibited & required, and minNrShouldMatch
            if ((current.bits & prohibitedMask) == 0 && (current.bits & requiredMask) == requiredMask && current.coord >= minNrShouldMatch)
            {
                return(doc = current.doc);
            }
        }

        // refill the queue
        more = false;
        end += BucketTable.SIZE;
        for (SubScorer sub = scorers; sub != null; sub = sub.next)
        {
            Scorer scorer = sub.scorer;
            sub.collector.SetScorer(scorer);
            int doc = scorer.DocID();
            // Feed this sub-scorer's hits within the current window into its bucket collector.
            while (doc < end)
            {
                sub.collector.Collect(doc);
                doc = scorer.NextDoc();
            }
            // Track whether any sub-scorer still has documents beyond the window.
            more |= (doc != NO_MORE_DOCS);
        }
    }while (bucketTable.first != null || more);

    return(this.doc = NO_MORE_DOCS);
}
/// <summary>Scores documents up to (but excluding) <paramref name="max"/>;
/// returns true if more documents remain to be scored.</summary>
public override bool Score(ICollector collector, int max)
{
    // NOTE: when embedded in a BooleanScorer we are called for every chunk of
    // 2048 documents, but scorer is then a FakeScorer, so any Collector doing
    // something "interesting" in SetScorer is forced to use BS2 anyway.
    collector.SetScorer(scorer);

    if (max == DocIdSetIterator.NO_MORE_DOCS)
    {
        // Unbounded request: drain the scorer completely.
        ScoreAll(collector, scorer);
        return false;
    }

    int doc = scorer.DocID;
    if (doc < 0)
    {
        // Not positioned yet: advance to the first hit.
        doc = scorer.NextDoc();
    }
    return ScoreRange(collector, scorer, doc, max);
}
/// <summary>Delegates to the counting sum scorer and remembers the current doc.</summary>
public override int NextDoc()
{
    doc = countingSumScorer.NextDoc();
    return doc;
}
/// <summary>Feeds every remaining document produced by <paramref name="scorer"/> to the collector.</summary>
internal static void ScoreAll(Collector collector, Scorer scorer)
{
    for (int doc = scorer.NextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.NextDoc())
    {
        collector.Collect(doc);
    }
}
/// <summary>
/// For each collected hit, replays the same position on a parallel scorer
/// (advancing via the skip()/next() pattern given by <c>order</c>) and
/// verifies that doc ids agree and scores agree within <c>maxDiff</c>.
/// </summary>
public virtual void Collect(int doc)
{
    float score = sc.GetScore();
    lastDoc[0] = doc;
    try
    {
        // Lazily create the parallel scorer on the first collected hit.
        if (scorer == null)
        {
            Weight w = s.CreateNormalizedWeight(q);
            AtomicReaderContext context = readerContextArray[leafPtr];
            scorer = w.GetScorer(context, (context.AtomicReader).LiveDocs);
        }

        int op = order[(opidx[0]++) % order.Length];
        // System.out.println(op==skip_op ?
        // "skip("+(sdoc[0]+1)+")":"next()");
        bool more = op == skip_op?scorer.Advance(scorer.DocID + 1) != DocIdSetIterator.NO_MORE_DOCS : scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS;
        int scorerDoc = scorer.DocID;
        // Score twice to verify the scorer returns a stable value across calls.
        float scorerScore = scorer.GetScore();
        float scorerScore2 = scorer.GetScore();
        float scoreDiff = Math.Abs(score - scorerScore);
        float scorerDiff = Math.Abs(scorerScore2 - scorerScore);

        // If anything is out of tolerance, build a detailed diagnostic message.
        if (!more || doc != scorerDoc || scoreDiff > maxDiff || scorerDiff > maxDiff)
        {
            StringBuilder sbord = new StringBuilder();
            for (int i = 0; i < order.Length; i++)
            {
                sbord.Append(order[i] == skip_op ? " skip()" : " next()");
            }
            throw new Exception("ERROR matching docs:" + "\n\t" + (doc != scorerDoc ? "--> " : "") + "doc=" + doc + ", scorerDoc=" + scorerDoc + "\n\t" + (!more ? "--> " : "") + "tscorer.more=" + more + "\n\t" + (scoreDiff > maxDiff ? "--> " : "") + "scorerScore=" + scorerScore + " scoreDiff=" + scoreDiff + " maxDiff=" + maxDiff + "\n\t" + (scorerDiff > maxDiff ? "--> " : "") + "scorerScore2=" + scorerScore2 + " scorerDiff=" + scorerDiff + "\n\thitCollector.Doc=" + doc + " score=" + score + "\n\t Scorer=" + scorer + "\n\t Query=" + q + " " + q.GetType().Name + "\n\t Searcher=" + s + "\n\t Order=" + sbord + "\n\t Op=" + (op == skip_op ? " skip()" : " next()"));
        }
    }
    catch (IOException e)
    {
        throw new Exception(e.ToString(), e);
    }
}
/// <summary>Collects docs in [currentDoc, end); returns true if the scorer still has documents afterwards.</summary>
internal static bool ScoreRange(Collector collector, Scorer scorer, int currentDoc, int end)
{
    int doc = currentDoc;
    while (doc < end)
    {
        collector.Collect(doc);
        doc = scorer.NextDoc();
    }
    return doc != DocIdSetIterator.NO_MORE_DOCS;
}
/// <summary>
/// Drives <paramref name="scorer"/> manually, feeding each hit to the
/// collector until the scorer is exhausted or the group collector reports
/// that its limit has been reached.
/// </summary>
private void SearchWithScorer(IndexReader reader, Weight weight, Scorer scorer, Collector collector)
{
    if (scorer == null)
    {
        return;
    }
    // NOTE: the original also called scorer.DocID() here and discarded the
    // result; that dead call (and a stray empty statement) has been removed.
    int doc = scorer.NextDoc();
    collector.SetScorer(scorer);
    while (doc != DocIdSetIterator.NO_MORE_DOCS && !((GroupCollector)collector).GroupLimitReached)
    {
        collector.Collect(doc);
        doc = scorer.NextDoc();
    }
}
/// <summary>Steps both scorers in lock-step via NextDoc and asserts matching doc ids, freqs and scores.</summary>
private void AssertNext(Scorer expected, Scorer actual)
{
    // A null actual scorer is only legal when the expected scorer matches nothing.
    if (actual == null)
    {
        Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, expected.NextDoc());
        return;
    }

    for (int doc = expected.NextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = expected.NextDoc())
    {
        Assert.AreEqual(doc, actual.NextDoc());
        Assert.AreEqual(expected.Freq(), actual.Freq());
        float wantScore = expected.Score();
        float gotScore = actual.Score();
        Assert.AreEqual(wantScore, gotScore, CheckHits.ExplainToleranceDelta(wantScore, gotScore));
    }
    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, actual.NextDoc());
}
/// <summary>Returns true while the wrapped scorer still produces documents.</summary>
public override bool Next()
{
    int doc = scorer.NextDoc();
    return doc != NO_MORE_DOCS;
}
[AwaitsFix(BugUrl = "https://github.com/apache/lucenenet/issues/269")] // LUCENENET TODO: this test fails on x86 on .NET Framework in Release mode only
#endif
// Indexes random docs over terms a..f, gathers all hits for a random BS2
// disjunction via NextDoc(), then re-runs the scorer with random mixes of
// NextDoc()/Advance() and asserts identical doc ids and exact scores.
public virtual void TestBS2DisjunctionNextVsAdvance()
{
    Directory d = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        Random, d);
    int numDocs = AtLeast(300);
    for (int docUpto = 0; docUpto < numDocs; docUpto++)
    {
        // Terms b..f appear with decreasing probability; "a" is in every doc.
        string contents = "a";
        if (Random.Next(20) <= 16)
        {
            contents += " b";
        }
        if (Random.Next(20) <= 8)
        {
            contents += " c";
        }
        if (Random.Next(20) <= 4)
        {
            contents += " d";
        }
        if (Random.Next(20) <= 2)
        {
            contents += " e";
        }
        if (Random.Next(20) <= 1)
        {
            contents += " f";
        }
        Document doc = new Document();
        doc.Add(new TextField("field", contents, Field.Store.NO));
        w.AddDocument(doc);
    }
    // Single segment so one leaf context holds all docs.
    w.ForceMerge(1);
    IndexReader r = w.GetReader();
    IndexSearcher s = NewSearcher(r);
    w.Dispose();

    for (int iter = 0; iter < 10 * RandomMultiplier; iter++)
    {
        if (Verbose)
        {
            Console.WriteLine("iter=" + iter);
        }
        // Pick a random non-empty subset of the terms for the disjunction.
        IList<string> terms = new List<string> { "a", "b", "c", "d", "e", "f" };
        int numTerms = TestUtil.NextInt32(Random, 1, terms.Count);
        while (terms.Count > numTerms)
        {
            terms.RemoveAt(Random.Next(terms.Count));
        }
        if (Verbose)
        {
            Console.WriteLine(" terms=" + terms);
        }
        BooleanQuery q = new BooleanQuery();
        foreach (string term in terms)
        {
            q.Add(new BooleanClause(new TermQuery(new Term("field", term)), Occur.SHOULD));
        }
        Weight weight = s.CreateNormalizedWeight(q);
        Scorer scorer = weight.GetScorer(s.m_leafContexts[0], null);

        // First pass: just use .NextDoc() to gather all hits
        IList<ScoreDoc> hits = new List<ScoreDoc>();
        while (scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
        {
            hits.Add(new ScoreDoc(scorer.DocID, scorer.GetScore()));
        }
        if (Verbose)
        {
            Console.WriteLine(" " + hits.Count + " hits");
        }

        // Now, randomly next/advance through the list and
        // verify exact match:
        for (int iter2 = 0; iter2 < 10; iter2++)
        {
            weight = s.CreateNormalizedWeight(q);
            scorer = weight.GetScorer(s.m_leafContexts[0], null);
            if (Verbose)
            {
                Console.WriteLine(" iter2=" + iter2);
            }
            int upto = -1;
            while (upto < hits.Count)
            {
                int nextUpto;
                int nextDoc;
                int left = hits.Count - upto;
                if (left == 1 || Random.nextBoolean())
                {
                    // next
                    nextUpto = 1 + upto;
                    nextDoc = scorer.NextDoc();
                }
                else
                {
                    // advance
                    int inc = TestUtil.NextInt32(Random, 1, left - 1);
                    nextUpto = inc + upto;
                    nextDoc = scorer.Advance(hits[nextUpto].Doc);
                }
                if (nextUpto == hits.Count)
                {
                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, nextDoc);
                }
                else
                {
                    ScoreDoc hit = hits[nextUpto];
                    Assert.AreEqual(hit.Doc, nextDoc);
                    // Test for precise float equality:
                    Assert.IsTrue(hit.Score == scorer.GetScore(), "doc " + hit.Doc + " has wrong score: expected=" + hit.Score + " actual=" + scorer.GetScore());
                }
                upto = nextUpto;
            }
        }
    }
    r.Dispose();
    d.Dispose();
}
/// <summary>Forwards directly to the required scorer.</summary>
public override int NextDoc()
{
    return reqScorer.NextDoc();
}
/// <summary>Forwards directly to the wrapped scorer.</summary>
public override int NextDoc(IState state)
{
    return scorer.NextDoc(state);
}