public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool scoreDocsInOrder, IBits acceptDocs)
{
    //DocIdSetIterator disi;
    if (outerInstance.m_filter != null)
    {
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(outerInstance.m_query == null);
        }
        return base.GetBulkScorer(context, scoreDocsInOrder, acceptDocs);
    }
    else
    {
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(outerInstance.m_query != null && innerWeight != null);
        }
        BulkScorer bulkScorer = innerWeight.GetBulkScorer(context, scoreDocsInOrder, acceptDocs);
        if (bulkScorer == null)
        {
            return null;
        }
        return new ConstantBulkScorer(outerInstance, bulkScorer, this, queryWeight);
    }
}
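
A hedged usage sketch of this path follows: it is not part of the example above, and the helper name, the searcher, and the collector are assumed for illustration. It drives the BulkScorer returned by the weight leaf by leaf, the same way IndexSearcher does in Example #2 below.

    using Lucene.Net.Index;
    using Lucene.Net.Search;

    // Hypothetical helper (not from the example above): scores a ConstantScoreQuery
    // leaf by leaf through the BulkScorer returned by its weight.
    public static class ConstantScoreBulkSearchSketch
    {
        public static void Run(IndexSearcher searcher, Query inner, ICollector collector)
        {
            Weight weight = searcher.CreateNormalizedWeight(new ConstantScoreQuery(inner));
            foreach (AtomicReaderContext leaf in searcher.IndexReader.Leaves)
            {
                collector.SetNextReader(leaf); // per-segment doc IDs are re-based by the collector
                // Request in-order scoring and pass the leaf's live docs so deleted
                // documents are skipped.
                BulkScorer bs = weight.GetBulkScorer(leaf, true, leaf.AtomicReader.LiveDocs);
                if (bs != null)
                {
                    bs.Score(collector); // every hit is collected with the query's constant score
                }
            }
        }
    }

A production loop would also catch CollectionTerminatedException around SetNextReader and Score, exactly as Example #2 does.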
Example #2
 /// <summary>
 /// Lower-level search API.
 ///
 /// <para/>
 /// <see cref="ICollector.Collect(int)"/> is called for every document.
 ///
 /// <para/>
 /// NOTE: this method executes the searches on all given leaves exclusively.
 /// To search across all the searcher's leaves use <see cref="m_leafContexts"/>.
 /// </summary>
 /// <param name="leaves">
 ///          The searcher's leaves to execute the searches on </param>
 /// <param name="weight">
 ///          To match documents </param>
 /// <param name="collector">
 ///          To receive hits </param>
 /// <exception cref="BooleanQuery.TooManyClausesException"> If a query would exceed
 ///         <see cref="BooleanQuery.MaxClauseCount"/> clauses. </exception>
 protected virtual void Search(IList<AtomicReaderContext> leaves, Weight weight, ICollector collector)
 {
     // TODO: should we make this
     // threaded...?  the Collector could be sync'd?
     // always use single thread:
     foreach (AtomicReaderContext ctx in leaves) // search each subreader
     {
         try
         {
             collector.SetNextReader(ctx);
         }
         catch (CollectionTerminatedException)
         {
             // there is no doc of interest in this reader context
             // continue with the following leaf
             continue;
         }
         BulkScorer scorer = weight.GetBulkScorer(ctx, !collector.AcceptsDocsOutOfOrder, ctx.AtomicReader.LiveDocs);
         if (scorer != null)
         {
             try
             {
                 scorer.Score(collector);
             }
             catch (CollectionTerminatedException)
             {
                 // collection was terminated prematurely
                 // continue with the following leaf
             }
         }
     }
 }
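
The loop above only needs an ICollector. Below is a minimal, hypothetical implementation that this Search method could drive; the class name is an assumption, and the doc-ID re-basing relies on AtomicReaderContext.DocBase. Because it accepts out-of-order hits, the !collector.AcceptsDocsOutOfOrder argument above would request an out-of-order BulkScorer.

    using System.Collections.Generic;
    using Lucene.Net.Index;
    using Lucene.Net.Search;

    // Hypothetical collector (not part of the method above): records the global
    // ID of every document the Search loop feeds to it.
    internal sealed class DocIdCollectorSketch : ICollector
    {
        private readonly IList<int> hits = new List<int>();
        private int docBase;

        public IList<int> Hits => hits;

        // Only IDs are recorded, so out-of-order collection is acceptable.
        public bool AcceptsDocsOutOfOrder => true;

        public void SetScorer(Scorer scorer)
        {
            // Scores are ignored by this sketch.
        }

        public void SetNextReader(AtomicReaderContext context)
        {
            // Per-segment doc IDs start at 0; re-base them to global IDs.
            docBase = context.DocBase;
        }

        public void Collect(int doc)
        {
            hits.Add(docBase + doc);
        }
    }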
Example #3
        public virtual void Test()
        {
            Term      allTerm   = new Term(FIELD, "all");
            TermQuery termQuery = new TermQuery(allTerm);

            Weight weight = IndexSearcher.CreateNormalizedWeight(termQuery);

            Assert.IsTrue(IndexSearcher.TopReaderContext is AtomicReaderContext);
            AtomicReaderContext context = (AtomicReaderContext)IndexSearcher.TopReaderContext;
            BulkScorer          ts      = weight.GetBulkScorer(context, true, (context.AtomicReader).LiveDocs);
            // we have 2 documents with the term all in them, one document for all the
            // other values
            IList<TestHit> docs = new List<TestHit>();

            // must call next first

            ts.Score(new CollectorAnonymousInnerClassHelper(this, context, docs));
            Assert.IsTrue(docs.Count == 2, "docs Size: " + docs.Count + " is not: " + 2);
            TestHit doc0 = docs[0];
            TestHit doc5 = docs[1];

            // The scores should be the same
            Assert.IsTrue(doc0.Score == doc5.Score, doc0.Score + " does not equal: " + doc5.Score);

            /*
             * Score should be (based on the Default Similarity; all floats are approximate):
             *   tf = 1
             *   numDocs = 6
             *   docFreq(all) = 2
             *   idf = ln(6/3) + 1 = 1.693147
             *   idf^2 = 2.8667
             *   boost = 1
             *   lengthNorm = 1 (there is 1 term in every document)
             *   coord = 1
             *   sumOfSquaredWeights = (idf * boost)^2 = 1.693147^2 = 2.8667
             *   queryNorm = 1 / (sumOfSquaredWeights)^0.5 = 1 / 1.693147 = 0.590
             *
             *   score = 1 * 2.8667 * 1 * 1 * 0.590 = 1.69
             */
            Assert.IsTrue(doc0.Score == 1.6931472f, doc0.Score + " does not equal: " + 1.6931472f);
        }
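
The body of CollectorAnonymousInnerClassHelper is not shown in this example. A plausible sketch, assuming TestHit simply pairs a global doc ID with its score and that the scorer exposes the current match's score via GetScore(), is:

    using System.Collections.Generic;
    using Lucene.Net.Index;
    using Lucene.Net.Search;

    // Sketch only: the real TestHit and anonymous collector may differ.
    internal sealed class TestHitSketch
    {
        public TestHitSketch(int doc, float score) { Doc = doc; Score = score; }
        public int Doc { get; }
        public float Score { get; }
    }

    internal sealed class ScoreRecordingCollectorSketch : ICollector
    {
        private readonly IList<TestHitSketch> docs;
        private Scorer scorer;
        private int docBase;

        public ScoreRecordingCollectorSketch(IList<TestHitSketch> docs)
        {
            this.docs = docs;
        }

        public bool AcceptsDocsOutOfOrder => true;

        public void SetScorer(Scorer scorer)
        {
            this.scorer = scorer;
        }

        public void SetNextReader(AtomicReaderContext context)
        {
            docBase = context.DocBase;
        }

        public void Collect(int doc)
        {
            // GetScore() is assumed here; record the hit with its global doc ID.
            docs.Add(new TestHitSketch(docBase + doc, scorer.GetScore()));
        }
    }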
Example #4
        public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool scoreDocsInOrder, IBits acceptDocs)
        {
            // if the caller asks for in-order scoring or if the weight does not support
            // out-of-order scoring then collection will have to happen in-order.
            BulkScorer inScorer = @in.GetBulkScorer(context, scoreDocsInOrder, acceptDocs);

            if (inScorer == null)
            {
                return null;
            }

            if (AssertingBulkScorer.ShouldWrap(inScorer))
            {
                // The incoming scorer already has a specialized
                // implementation for BulkScorer, so we should use it:
                inScorer = AssertingBulkScorer.Wrap(new Random(Random.Next()), inScorer);
            }
            else if (Random.NextBoolean())
            {
                // Let super wrap this.scorer instead, so we use
                // AssertingScorer:
                inScorer = base.GetBulkScorer(context, scoreDocsInOrder, acceptDocs);
            }

            if (scoreDocsInOrder == false && Random.NextBoolean())
            {
                // The caller claims it can handle out-of-order
                // docs; let's confirm that by pulling docs and
                // randomly shuffling them before collection:
                inScorer = new AssertingBulkOutOfOrderScorer(new Random(Random.Next()), inScorer);
            }
            return inScorer;
        }
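
Whichever branch is taken, the returned object honors the BulkScorer contract used in the other examples: Score(collector) for full collection and, assuming the port follows the Lucene signature, a bool-returning Score(collector, max) for range-limited collection. A minimal pass-through wrapper, shown purely as a sketch of that contract (the real AssertingBulkScorer adds assertions around these calls), could look like:

    using Lucene.Net.Search;

    // Sketch of a delegating BulkScorer; an assumption for illustration only.
    internal sealed class DelegatingBulkScorerSketch : BulkScorer
    {
        private readonly BulkScorer @in;

        public DelegatingBulkScorerSketch(BulkScorer @in)
        {
            this.@in = @in;
        }

        public override bool Score(ICollector collector, int max)
        {
            // Forward range-limited scoring; the return value signals whether
            // more documents may remain beyond 'max'.
            return @in.Score(collector, max);
        }
    }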
Example #5
        public virtual void TestBooleanScorerMax()
        {
            Directory         dir = NewDirectory();
            RandomIndexWriter riw = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));

            int docCount = AtLeast(10000);

            for (int i = 0; i < docCount; i++)
            {
                Document doc = new Document();
                doc.Add(NewField("field", "a", TextField.TYPE_NOT_STORED));
                riw.AddDocument(doc);
            }

            riw.ForceMerge(1);
            IndexReader r = riw.GetReader();

            riw.Dispose();

            IndexSearcher s  = NewSearcher(r);
            BooleanQuery  bq = new BooleanQuery();

            bq.Add(new TermQuery(new Term("field", "a")), Occur.SHOULD);
            bq.Add(new TermQuery(new Term("field", "a")), Occur.SHOULD);

            Weight w = s.CreateNormalizedWeight(bq);

            Assert.AreEqual(1, s.IndexReader.Leaves.Count);
            BulkScorer scorer = w.GetBulkScorer(s.IndexReader.Leaves[0], false, null);

            FixedBitSet hits = new FixedBitSet(docCount);
            AtomicInt32 end  = new AtomicInt32();
            ICollector  c    = new CollectorAnonymousClass(this, scorer, hits, end);

            while (end < docCount)
            {
                int inc = TestUtil.NextInt32(Random, 1, 1000);
                end.AddAndGet(inc);
                scorer.Score(c, end);
            }

            Assert.AreEqual(docCount, hits.Cardinality);
            r.Dispose();
            dir.Dispose();
        }
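
CollectorAnonymousClass is likewise not shown. A plausible sketch, given how the test advances end and finally checks hits.Cardinality, is a collector that marks each collected doc in the shared FixedBitSet and verifies it never exceeds the current max; the class name, the exception check, and the J2N namespace for AtomicInt32 are assumptions:

    using System;
    using J2N.Threading.Atomic; // assumed location of AtomicInt32 in recent 4.8 betas
    using Lucene.Net.Index;
    using Lucene.Net.Search;
    using Lucene.Net.Util;

    // Sketch only: the real CollectorAnonymousClass may differ.
    internal sealed class MaxCheckingCollectorSketch : ICollector
    {
        private readonly FixedBitSet hits;
        private readonly AtomicInt32 end;

        public MaxCheckingCollectorSketch(FixedBitSet hits, AtomicInt32 end)
        {
            this.hits = hits;
            this.end = end;
        }

        // Consistent with the scoreDocsInOrder = false request in the test,
        // hits may arrive out of order.
        public bool AcceptsDocsOutOfOrder => true;

        public void SetScorer(Scorer scorer)
        {
            // Scores are not inspected by this test.
        }

        public void SetNextReader(AtomicReaderContext context)
        {
            // Only one segment exists after ForceMerge(1).
        }

        public void Collect(int doc)
        {
            // Each doc handed over by scorer.Score(c, end) must stay below the
            // current max; record it in the shared bit set.
            if (doc >= end)
            {
                throw new InvalidOperationException("collected doc=" + doc + " beyond max=" + end);
            }
            hits.Set(doc);
        }
    }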