public virtual bool Next()
{
    if (moreInclude)
    {
        // move to next include
        moreInclude = includeSpans.Next();
    }

    while (moreInclude && moreExclude)
    {
        if (includeSpans.Doc() > excludeSpans.Doc())
        {
            // skip exclude up to the include's document
            moreExclude = excludeSpans.SkipTo(includeSpans.Doc());
        }

        // advance exclude while it ends before the include starts
        while (moreExclude
            && includeSpans.Doc() == excludeSpans.Doc()
            && excludeSpans.End() <= includeSpans.Start())
        {
            moreExclude = excludeSpans.Next(); // increment exclude
        }

        if (!moreExclude
            || includeSpans.Doc() != excludeSpans.Doc()
            || includeSpans.End() <= excludeSpans.Start())
        {
            break; // we found a match
        }

        moreInclude = includeSpans.Next(); // intersected: keep scanning
    }

    return moreInclude;
}
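// ------------------------------------------------------------------
// Illustrative sketch, not part of the source above: how a SpanNot-
// style include/exclude scan is typically driven through the public
// API. Assumes Lucene.NET 4.8-era names (RAMDirectory, SpanNotQuery,
// IndexSearcher); the field name and terms are made up for the example.
// ------------------------------------------------------------------
using System;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Search.Spans;
using Lucene.Net.Store;
using Lucene.Net.Util;

public static class SpanNotDemo
{
    public static void Main()
    {
        using (var dir = new RAMDirectory())
        {
            var analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48);
            using (var writer = new IndexWriter(dir, new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer)))
            {
                var doc = new Document();
                doc.Add(new TextField("body", "quick brown fox", Field.Store.YES));
                writer.AddDocument(doc);
            }

            using (var reader = DirectoryReader.Open(dir))
            {
                var searcher = new IndexSearcher(reader);

                // Matches "quick" spans that do not overlap a "brown" span;
                // internally this runs the include/exclude loop shown above.
                var query = new SpanNotQuery(
                    new SpanTermQuery(new Term("body", "quick")),
                    new SpanTermQuery(new Term("body", "brown")));

                TopDocs hits = searcher.Search(query, 10);
                Console.WriteLine(hits.TotalHits);
            }
        }
    }
}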
public override bool Next()
{
    if (firstTime)
    {
        more = spans.Next(); // advance to the first match
        firstTime = false;
    }
    if (!more)
    {
        return false;
    }

    freq = 0.0f;
    doc = spans.Doc();
    // accumulate sloppy frequency over every match in the current document
    while (more && doc == spans.Doc())
    {
        int matchLength = spans.End() - spans.Start();
        freq += GetSimilarity().SloppyFreq(matchLength);
        more = spans.Next();
    }

    // true if there are more docs, or this doc produced a non-zero freq
    return more || freq != 0.0f;
}
private void GetPayloads(ICollection<byte[]> payloads, SpanQuery query)
{
    // resolve each term once, so every leaf can reuse the same TermContext
    IDictionary<Term, TermContext> termContexts = new Dictionary<Term, TermContext>();
    var terms = new SortedSet<Term>();
    query.ExtractTerms(terms);
    foreach (Term term in terms)
    {
        termContexts[term] = TermContext.Build(context, term);
    }

    // walk every leaf reader and collect the payloads of each matching span
    foreach (AtomicReaderContext atomicReaderContext in context.Leaves)
    {
        Spans spans = query.GetSpans(atomicReaderContext, atomicReaderContext.AtomicReader.LiveDocs, termContexts);
        while (spans.Next())
        {
            if (spans.IsPayloadAvailable)
            {
                var payload = spans.GetPayload();
                foreach (var bytes in payload)
                {
                    payloads.Add(bytes);
                }
            }
        }
    }
}
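// ------------------------------------------------------------------
// Hedged usage sketch, not from the source above: this GetPayloads
// helper sits behind Lucene's PayloadSpanUtil. The call below assumes
// the Lucene.NET 4.8 surface (PayloadSpanUtil(IndexReaderContext) and
// GetPayloadsForQuery(Query)); names may differ in other versions, and
// the field/term are made up.
// ------------------------------------------------------------------
using System;
using Lucene.Net.Index;
using Lucene.Net.Search.Payloads;
using Lucene.Net.Search.Spans;

public static class PayloadDump
{
    public static void Print(IndexReader reader)
    {
        var util = new PayloadSpanUtil(reader.Context);

        // Internally this resolves terms once and then walks every leaf,
        // exactly as the loop above does, gathering one byte[] per
        // payload-carrying span position.
        var payloads = util.GetPayloadsForQuery(
            new SpanTermQuery(new Term("body", "fox")));

        foreach (byte[] payload in payloads)
        {
            Console.WriteLine(BitConverter.ToString(payload));
        }
    }
}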
public virtual bool Next()
{
    while (spans.Next()) // scan to next match
    {
        if (End() <= Enclosing_Instance.end)
        {
            return true;
        }
    }
    return false;
}
public virtual void TestIgnoreSpanScorer()
{
    PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction(), false);

    IndexReader reader = DirectoryReader.Open(Directory);
    IndexSearcher theSearcher = NewSearcher(reader);
    theSearcher.Similarity = new FullSimilarity();
    TopDocs hits = Searcher.Search(query, null, 100);
    Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
    Assert.IsTrue(hits.TotalHits == 100, "hits Size: " + hits.TotalHits + " is not: " + 100);

    // they should all have the exact same score, because they all contain seventy once, and we set
    // all the other similarity factors to be 1
    // System.out.println("Hash: " + seventyHash + " Twice Hash: " + 2*seventyHash);
    Assert.IsTrue(hits.MaxScore == 4.0, hits.MaxScore + " does not equal: " + 4.0);

    // there should be exactly 10 items that score a 4, all the rest should score a 2
    // the 10 items are: 70 + i*100 where i in [0-9]
    int numTens = 0;
    for (int i = 0; i < hits.ScoreDocs.Length; i++)
    {
        ScoreDoc doc = hits.ScoreDocs[i];
        if (doc.Doc % 10 == 0)
        {
            numTens++;
            Assert.IsTrue(doc.Score == 4.0, doc.Score + " does not equal: " + 4.0);
        }
        else
        {
            Assert.IsTrue(doc.Score == 2, doc.Score + " does not equal: " + 2);
        }
    }
    Assert.IsTrue(numTens == 10, numTens + " does not equal: " + 10);
    CheckHits.CheckExplanations(query, "field", Searcher, true);

    Spans spans = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, query);
    Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
    // should be two matches per document
    int count = 0;
    // 100 hits times 2 matches per hit, we should have 200 in count
    while (spans.Next())
    {
        count++;
    }
    reader.Dispose();
}
public /*protected internal*/ virtual bool SetFreqCurrentDoc()
{
    if (!more)
    {
        return false;
    }

    doc = spans.Doc();
    freq = 0.0f;
    // accumulate the sloppy frequency of every match in the current document
    do
    {
        int matchLength = spans.End() - spans.Start();
        freq += Similarity.SloppyFreq(matchLength);
        more = spans.Next();
    } while (more && (doc == spans.Doc()));

    return true;
}
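// ------------------------------------------------------------------
// Side note, not from the source above: with Lucene's DefaultSimilarity,
// SloppyFreq(distance) is 1 / (distance + 1), so the loop above rewards
// tight matches over sloppy ones. A minimal standalone sketch of that
// accumulation; the match lengths are made-up sample values.
// ------------------------------------------------------------------
using System;

public static class SloppyFreqDemo
{
    public static void Main()
    {
        float freq = 0.0f;
        foreach (int matchLength in new[] { 1, 3, 1 }) // hypothetical End() - Start() values
        {
            freq += 1.0f / (matchLength + 1); // DefaultSimilarity.SloppyFreq
        }
        Console.WriteLine(freq); // 1.25: the two tight matches dominate
    }
}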
protected override bool SetFreqCurrentDoc()
{
    if (!m_more)
    {
        return false;
    }

    m_doc = spans.Doc;
    m_freq = 0.0f;
    m_payloadScore = 0;
    payloadsSeen = 0;
    do
    {
        int matchLength = spans.End - spans.Start;
        m_freq += m_docScorer.ComputeSlopFactor(matchLength);
        Spans[] spansArr = new Spans[1];
        spansArr[0] = spans;
        GetPayloads(spansArr); // fold this match's payloads into m_payloadScore
        m_more = spans.Next();
    } while (m_more && (m_doc == spans.Doc));

    return true;
}
protected internal override bool SetFreqCurrentDoc()
{
    if (!More)
    {
        return false;
    }

    Doc = Spans.Doc();
    Freq_Renamed = 0.0f;
    PayloadScore = 0;
    PayloadsSeen = 0;
    do
    {
        int matchLength = Spans.End() - Spans.Start();
        Freq_Renamed += DocScorer.ComputeSlopFactor(matchLength);
        Spans[] spansArr = new Spans[1];
        spansArr[0] = Spans;
        GetPayloads(spansArr); // fold this match's payloads into PayloadScore
        More = Spans.Next();
    } while (More && (Doc == Spans.Doc()));

    return true;
}
public virtual bool Next()
{
    if (firstTime)
    {
        // first time -- initialize
        for (int i = 0; i < all.Count; i++)
        {
            Spans spans = (Spans)all[i];
            if (spans.Next())
            {
                // move to first entry
                queue.Put(spans); // build queue
            }
            else
            {
                all.RemoveAt(i--);
            }
        }
        firstTime = false;
        return queue.Size() != 0;
    }

    if (queue.Size() == 0)
    {
        // all done
        return false;
    }

    if (Top().Next())
    {
        // move to next
        queue.AdjustTop();
        return true;
    }

    all.Remove(queue.Pop()); // exhausted a clause
    return queue.Size() != 0;
}
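// ------------------------------------------------------------------
// Illustrative sketch, not Lucene code: the same least-on-top queue
// pattern as above, using .NET 6's PriorityQueue to merge pre-sorted
// inputs the way the spans queue keeps the sub-span with the smallest
// position on top, re-sorting after each advance and dropping
// exhausted clauses.
// ------------------------------------------------------------------
using System.Collections.Generic;

public static class KWayMerge
{
    public static IEnumerable<int> Merge(IEnumerable<IEnumerable<int>> sortedInputs)
    {
        var queue = new PriorityQueue<IEnumerator<int>, int>();
        foreach (var input in sortedInputs)
        {
            var it = input.GetEnumerator();
            if (it.MoveNext())            // move to first entry -- build queue
            {
                queue.Enqueue(it, it.Current);
            }
        }

        while (queue.Count > 0)           // empty queue -- all done
        {
            var top = queue.Dequeue();
            yield return top.Current;
            if (top.MoveNext())           // move to next, then re-sort the top
            {
                queue.Enqueue(top, top.Current);
            }
            // an exhausted input is simply not re-queued (clause exhausted)
        }
    }
}

// Example: KWayMerge.Merge(new[] { new[] { 1, 4 }, new[] { 2, 3 } })
// yields 1, 2, 3, 4.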
public virtual bool Next()
{
    if (length != -1)
    {
        // subtract old length
        Enclosing_Instance.totalLength -= length;
    }

    bool more = spans.Next(); // move to next
    if (more)
    {
        length = End() - Start(); // compute new length
        Enclosing_Instance.totalLength += length; // add new length to total

        if (Enclosing_Instance.max == null
            || Doc() > Enclosing_Instance.max.Doc()
            || (Doc() == Enclosing_Instance.max.Doc() && End() > Enclosing_Instance.max.End()))
        {
            Enclosing_Instance.max = this;
        }
    }
    return more;
}
protected internal override bool SetFreqCurrentDoc()
{
    if (!More)
    {
        return false;
    }

    Doc = Spans.Doc();
    Freq_Renamed = 0.0f;
    NumMatches = 0;
    PayloadScore_Renamed = 0;
    PayloadsSeen = 0;
    while (More && Doc == Spans.Doc())
    {
        int matchLength = Spans.End() - Spans.Start();
        Freq_Renamed += DocScorer.ComputeSlopFactor(matchLength);
        NumMatches++;
        ProcessPayload(OuterInstance.Similarity);
        More = Spans.Next(); // this moves positions to the next match in this document
    }
    return More || (Freq_Renamed != 0);
}