internal MatchAllScorer(MatchAllDocsQuery enclosingInstance, IndexReader reader, Similarity similarity, Weight w, byte[] norms)
    : base(similarity)
{
    InitBlock(enclosingInstance);
    this.termDocs = reader.TermDocs(null);
    score = w.GetValue();
    this.norms = norms;
}

private float freq; // phrase frequency in current doc as computed by PhraseFreq().

internal PhraseScorer(Weight weight, TermPositions[] tps, int[] offsets, Similarity similarity, byte[] norms)
    : base(similarity)
{
    this.norms = norms;
    this.weight = weight;
    this.value_Renamed = weight.GetValue();

    // Convert tps to a list of phrase positions.
    // Note: phrase-position differs from term-position in that its position
    // reflects the phrase offset: pp.pos = tp.pos - offset.
    // This makes it easy to identify a matching (exact) phrase
    // when all PhrasePositions have exactly the same position.
    for (int i = 0; i < tps.Length; i++)
    {
        PhrasePositions pp = new PhrasePositions(tps[i], offsets[i]);
        if (last != null)
        {
            // add next to end of list
            last.next = pp;
        }
        else
        {
            first = pp;
        }
        last = pp;
    }

    pq = new PhraseQueue(tps.Length); // construct empty pq
    first.doc = -1;
}

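// Illustrative sketch (hypothetical helper, not part of the original source):
// the invariant described above means an exact phrase match occurs precisely
// when every term position, adjusted by its phrase offset, is the same value.
internal static bool AllPositionsAligned(int[] termPositions, int[] offsets)
{
    int expected = termPositions[0] - offsets[0]; // pp.pos = tp.pos - offset
    for (int i = 1; i < termPositions.Length; i++)
    {
        if (termPositions[i] - offsets[i] != expected)
            return false; // at least one term is out of phrase position
    }
    return true; // all adjusted positions coincide: exact match
}
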
public /*internal*/ bool debugCheckedForDeletions = false; // for test purposes.

internal Hits(Searcher s, Query q, Filter f)
{
    weight = q.Weight(s);
    searcher = s;
    filter = f;
    nDeletions = CountDeletions(s);
    GetMoreDocs(50); // retrieve 100 initially: GetMoreDocs doubles its argument
    lengthAtStart = length;
}

/// <summary> Construct a <code>TermScorer</code>.</summary>
/// <param name="weight">The weight of the <code>Term</code> in the query.
/// </param>
/// <param name="td">An iterator over the documents matching the <code>Term</code>.
/// </param>
/// <param name="similarity">The <code>Similarity</code> implementation to be used for score
/// computations.
/// </param>
/// <param name="norms">The field norms of the document fields for the <code>Term</code>.
/// </param>
public /*internal*/ TermScorer(Weight weight, TermDocs td, Similarity similarity, byte[] norms)
    : base(similarity)
{
    this.weight = weight;
    this.termDocs = td;
    this.norms = norms;
    this.weightValue = weight.GetValue();

    // Precompute tf(i) * weight for small frequencies so scoring can use a
    // table lookup instead of recomputing the term-frequency factor each time.
    for (int i = 0; i < SCORE_CACHE_SIZE; i++)
        scoreCache[i] = GetSimilarity().Tf(i) * weightValue;
}

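// Companion sketch (hedged: this mirrors the lookup pattern the cache above
// enables; it is illustrative, not a verbatim copy of TermScorer.Score()).
// Frequencies below SCORE_CACHE_SIZE hit the precomputed table; rarer, larger
// frequencies fall back to computing the term-frequency factor directly.
private float RawScoreFromCache(int freq)
{
    return freq < SCORE_CACHE_SIZE
        ? scoreCache[freq]                        // table lookup: tf(freq) * weight
        : GetSimilarity().Tf(freq) * weightValue; // large freq: compute on the fly
}
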
/// <summary> A search implementation which spawns a new thread for each
/// Searchable, waits for each search to complete, and merges
/// the results back together.
/// </summary>
public override TopDocs Search(Weight weight, Filter filter, int nDocs)
{
    HitQueue hq = new HitQueue(nDocs, false);
    int totalHits = 0;
    MultiSearcherThread[] msta = new MultiSearcherThread[searchables.Length];
    for (int i = 0; i < searchables.Length; i++)
    {
        // Search each searchable on its own thread. Assume there are not too
        // many searchables and that the cost of creating a thread is by far
        // inferior to the cost of a search.
        msta[i] = new MultiSearcherThread(searchables[i], weight, filter, nDocs, hq, i, starts, "MultiSearcher thread #" + (i + 1));
        msta[i].Start();
    }

    for (int i = 0; i < searchables.Length; i++)
    {
        try
        {
            msta[i].Join();
        }
        catch (System.Threading.ThreadInterruptedException ie)
        {
            // In 3.0 we will change this to throw InterruptedException instead
            SupportClass.ThreadClass.Current().Interrupt();
            throw new System.SystemException(ie.Message, ie);
        }
        System.IO.IOException ioe = msta[i].GetIOException();
        if (ioe == null)
        {
            totalHits += msta[i].Hits();
        }
        else
        {
            // if one search produced an IOException, rethrow it
            throw ioe;
        }
    }

    ScoreDoc[] scoreDocs = new ScoreDoc[hq.Size()];
    for (int i = hq.Size() - 1; i >= 0; i--)
        // put docs in array, best hit first
        scoreDocs[i] = (ScoreDoc) hq.Pop();

    float maxScore = (totalHits == 0) ? System.Single.NegativeInfinity : scoreDocs[0].score;
    return new TopDocs(totalHits, scoreDocs, maxScore);
}

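// Hypothetical usage sketch (ExampleParallelSearch, dir1, dir2 and query are
// assumptions, not from the original source): the caller issues one Search and
// each Searchable is queried on its own thread, with results merged into a
// single TopDocs.
public static TopDocs ExampleParallelSearch(Lucene.Net.Store.Directory dir1, Lucene.Net.Store.Directory dir2, Query query)
{
    Searchable[] shards = new Searchable[]
    {
        new IndexSearcher(dir1), // one shard per index
        new IndexSearcher(dir2)
    };
    ParallelMultiSearcher searcher = new ParallelMultiSearcher(shards);
    return searcher.Search(query, 10); // top 10 hits merged across shards
}
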
public ConstantScorer(ConstantScoreQuery enclosingInstance, Similarity similarity, IndexReader reader, Weight w)
    : base(similarity)
{
    InitBlock(enclosingInstance);
    theScore = w.GetValue();

    // Fall back to an empty iterator when the filter yields no DocIdSet, or a
    // DocIdSet with no iterator; both mean "matches nothing".
    DocIdSet docIdSet = Enclosing_Instance.filter.GetDocIdSet(reader);
    if (docIdSet == null)
    {
        docIdSetIterator = DocIdSet.EMPTY_DOCIDSET.Iterator();
    }
    else
    {
        DocIdSetIterator iter = docIdSet.Iterator();
        if (iter == null)
        {
            docIdSetIterator = DocIdSet.EMPTY_DOCIDSET.Iterator();
        }
        else
        {
            docIdSetIterator = iter;
        }
    }
}

public MultiSearcherThread(Searchable searchable, Weight weight, Filter filter, int nDocs, FieldDocSortedHitQueue hq, Sort sort, int i, int[] starts, System.String name)
    : base(name)
{
    this.searchable = searchable;
    this.weight = weight;
    this.filter = filter;
    this.nDocs = nDocs;
    this.hq = hq;
    this.i = i;
    this.starts = starts;
    this.sort = sort;
}

public override Explanation Explain(Weight weight, int doc)
{
    int n = ReaderUtil.SubIndex(doc, docStarts); // which sub-reader holds doc
    int deBasedDoc = doc - docStarts[n];         // rebase to that sub-reader
    return weight.Explain(subReaders[n], deBasedDoc);
}

public override TopFieldDocs Search(Weight weight, Filter filter, int n, Sort sort)
{
    FieldDocSortedHitQueue hq = null;
    int totalHits = 0;
    float maxScore = System.Single.NegativeInfinity;

    for (int i = 0; i < searchables.Length; i++)
    {
        // search each searcher
        TopFieldDocs docs = searchables[i].Search(weight, filter, n, sort);

        // If one of the Sort fields is FIELD_DOC, we need to fix its values, so
        // that it will break ties by doc Id properly. Otherwise it will compare
        // 'relative' doc Ids that belong to two different searchers.
        for (int j = 0; j < docs.fields.Length; j++)
        {
            if (docs.fields[j].GetType() == SortField.DOC)
            {
                // iterate over the score docs and change their fields value
                for (int j2 = 0; j2 < docs.ScoreDocs.Length; j2++)
                {
                    FieldDoc fd = (FieldDoc) docs.ScoreDocs[j2];
                    fd.fields[j] = (System.Int32) (((System.Int32) fd.fields[j]) + starts[i]);
                }
                break;
            }
        }

        if (hq == null)
            hq = new FieldDocSortedHitQueue(docs.fields, n);

        totalHits += docs.TotalHits; // update totalHits
        maxScore = System.Math.Max(maxScore, docs.GetMaxScore());

        ScoreDoc[] scoreDocs = docs.ScoreDocs;
        for (int j = 0; j < scoreDocs.Length; j++)
        {
            // merge scoreDocs into hq
            ScoreDoc scoreDoc = scoreDocs[j];
            scoreDoc.doc += starts[i]; // convert doc to the merged id space
            if (!hq.Insert(scoreDoc))
                break; // no more scores > minScore
        }
    }

    ScoreDoc[] scoreDocs2 = new ScoreDoc[hq.Size()];
    for (int i = hq.Size() - 1; i >= 0; i--)
        // put docs in array
        scoreDocs2[i] = (ScoreDoc) hq.Pop();

    return new TopFieldDocs(totalHits, scoreDocs2, hq.GetFields(), maxScore);
}

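// Illustrative sketch (hypothetical helper): how the starts[] array used above
// is built and why "scoreDoc.doc += starts[i]" yields a global doc id.
// starts[i] is the total number of documents held by all searchers before
// searcher i, so a hit that is document d inside searcher i becomes document
// d + starts[i] in the merged id space.
internal static int[] ComputeStarts(int[] maxDocPerSearcher)
{
    int[] starts = new int[maxDocPerSearcher.Length + 1];
    for (int i = 0; i < maxDocPerSearcher.Length; i++)
        starts[i + 1] = starts[i] + maxDocPerSearcher[i]; // cumulative doc count
    return starts;
}
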
public override Explanation Explain(Weight weight, int doc)
{
    int i = SubSearcher(doc); // find searcher index
    return searchables[i].Explain(weight, doc - starts[i]); // dispatch to searcher
}

public override Explanation Explain(Weight weight, int doc)
{
    throw new System.NotSupportedException();
}

public override TopFieldDocs Search(Weight weight, Filter filter, int n, Sort sort)
{
    throw new System.NotSupportedException();
}

public virtual void Search(Weight weight, Filter filter, HitCollector results)
{
    Search(weight, filter, new HitCollectorWrapper(results));
}

public abstract TopFieldDocs Search(Weight weight, Filter filter, int n, Sort sort);

public abstract Explanation Explain(Weight weight, int doc);

public abstract TopDocs Search(Weight weight, Filter filter, int n);

public abstract void Search(Weight weight, Filter filter, Collector results);

// inherit javadoc
public override TopDocs Search(Weight weight, Filter filter, int nDocs)
{
    if (nDocs <= 0)
    {
        throw new System.ArgumentException("nDocs must be > 0");
    }
    nDocs = System.Math.Min(nDocs, reader.MaxDoc());

    TopScoreDocCollector collector = TopScoreDocCollector.create(nDocs, !weight.ScoresDocsOutOfOrder());
    Search(weight, filter, collector);
    return collector.TopDocs();
}

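// Hypothetical usage sketch (the helper name, field and term values are
// assumptions, not from the original source): the public Search(Query, int)
// overload ends up in the method above once the query has been rewritten
// and weighted.
public static TopDocs ExampleTopDocsSearch(IndexReader reader)
{
    IndexSearcher searcher = new IndexSearcher(reader);
    Query query = new TermQuery(new Term("body", "lucene")); // assumed field/term
    return searcher.Search(query, 10); // at most 10 hits, best score first
}
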
public override TopFieldDocs Search(Weight weight, Filter filter, int nDocs, Sort sort)
{
    return Search(weight, filter, nDocs, sort, true);
}

public override void Search(Weight weight, Filter filter, Collector results)
{
    throw new System.NotSupportedException();
}

/// <summary> Just like {@link #Search(Weight, Filter, int, Sort)}, but you choose
/// whether or not the fields in the returned {@link FieldDoc} instances
/// should be set by specifying fillFields.<br/>
///
/// <p/>
/// NOTE: this does not compute scores by default. If you need scores, create
/// a {@link TopFieldCollector} instance by calling
/// {@link TopFieldCollector#create} and then pass that to
/// {@link #Search(Weight, Filter, Collector)}.
/// <p/>
/// </summary>
public virtual TopFieldDocs Search(Weight weight, Filter filter, int nDocs, Sort sort, bool fillFields)
{
    nDocs = System.Math.Min(nDocs, reader.MaxDoc());

    SortField[] fields = sort.fields;
    bool legacy = false;
    for (int i = 0; i < fields.Length; i++)
    {
        SortField field = fields[i];
        System.String fieldname = field.GetField();
        int type = field.GetType();

        // Resolve AUTO into its true type
        if (type == SortField.AUTO)
        {
            int autotype = SortField.DetectFieldType(reader, fieldname);
            if (autotype == SortField.STRING)
            {
                fields[i] = new SortField(fieldname, field.GetLocale(), field.GetReverse());
            }
            else
            {
                fields[i] = new SortField(fieldname, autotype, field.GetReverse());
            }
        }

        if (field.GetUseLegacySearch())
        {
            legacy = true;
        }
    }

    if (legacy)
    {
        // Search the single top-level reader
        TopDocCollector collector = new TopFieldDocCollector(reader, sort, nDocs);
        HitCollectorWrapper hcw = new HitCollectorWrapper(collector);
        hcw.SetNextReader(reader, 0);
        if (filter == null)
        {
            Scorer scorer = weight.Scorer(reader, true, true);
            if (scorer != null)
            {
                scorer.Score(hcw);
            }
        }
        else
        {
            SearchWithFilter(reader, weight, filter, hcw);
        }
        return (TopFieldDocs) collector.TopDocs();
    }

    TopFieldCollector collector2 = TopFieldCollector.create(sort, nDocs, fillFields, fieldSortDoTrackScores, fieldSortDoMaxScore, !weight.ScoresDocsOutOfOrder());
    Search(weight, filter, collector2);
    return (TopFieldDocs) collector2.TopDocs();
}

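// Sketch following the NOTE in the summary above (hedged: the method name, hit
// count, and flag values are assumptions). To get scores with a sorted search,
// create a TopFieldCollector that tracks them and pass it to
// Search(Weight, Filter, Collector) instead of calling this method.
public virtual TopFieldDocs ExampleSortedSearchWithScores(Weight weight, Filter filter, Sort sort)
{
    TopFieldCollector collector = TopFieldCollector.create(
        sort, 10,
        true,  // fillFields: populate FieldDoc.fields
        true,  // trackDocScores: compute a score per hit
        true,  // trackMaxScore: compute the maximum score over all hits
        !weight.ScoresDocsOutOfOrder());
    Search(weight, filter, collector);
    return (TopFieldDocs) collector.TopDocs();
}
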
public override TopDocs Search(Weight weight, Filter filter, int nDocs)
{
    HitQueue hq = new HitQueue(nDocs, false);
    int totalHits = 0;

    for (int i = 0; i < searchables.Length; i++)
    {
        // search each searcher
        TopDocs docs = searchables[i].Search(weight, filter, nDocs);
        totalHits += docs.TotalHits; // update totalHits

        ScoreDoc[] scoreDocs = docs.ScoreDocs;
        for (int j = 0; j < scoreDocs.Length; j++)
        {
            // merge scoreDocs into hq
            ScoreDoc scoreDoc = scoreDocs[j];
            scoreDoc.doc += starts[i]; // convert doc to the merged id space
            if (!hq.Insert(scoreDoc))
                break; // no more scores > minScore
        }
    }

    ScoreDoc[] scoreDocs2 = new ScoreDoc[hq.Size()];
    for (int i = hq.Size() - 1; i >= 0; i--)
        // put docs in array
        scoreDocs2[i] = (ScoreDoc) hq.Pop();

    float maxScore = (totalHits == 0) ? System.Single.NegativeInfinity : scoreDocs2[0].score;
    return new TopDocs(totalHits, scoreDocs2, maxScore);
}

public override void Search(Weight weight, Filter filter, Collector collector)
{
    if (filter == null)
    {
        for (int i = 0; i < subReaders.Length; i++)
        {
            // search each subreader
            collector.SetNextReader(subReaders[i], docStarts[i]);
            Scorer scorer = weight.Scorer(subReaders[i], !collector.AcceptsDocsOutOfOrder(), true);
            if (scorer != null)
            {
                scorer.Score(collector);
            }
        }
    }
    else
    {
        for (int i = 0; i < subReaders.Length; i++)
        {
            // search each subreader
            collector.SetNextReader(subReaders[i], docStarts[i]);
            SearchWithFilter(subReaders[i], weight, filter, collector);
        }
    }
}

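// Minimal Collector sketch (hypothetical class, not from the original source)
// showing why SetNextReader receives a doc base above: inside Collect(), doc is
// relative to the current sub-reader, so a collector adds the base to recover
// ids in the composite reader's space.
private class ExampleCountingCollector : Collector
{
    private int docBase;
    internal int count;
    internal int lastGlobalDoc = -1;

    public override void SetScorer(Scorer scorer)
    {
        // scores are not needed for counting
    }

    public override void SetNextReader(IndexReader reader, int docBase)
    {
        this.docBase = docBase; // offset of this sub-reader in the composite
    }

    public override void Collect(int doc)
    {
        lastGlobalDoc = docBase + doc; // rebase the segment-relative id
        count++;
    }

    public override bool AcceptsDocsOutOfOrder()
    {
        return true; // counting does not depend on collection order
    }
}
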
// inherit javadoc
public override void Search(Weight weight, Filter filter, Collector collector)
{
    for (int i = 0; i < searchables.Length; i++)
    {
        int start = starts[i];
        Collector hc = new AnonymousClassCollector(collector, start, this);
        searchables[i].Search(weight, filter, hc);
    }
}

private void SearchWithFilter(IndexReader reader, Weight weight, Filter filter, Collector collector)
{
    System.Diagnostics.Debug.Assert(filter != null);

    Scorer scorer = weight.Scorer(reader, true, false);
    if (scorer == null)
    {
        return;
    }

    int docID = scorer.DocID();
    System.Diagnostics.Debug.Assert(docID == -1 || docID == DocIdSetIterator.NO_MORE_DOCS);

    // CHECKME: use ConjunctionScorer here?
    DocIdSet filterDocIdSet = filter.GetDocIdSet(reader);
    if (filterDocIdSet == null)
    {
        // this means the filter does not accept any documents.
        return;
    }

    DocIdSetIterator filterIter = filterDocIdSet.Iterator();
    if (filterIter == null)
    {
        // this means the filter does not accept any documents.
        return;
    }

    // Leapfrog: repeatedly advance whichever iterator is behind until both
    // agree on a doc, then collect it.
    int filterDoc = filterIter.NextDoc();
    int scorerDoc = scorer.Advance(filterDoc);

    collector.SetScorer(scorer);
    while (true)
    {
        if (scorerDoc == filterDoc)
        {
            // Check if scorer has exhausted, only before collecting.
            if (scorerDoc == DocIdSetIterator.NO_MORE_DOCS)
            {
                break;
            }
            collector.Collect(scorerDoc);
            filterDoc = filterIter.NextDoc();
            scorerDoc = scorer.Advance(filterDoc);
        }
        else if (scorerDoc > filterDoc)
        {
            filterDoc = filterIter.Advance(scorerDoc);
        }
        else
        {
            scorerDoc = scorer.Advance(filterDoc);
        }
    }
}

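// Standalone sketch (hypothetical helper) of the leapfrog intersection used
// above, expressed over two sorted int arrays instead of DocIdSetIterators:
// whichever side is behind skips forward to the other side's current doc, and
// a document is collected only when both sides land on the same id.
internal static System.Collections.Generic.List<int> LeapfrogIntersect(int[] a, int[] b)
{
    System.Collections.Generic.List<int> hits = new System.Collections.Generic.List<int>();
    int i = 0, j = 0;
    while (i < a.Length && j < b.Length)
    {
        if (a[i] == b[j])
        {
            hits.Add(a[i]); // both sides agree: collect this doc
            i++;
            j++;
        }
        else if (a[i] < b[j])
        {
            while (i < a.Length && a[i] < b[j]) i++; // advance a to >= b[j]
        }
        else
        {
            while (j < b.Length && b[j] < a[i]) j++; // advance b to >= a[i]
        }
    }
    return hits;
}
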
internal SloppyPhraseScorer(Weight weight, TermPositions[] tps, int[] offsets, Similarity similarity, int slop, byte[] norms)
    : base(weight, tps, offsets, similarity, norms)
{
    this.slop = slop;
}

internal ExactPhraseScorer(Weight weight, TermPositions[] tps, int[] offsets, Similarity similarity, byte[] norms)
    : base(weight, tps, offsets, similarity, norms)
{
}