// Scorer for MatchAllDocsQuery: every non-deleted document matches
// with the same constant score (the weight's value).
internal MatchAllScorer(MatchAllDocsQuery enclosingInstance, IndexReader reader, Similarity similarity, Weight w, byte[] norms)
    : base(similarity)
{
    InitBlock(enclosingInstance);
    // Passing a null term asks the reader for an enumerator over all
    // non-deleted docs (per the IndexReader.TermDocs(Term) contract).
    termDocs = reader.TermDocs(null);
    this.norms = norms;
    score = w.Value;
}
// Apply buffered delete terms, queries and docIDs to the
// provided reader.  docIDStart is the offset of this reader's
// docIDs within the global docID space of the flushed deletes.
// Returns true if any document was actually deleted.
private bool ApplyDeletes(IndexReader reader, int docIDStart)
{
    // NOTE(review): lock(this) is normally discouraged (prefer a private
    // lock object), but it is kept here to stay consistent with the rest
    // of this class's synchronization.
    lock (this)
    {
        int docEnd = docIDStart + reader.MaxDoc;
        bool any = false;

        System.Diagnostics.Debug.Assert(CheckDeleteTerm(null));

        // Delete by term
        TermDocs docs = reader.TermDocs();
        try
        {
            foreach (KeyValuePair<Term, BufferedDeletes.Num> entry in deletesFlushed.terms)
            {
                Term term = entry.Key;
                // LUCENE-2086: we should be iterating a TreeMap,
                // here, so terms better be in order:
                System.Diagnostics.Debug.Assert(CheckDeleteTerm(term));
                docs.Seek(term);
                // Only docs added before this delete was buffered (limit)
                // may be deleted.
                int limit = entry.Value.GetNum();
                while (docs.Next())
                {
                    int docID = docs.Doc;
                    if (docIDStart + docID >= limit)
                        break;
                    reader.DeleteDocument(docID);
                    any = true;
                }
            }
        }
        finally
        {
            docs.Close();
        }

        // Delete by docID
        foreach (int docIdInt in deletesFlushed.docIDs)
        {
            int docID = docIdInt;
            if (docID >= docIDStart && docID < docEnd)
            {
                reader.DeleteDocument(docID - docIDStart);
                any = true;
            }
        }

        // Delete by query
        IndexSearcher searcher = new IndexSearcher(reader);
        try
        {
            foreach (KeyValuePair<Query, int> entry in deletesFlushed.queries)
            {
                Query query = entry.Key;
                int limit = entry.Value;
                Weight weight = query.Weight(searcher);
                Scorer scorer = weight.Scorer(reader, true, false);
                if (scorer != null)
                {
                    while (true)
                    {
                        int doc = scorer.NextDoc();
                        // The long cast prevents int overflow when NextDoc()
                        // returns NO_MORE_DOCS (int.MaxValue); that sentinel
                        // also terminates the loop via this comparison.
                        if (((long) docIDStart) + doc >= limit)
                            break;
                        reader.DeleteDocument(doc);
                        any = true;
                    }
                }
            }
        }
        finally
        {
            // BUGFIX: previously searcher.Close() was only reached on the
            // success path, leaking the searcher if Weight/Scorer creation
            // or DeleteDocument threw.  Close it unconditionally, matching
            // the try/finally used for the TermDocs above.
            searcher.Close();
        }

        return any;
    }
}
// Builds a StringIndex for sorting this field: retArray maps
// docID -> term ordinal, and mterms maps ordinal -> term text.
// Ordinals are assigned in term-enumeration order, so comparing
// two docs' ordinals compares their terms.
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
{
    System.String field = StringHelper.Intern(entryKey.field);
    int[] retArray = new int[reader.MaxDoc];
    // One extra slot: ordinal 0 is reserved below for docs with no term.
    System.String[] mterms = new System.String[reader.MaxDoc + 1];
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field));
    int t = 0; // current term number

    // an entry for documents that have no terms in this field
    // should a document with no terms be at top or bottom?
    // this puts them at the top - if it is changed, FieldDocSortedHitQueue
    // needs to change as well.
    mterms[t++] = null;

    try
    {
        do 
        {
            Term term = termEnum.Term;
            // Stop at the end of the enumeration, at the first term of a
            // different field, or when the ordinal table is full (t would
            // run past mterms; any further terms are ignored).
            if (term == null || term.Field != field || t >= mterms.Length)
                break;

            // store term text
            mterms[t] = term.Text;

            // Assign this term's ordinal to every document containing it.
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                retArray[termDocs.Doc] = t;
            }

            t++;
        }
        while (termEnum.Next());
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }

    if (t == 0)
    {
        // if there are no terms, make the term array
        // have a single null entry
        // (NOTE(review): t starts at 1 after the null entry above, so this
        // branch looks unreachable; preserved as-is from upstream Lucene.)
        mterms = new System.String[1];
    }
    else if (t < mterms.Length)
    {
        // if there are less terms than documents,
        // trim off the dead array space
        System.String[] terms = new System.String[t];
        Array.Copy(mterms, 0, terms, 0, t);
        mterms = terms;
    }

    StringIndex value_Renamed = new StringIndex(retArray, mterms);
    return value_Renamed;
}
// Builds a per-document array of the term text each document holds
// in this field; slots stay null for documents without a value.
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
{
    System.String field = StringHelper.Intern(entryKey.field);
    System.String[] values = new System.String[reader.MaxDoc];
    TermDocs docsEnum = reader.TermDocs();
    TermEnum termsEnum = reader.Terms(new Term(field));
    try
    {
        while (true)
        {
            Term current = termsEnum.Term;
            // Reference comparison: field was interned above, and the
            // comparison mirrors the original's (object)-cast check.
            if (current == null || (System.Object) current.Field != (System.Object) field)
                break;
            System.String text = current.Text;
            // Record this term's text for every document containing it.
            docsEnum.Seek(termsEnum);
            while (docsEnum.Next())
            {
                values[docsEnum.Doc] = text;
            }
            if (!termsEnum.Next())
                break;
        }
    }
    finally
    {
        docsEnum.Close();
        termsEnum.Close();
    }
    return values;
}
// Builds (and returns) a double[] keyed by docID holding each
// document's parsed value for this field.
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
{
    Entry entry = entryKey;
    System.String field = entry.field;
    Lucene.Net.Search.DoubleParser parser = (Lucene.Net.Search.DoubleParser) entry.custom;
    if (parser == null)
    {
        // No explicit parser supplied: try the plain-text default parser
        // first and, on a format error, fall back to the NumericUtils
        // parser.  Each call re-enters the cache with a concrete parser,
        // so the result is cached under that parser's entry.
        try
        {
            return wrapper.GetDoubles(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_DOUBLE_PARSER);
        }
        catch (System.FormatException)
        {
            return wrapper.GetDoubles(reader, field, Lucene.Net.Search.FieldCache_Fields.NUMERIC_UTILS_DOUBLE_PARSER);
        }
    }
    double[] retArray = null;
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field));
    try
    {
        do 
        {
            Term term = termEnum.Term;
            // Reference comparison on the (interned) field name, mirroring
            // the other FieldCache creators.
            if (term == null || (System.Object) term.Field != (System.Object) field)
                break;
            double termval = parser.ParseDouble(term.Text);
            if (retArray == null)
                // late init: only allocate once the field is known to
                // have at least one parseable term
                retArray = new double[reader.MaxDoc];
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                retArray[termDocs.Doc] = termval;
            }
        }
        while (termEnum.Next());
    }
    catch (StopFillCacheException)
    {
        // The parser signalled it has seen enough terms; whatever was
        // filled so far is kept.
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    if (retArray == null)
        // no values were found for this field: return an all-zero array
        retArray = new double[reader.MaxDoc];
    return retArray;
}
// Fills a short[] (indexed by docID) with the parsed value of each
// document's term in this field; docs without a term keep 0.
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
{
    Entry e = entryKey;
    System.String field = e.field;
    ShortParser customParser = (ShortParser) e.custom;
    if (customParser == null)
    {
        // No custom parser: delegate to the cache entry keyed by the
        // default parser so callers share a single array.
        return wrapper.GetShorts(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_SHORT_PARSER);
    }
    short[] values = new short[reader.MaxDoc];
    TermDocs docsEnum = reader.TermDocs();
    TermEnum termsEnum = reader.Terms(new Term(field));
    try
    {
        while (true)
        {
            Term current = termsEnum.Term;
            // Reference comparison on the field name, same as the
            // original's (object)-cast check.
            if (current == null || (System.Object) current.Field != (System.Object) field)
                break;
            short parsed = customParser.ParseShort(current.Text);
            docsEnum.Seek(termsEnum);
            while (docsEnum.Next())
            {
                values[docsEnum.Doc] = parsed;
            }
            if (!termsEnum.Next())
                break;
        }
    }
    catch (StopFillCacheException)
    {
        // Parser asked to stop early; return the partially filled array,
        // exactly as the original did.
    }
    finally
    {
        docsEnum.Close();
        termsEnum.Close();
    }
    return values;
}
// Scorer whose per-document score comes from the query's ValueSource,
// scaled by the weight's query weight.
internal ValueSourceScorer(ValueSourceQuery enclosingInstance, Similarity similarity, IndexReader reader, ValueSourceWeight w)
    : base(similarity)
{
    InitBlock(enclosingInstance);
    weight = w;
    qWeight = w.Value;
    // A null term enumerates all non-deleted docs for this reader.
    termDocs = reader.TermDocs(null);
    // This is when/where the per-document values are first created.
    vals = Enclosing_Instance.valSrc.GetValues(reader);
}
// Creates a TermScorer over this term's postings for the given reader,
// or null when the reader yields no postings enumerator for the term.
public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer)
{
    TermDocs docs = reader.TermDocs(Enclosing_Instance.term);
    return docs == null
        ? null
        : new TermScorer(this, docs, similarity, reader.Norms(Enclosing_Instance.term.Field));
}
// Explains how the score for document `doc` was computed:
// result = queryWeight (boost * idf * queryNorm)
//        * fieldWeight (tf * idf * fieldNorm).
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
{
}
public override Explanation Explain(IndexReader reader, int doc)
{
    ComplexExplanation result = new ComplexExplanation();
    result.Description = "weight(" + Query + " in " + doc + "), product of:";

    Explanation expl = new Explanation(idf, idfExp.Explain());

    // explain query weight
    Explanation queryExpl = new Explanation();
    queryExpl.Description = "queryWeight(" + Query + "), product of:";

    Explanation boostExpl = new Explanation(Enclosing_Instance.Boost, "boost");
    // Boost detail is only shown when it actually differs from 1,
    // but it always participates in the value product below.
    if (Enclosing_Instance.Boost != 1.0f)
        queryExpl.AddDetail(boostExpl);
    queryExpl.AddDetail(expl);

    Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);

    queryExpl.Value = boostExpl.Value * expl.Value * queryNormExpl.Value;
    result.AddDetail(queryExpl);

    // explain field weight
    System.String field = Enclosing_Instance.term.Field;
    ComplexExplanation fieldExpl = new ComplexExplanation();
    fieldExpl.Description = "fieldWeight(" + Enclosing_Instance.term + " in " + doc + "), product of:";

    // Term frequency of this term in `doc` (0 if the term does not
    // occur there, or if no postings enumerator is available).
    Explanation tfExplanation = new Explanation();
    int tf = 0;
    TermDocs termDocs = reader.TermDocs(enclosingInstance.term);
    if (termDocs != null)
    {
        try
        {
            if (termDocs.SkipTo(doc) && termDocs.Doc == doc)
            {
                tf = termDocs.Freq;
            }
        }
        finally
        {
            termDocs.Close();
        }
        tfExplanation.Value = similarity.Tf(tf);
        tfExplanation.Description = "tf(termFreq(" + enclosingInstance.term + ")=" + tf + ")";
    }
    else
    {
        tfExplanation.Value = 0.0f;
        tfExplanation.Description = "no matching term";
    }
    fieldExpl.AddDetail(tfExplanation);
    fieldExpl.AddDetail(expl);

    Explanation fieldNormExpl = new Explanation();
    byte[] fieldNorms = reader.Norms(field);
    // Missing norms (e.g. norms omitted for the field) count as 1.0.
    float fieldNorm = fieldNorms != null ? Similarity.DecodeNorm(fieldNorms[doc]) : 1.0f;
    fieldNormExpl.Value = fieldNorm;
    fieldNormExpl.Description = "fieldNorm(field=" + field + ", doc=" + doc + ")";
    fieldExpl.AddDetail(fieldNormExpl);

    fieldExpl.Match = tfExplanation.IsMatch;
    fieldExpl.Value = tfExplanation.Value * expl.Value * fieldNormExpl.Value;
    result.AddDetail(fieldExpl);
    System.Boolean? tempAux = fieldExpl.Match;
    result.Match = tempAux;

    // combine them
    result.Value = queryExpl.Value * fieldExpl.Value;

    // When the query weight contributes nothing (== 1), the field
    // explanation alone is the whole story.
    if (queryExpl.Value == 1.0f)
        return fieldExpl;

    return result;
}
// Obtains the TermDocs enumerator to use against the given reader.
// With no current term, TermDocs(null) is requested (which, per the
// IndexReader contract, enumerates all non-deleted docs); otherwise
// an unpositioned enumerator is returned for the caller to seek.
protected internal virtual TermDocs TermDocs(IndexReader reader)
{
    if (term == null)
    {
        return reader.TermDocs(null);
    }
    return reader.TermDocs();
}