/// <summary> Enumerates every term that shares this filter's prefix and feeds
/// each matching document id to <c>HandleDoc</c>.</summary>
/// <param name="reader">Index to read terms and postings from.</param>
public virtual void Generate(IndexReader reader)
{
    TermEnum enumerator = reader.Terms(prefix);
    TermDocs termDocs = reader.TermDocs();
    try
    {
        System.String prefixText = prefix.Text();
        System.String prefixField = prefix.Field();
        do
        {
            Term term = enumerator.Term();
            // Field names are interned, so reference comparison is sufficient.
            bool stillInPrefix = term != null
                && (System.Object) term.Field() == (System.Object) prefixField
                && term.Text().StartsWith(prefixText);
            if (!stillInPrefix)
            {
                break;
            }
            termDocs.Seek(term);
            while (termDocs.Next())
            {
                HandleDoc(termDocs.Doc());
            }
        }
        while (enumerator.Next());
    }
    finally
    {
        termDocs.Close();
        enumerator.Close();
    }
}
/// <summary> Builds a scorer that matches every document in the reader with a
/// constant query weight.</summary>
internal MatchAllScorer(MatchAllDocsQuery enclosingInstance, IndexReader reader, Similarity similarity, Weight w, byte[] norms)
    : base(similarity)
{
    InitBlock(enclosingInstance);
    this.norms = norms;
    score = w.GetValue();
    // A null term makes TermDocs iterate over every document.
    this.termDocs = reader.TermDocs(null);
}
/// <summary> Returns an array of objects which represent the natural order
/// of the term values in the given Field.
///
/// </summary>
/// <param name="reader"> Terms are in this index.
/// </param>
/// <param name="enumerator">Use this to get the term values and TermDocs.
/// </param>
/// <param name="fieldname"> Comparables should be for this Field.
/// </param>
/// <returns> Array of objects representing natural order of terms in Field.
/// </returns>
/// <throws> IOException If an error occurs reading the index. </throws>
protected internal virtual System.IComparable[] FillCache(IndexReader reader, TermEnum enumerator, System.String fieldname)
{
    // Intern so the reference comparison against term.Field() below is valid.
    System.String field = String.Intern(fieldname);
    System.IComparable[] retArray = new System.IComparable[reader.MaxDoc()];
    if (retArray.Length > 0)
    {
        TermDocs termDocs = reader.TermDocs();
        try
        {
            if (enumerator.Term() == null)
            {
                throw new System.SystemException("no terms in Field " + field);
            }
            do
            {
                Term term = enumerator.Term();
                // Terms are ordered by field first: leaving the field means we are done.
                if ((System.Object) term.Field() != (System.Object) field)
                {
                    break;
                }
                System.IComparable termval = GetComparable(term.Text());
                termDocs.Seek(enumerator);
                // Every document containing this term receives the same comparable.
                while (termDocs.Next())
                {
                    retArray[termDocs.Doc()] = termval;
                }
            }
            while (enumerator.Next());
        }
        finally
        {
            // The enumerator is owned by the caller; close only what we opened.
            termDocs.Close();
        }
    }
    return (retArray);
}
/// <summary> Returns a BitSet with true for documents which should be
/// permitted in search results, and false for those that should
/// not.
/// </summary>
/// <param name="reader">Index whose documents are filtered.</param>
public override System.Collections.BitArray Bits(IndexReader reader)
{
    // Size the BitArray up to a multiple of 64, matching Lucene's word-aligned layout.
    System.Collections.BitArray bits = new System.Collections.BitArray((reader.MaxDoc() % 64 == 0 ? reader.MaxDoc() / 64 : reader.MaxDoc() / 64 + 1) * 64);
    TermEnum enumerator = reader.Terms(new Term(field, start));
    TermDocs termDocs = reader.TermDocs();
    try
    {
        // No term at or after "start": nothing can be in range.
        // BUGFIX: this early return previously sat before the try/finally and
        // leaked both the TermEnum and the TermDocs.
        if (enumerator.Term() == null)
        {
            return bits;
        }
        Term stop = new Term(field, end);
        while (enumerator.Term().CompareTo(stop) <= 0)
        {
            termDocs.Seek(enumerator.Term());
            while (termDocs.Next())
            {
                bits.Set(termDocs.Doc(), true);
            }
            if (!enumerator.Next())
            {
                break;
            }
        }
    }
    finally
    {
        enumerator.Close();
        termDocs.Close();
    }
    return bits;
}
/// <summary> Constructs a scorer over all documents; every match scores the
/// weight's constant value.</summary>
internal MatchAllScorer(MatchAllDocsQuery enclosingInstance, IndexReader reader, Similarity similarity, Weight w, byte[] norms)
    : base(similarity)
{
    InitBlock(enclosingInstance);
    // Seeking with a null term yields an iterator over all documents.
    this.termDocs = reader.TermDocs(null);
    this.norms = norms;
    this.score = w.GetValue();
}
/// <summary> Fills a per-document array of longs for the entry's field by
/// walking each term of the field and storing its parsed value for every
/// document containing it.</summary>
protected internal override object CreateValue(IndexReader reader, object entryKey)
{
    Entry cacheEntry = (Entry) entryKey;
    System.String field = cacheEntry.field;
    LongParser parser = (LongParser) cacheEntry.custom;
    long[] values = new long[reader.MaxDoc()];
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field));
    try
    {
        do
        {
            Term current = termEnum.Term();
            // Stop once the enumeration leaves the requested field
            // (field names are interned, so reference equality suffices).
            if (current == null || (object) current.Field() != (object) field)
            {
                break;
            }
            long parsed = parser.ParseLong(current.Text());
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                values[termDocs.Doc()] = parsed;
            }
        }
        while (termEnum.Next());
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    return values;
}
/// <summary> Fills a per-document array of term-text strings for the entry's
/// field: each document gets the text of the (single) term it contains.</summary>
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
{
    System.String field = StringHelper.Intern((System.String) entryKey.field);
    System.String[] values = new System.String[reader.MaxDoc()];
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field));
    try
    {
        while (true)
        {
            Term current = termEnum.Term();
            // Interned field names allow a reference comparison here.
            if (current == null || (System.Object) current.Field() != (System.Object) field)
            {
                break;
            }
            System.String text = current.Text();
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                values[termDocs.Doc()] = text;
            }
            if (!termEnum.Next())
            {
                break;
            }
        }
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    return values;
}
/// <summary> Fills a per-document array of comparables for the entry's field,
/// produced by the entry's custom <c>SortComparator</c>.</summary>
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
{
    Entry entry = (Entry) entryKey;
    System.String field = entry.field;
    SortComparator comparator = (SortComparator) entry.custom;
    System.IComparable[] ordered = new System.IComparable[reader.MaxDoc()];
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field));
    try
    {
        bool more = true;
        while (more)
        {
            Term current = termEnum.Term();
            // Interned field names make reference equality valid.
            if (current == null || (System.Object) current.Field() != (System.Object) field)
            {
                break;
            }
            System.IComparable comparable = comparator.GetComparable(current.Text());
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                ordered[termDocs.Doc()] = comparable;
            }
            more = termEnum.Next();
        }
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    return ordered;
}
/// <summary>
/// Checks to see if the given file is indexed.
/// </summary>
/// <param name="file">The file to check to see if it's indexed.</param>
/// <returns>true if the file is indexed, false if it isn't.</returns>
public bool IsIndexed(SourceFile file)
{
    if (file == null || String.IsNullOrWhiteSpace(file.FileName))
    {
        return false;
    }
    Lucene.Net.Index.Term term = new Lucene.Net.Index.Term("fileName", file.FileName);
    if (_writer != null)
    {
        // Prefer the writer's near-real-time reader when one is available.
        using (Lucene.Net.Index.IndexReader reader = _writer.GetReader())
        using (Lucene.Net.Index.TermDocs docs = reader.TermDocs(term))
        {
            return docs.Next();
        }
    }
    if (_searcher != null)
    {
        using (Lucene.Net.Index.TermDocs docs = _searcher.IndexReader.TermDocs(term))
        {
            return docs.Next();
        }
    }
    // Neither a writer nor a searcher: nothing is indexed.
    return false;
}
/// <summary> Builds the per-document int array for the entry's field. With no
/// custom parser, delegates to the wrapper using the default parser and falls
/// back to the numeric-utils parser on a format error.</summary>
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey, IState state)
{
    Entry entry = entryKey;
    System.String field = entry.field;
    IntParser parser = (IntParser) entry.custom;
    if (parser == null)
    {
        try
        {
            return (wrapper.GetInts(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_INT_PARSER, state));
        }
        catch (System.FormatException)
        {
            // Presumably a NumericField-encoded field — TODO confirm; retry with the numeric parser.
            return (wrapper.GetInts(reader, field, Lucene.Net.Search.FieldCache_Fields.NUMERIC_UTILS_INT_PARSER, state));
        }
    }
    int[] retArray = null;
    TermDocs termDocs = reader.TermDocs(state);
    TermEnum termEnum = reader.Terms(new Term(field), state);
    try
    {
        do
        {
            Term term = termEnum.Term;
            // Field names are interned; reference comparison detects leaving the field.
            if (term == null || (System.Object) term.Field != (System.Object) field)
            {
                break;
            }
            int termval = parser.ParseInt(term.Text);
            if (retArray == null)
            {
                // late init
                retArray = new int[reader.MaxDoc];
            }
            termDocs.Seek(termEnum, state);
            while (termDocs.Next(state))
            {
                retArray[termDocs.Doc] = termval;
            }
        }
        while (termEnum.Next(state));
    }
    catch (StopFillCacheException)
    {
        // Parser signalled early termination; the partial fill is intentional.
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    if (retArray == null)
    {
        // no values
        retArray = new int[reader.MaxDoc];
    }
    return (retArray);
}
/// <summary> Builds the per-document float array for the entry's field. With no
/// custom parser, delegates to the wrapper using the default parser and falls
/// back to the numeric-utils parser when a format error occurs.</summary>
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
{
    Entry entry = (Entry) entryKey;
    System.String field = entry.field;
    FloatParser parser = (FloatParser) entry.custom;
    if (parser == null)
    {
        try
        {
            return wrapper.GetFloats(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_FLOAT_PARSER);
        }
        catch (System.FormatException)
        {
            // Unused exception variable removed (CS0168); consistent with sibling overloads.
            return wrapper.GetFloats(reader, field, Lucene.Net.Search.FieldCache_Fields.NUMERIC_UTILS_FLOAT_PARSER);
        }
    }
    float[] retArray = null;
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field));
    try
    {
        do
        {
            Term term = termEnum.Term();
            // Interned field names permit the reference comparison.
            if (term == null || (System.Object) term.Field() != (System.Object) field)
            {
                break;
            }
            float termval = parser.ParseFloat(term.Text());
            if (retArray == null)
            {
                // late init: allocate only once a value is actually seen
                retArray = new float[reader.MaxDoc()];
            }
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                retArray[termDocs.Doc()] = termval;
            }
        }
        while (termEnum.Next());
    }
    catch (StopFillCacheException)
    {
        // Parser requested early termination; not an error.
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    if (retArray == null)
    {
        // no values: return an all-zero array
        retArray = new float[reader.MaxDoc()];
    }
    return retArray;
}
/// <summary> Builds a DocIdSet containing every document matched by any term
/// produced by the wrapped query's term enumeration.</summary>
public override DocIdSet GetDocIdSet(IndexReader reader)
{
    TermEnum enumerator = query.GetEnum(reader);
    try
    {
        // if current term in enum is null, the enum is empty -> shortcut
        if (enumerator.Term == null)
        {
            return (DocIdSet.EMPTY_DOCIDSET);
        }
        // else fill into an OpenBitSet
        OpenBitSet bitSet = new OpenBitSet(reader.MaxDoc);
        // Bulk-read buffers: Read() fills up to 32 doc/freq pairs per call.
        int[] docs = new int[32];
        int[] freqs = new int[32];
        TermDocs termDocs = reader.TermDocs();
        try
        {
            int termCount = 0;
            do
            {
                Term term = enumerator.Term;
                if (term == null)
                {
                    break;
                }
                termCount++;
                termDocs.Seek(term);
                while (true)
                {
                    int count = termDocs.Read(docs, freqs);
                    if (count != 0)
                    {
                        for (int i = 0; i < count; i++)
                        {
                            bitSet.Set(docs[i]);
                        }
                    }
                    else
                    {
                        // Postings for this term are exhausted.
                        break;
                    }
                }
            }
            while (enumerator.Next());
            query.IncTotalNumberOfTerms(termCount); // {{Aroush-2.9}} is the use of 'temp' as is right?
        }
        finally
        {
            termDocs.Close();
        }
        return (bitSet);
    }
    finally
    {
        enumerator.Close();
    }
}
/// <summary> Builds the per-document double array for the entry's field. With
/// no custom parser, delegates to the wrapper using the default parser and
/// falls back to the numeric-utils parser on a format error.</summary>
protected internal override object CreateValue(IndexReader reader, Entry entryKey)
{
    Entry entry = entryKey;
    string field = entry.field;
    Lucene.Net.Search.DoubleParser parser = (Lucene.Net.Search.DoubleParser) entry.custom;
    if (parser == null)
    {
        try
        {
            return (wrapper.GetDoubles(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_DOUBLE_PARSER));
        }
        catch (System.FormatException)
        {
            // Presumably a NumericField-encoded field — TODO confirm; retry with the numeric parser.
            return (wrapper.GetDoubles(reader, field, Lucene.Net.Search.FieldCache_Fields.NUMERIC_UTILS_DOUBLE_PARSER));
        }
    }
    double[] retArray = null;
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field));
    try
    {
        do
        {
            Term term = termEnum.Term;
            // Interned field names make reference comparison valid here.
            if (term == null || (object) term.Field != (object) field)
            {
                break;
            }
            double termval = parser.ParseDouble(term.Text);
            if (retArray == null)
            {
                // late init
                retArray = new double[reader.MaxDoc];
            }
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                retArray[termDocs.Doc] = termval;
            }
        }
        while (termEnum.Next());
    }
    catch (StopFillCacheException)
    {
        // Parser signalled early termination; partial results are valid.
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    if (retArray == null)
    {
        // no values
        retArray = new double[reader.MaxDoc];
    }
    return (retArray);
}
/// <summary> Constructor: captures the weight and materializes the value
/// source's per-document values for this reader.</summary>
internal ValueSourceScorer(ValueSourceQuery enclosingInstance, Similarity similarity, IndexReader reader, ValueSourceWeight w)
    : base(similarity)
{
    InitBlock(enclosingInstance);
    this.weight = w;
    this.qWeight = w.GetValue();
    // Null term: iterate over all documents in the reader.
    termDocs = reader.TermDocs(null);
    // this is when/where the values are first created.
    vals = Enclosing_Instance.valSrc.GetValues(reader);
}
/// <summary>Construct a <code>TermScorer</code>.</summary>
/// <param name="weight">The weight of the <code>Term</code> in the query.
/// </param>
/// <param name="td">An iterator over the documents matching the <code>Term</code>.
/// </param>
/// <param name="similarity">The <code>Similarity</code> implementation to be used for score computations.
/// </param>
/// <param name="norms">The field norms of the document fields for the <code>Term</code>.
/// </param>
public TermScorer(Weight weight, TermDocs td, Similarity similarity, byte[] norms)
    : base(similarity)
{
    this.weight = weight;
    this.termDocs = td;
    this.norms = norms;
    this.weightValue = weight.GetValue();
    // Precompute weighted tf() scores for the common small term frequencies.
    int freq = 0;
    while (freq < SCORE_CACHE_SIZE)
    {
        scoreCache[freq] = GetSimilarity().Tf(freq) * weightValue;
        freq++;
    }
}
/// <summary> Construct a <code>TermScorer</code>.
///
/// </summary>
/// <param name="weight">The weight of the <code>Term</code> in the query.
/// </param>
/// <param name="td">An iterator over the documents matching the <code>Term</code>.
/// </param>
/// <param name="similarity">The <code>Similarity</code> implementation to be used for score
/// computations.
/// </param>
/// <param name="norms">The field norms of the document fields for the <code>Term</code>.
/// </param>
public /*internal*/ TermScorer(Weight weight, TermDocs td, Similarity similarity, byte[] norms)
    : base(similarity)
{
    this.norms = norms;
    this.termDocs = td;
    this.weight = weight;
    this.weightValue = weight.GetValue();
    // Fill the score cache with tf() * weight for frequencies below the cache size.
    for (int idx = 0; idx < SCORE_CACHE_SIZE; idx++)
    {
        scoreCache[idx] = GetSimilarity().Tf(idx) * weightValue;
    }
}
/// <summary> Creates a TermScorer for this weight's term, or null when the
/// reader yields no TermDocs iterator for it.</summary>
public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer)
{
    TermDocs termDocs = reader.TermDocs(Enclosing_Instance.term);
    return termDocs == null
        ? null
        : new TermScorer(this, termDocs, similarity, reader.Norms(Enclosing_Instance.term.Field));
}
/// <summary> Creates a TermScorer for this weight's term, or null when the
/// reader yields no TermDocs iterator for it.</summary>
public virtual Scorer Scorer(IndexReader reader)
{
    TermDocs termDocs = reader.TermDocs(Enclosing_Instance.term);
    if (termDocs == null)
    {
        return null;
    }
    byte[] fieldNorms = reader.Norms(Enclosing_Instance.term.Field());
    return new TermScorer(this, termDocs, similarity, fieldNorms);
}
/// <summary> Construct a <c>TermScorer</c>.
///
/// </summary>
/// <param name="weight">The weight of the <c>Term</c> in the query.
/// </param>
/// <param name="td">An iterator over the documents matching the <c>Term</c>.
/// </param>
/// <param name="similarity">The <c>Similarity</c> implementation to be used for score
/// computations.
/// </param>
/// <param name="norms">The field norms of the document fields for the <c>Term</c>.
/// </param>
public /*internal*/ TermScorer(Weight weight, TermDocs td, Similarity similarity, byte[] norms)
    : base(similarity)
{
    this.weight = weight;
    this.termDocs = td;
    this.norms = norms;
    this.weightValue = weight.Value;
    // Precompute weighted tf() for small term frequencies.
    int freq = 0;
    while (freq < SCORE_CACHE_SIZE)
    {
        scoreCache[freq] = Similarity.Tf(freq) * weightValue;
        freq++;
    }
}
// inherit javadocs
/// <summary> Returns (building and caching on first use) a per-document float
/// array for the field, parsed from the field's term texts.</summary>
public virtual float[] GetFloats(IndexReader reader, System.String field, FloatParser parser)
{
    field = String.Intern(field);
    System.Object ret = Lookup(reader, field, parser);
    if (ret == null)
    {
        float[] retArray = new float[reader.MaxDoc()];
        if (retArray.Length > 0)
        {
            TermDocs termDocs = reader.TermDocs();
            TermEnum termEnum = reader.Terms(new Term(field, ""));
            try
            {
                if (termEnum.Term() == null)
                {
                    throw new System.SystemException("no terms in field " + field);
                }
                do
                {
                    Term term = termEnum.Term();
                    // Both strings are interned, so != compares identity-equivalent values.
                    if (term.Field() != field)
                    {
                        break;
                    }
                    float termval;
                    try
                    {
                        termval = SupportClass.Single.Parse(term.Text());
                    }
                    catch (Exception)
                    {
                        // Unparsable term values deliberately default to 0 rather than
                        // aborting the cache fill. (Unused exception variable removed.)
                        termval = 0;
                    }
                    termDocs.Seek(termEnum);
                    while (termDocs.Next())
                    {
                        retArray[termDocs.Doc()] = termval;
                    }
                }
                while (termEnum.Next());
            }
            finally
            {
                termDocs.Close();
                termEnum.Close();
            }
        }
        Store(reader, field, parser, retArray);
        return retArray;
    }
    return (float[]) ret;
}
// inherit javadocs
/// <summary> Returns (building and caching on first use) a per-document array
/// of comparables for the field, produced by the given comparator.</summary>
public virtual System.IComparable[] GetCustom(IndexReader reader, System.String field, SortComparator comparator)
{
    field = String.Intern(field);
    System.Object cached = Lookup(reader, field, comparator);
    if (cached != null)
    {
        return (System.IComparable[]) cached;
    }
    System.IComparable[] values = new System.IComparable[reader.MaxDoc()];
    if (values.Length > 0)
    {
        TermDocs termDocs = reader.TermDocs();
        TermEnum termEnum = reader.Terms(new Term(field, ""));
        try
        {
            if (termEnum.Term() == null)
            {
                throw new System.SystemException("no terms in field " + field);
            }
            do
            {
                Term term = termEnum.Term();
                // Interned strings: != detects leaving the requested field.
                if (term.Field() != field)
                {
                    break;
                }
                System.IComparable comparable = comparator.GetComparable(term.Text());
                termDocs.Seek(termEnum);
                while (termDocs.Next())
                {
                    values[termDocs.Doc()] = comparable;
                }
            }
            while (termEnum.Next());
        }
        finally
        {
            termDocs.Close();
            termEnum.Close();
        }
    }
    Store(reader, field, comparator, values);
    return values;
}
/// <summary> Walks every term produced by the enumerator, bulk-reading postings
/// and handing each matching document id to <c>HandleDoc</c>; finally reports
/// the visited term count back to the query.</summary>
public virtual void Generate(MultiTermQuery query, IndexReader reader, TermEnum enumerator)
{
    // Bulk-read buffers: Read() returns up to 32 doc/freq pairs per call.
    int[] docs = new int[32];
    int[] freqs = new int[32];
    TermDocs termDocs = reader.TermDocs();
    try
    {
        int termCount = 0;
        do
        {
            Term term = enumerator.Term();
            if (term == null)
            {
                break;
            }
            termCount++;
            termDocs.Seek(term);
            while (true)
            {
                int count = termDocs.Read(docs, freqs);
                if (count != 0)
                {
                    for (int i = 0; i < count; i++)
                    {
                        HandleDoc(docs[i]);
                    }
                }
                else
                {
                    // Postings exhausted for this term.
                    break;
                }
            }
        }
        while (enumerator.Next());
        query.IncTotalNumberOfTerms(termCount); // {{Aroush-2.9}} is the use of 'temp' as is right?
    }
    finally
    {
        // The enumerator is owned by the caller; close only the TermDocs we opened.
        termDocs.Close();
    }
}
/// <summary> Builds the per-document short array for the entry's field; with no
/// custom parser, delegates to the wrapper with the default short parser.</summary>
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
{
    Entry entry = (Entry) entryKey;
    System.String field = entry.field;
    ShortParser parser = (ShortParser) entry.custom;
    if (parser == null)
    {
        return wrapper.GetShorts(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_SHORT_PARSER);
    }
    short[] retArray = new short[reader.MaxDoc()];
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field));
    try
    {
        do
        {
            Term term = termEnum.Term();
            // Interned field names permit the reference comparison.
            if (term == null || (System.Object) term.Field() != (System.Object) field)
            {
                break;
            }
            short termval = parser.ParseShort(term.Text());
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                retArray[termDocs.Doc()] = termval;
            }
        }
        while (termEnum.Next());
    }
    catch (StopFillCacheException)
    {
        // Parser requested early termination; partial results are valid.
        // (Unused catch variable removed — CS0168, consistent with sibling overloads.)
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    return retArray;
}
/// <summary> Builds the per-document sbyte array for the entry's field; with no
/// custom parser, delegates to the wrapper with the default byte parser.</summary>
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey, IState state)
{
    Entry entry = entryKey;
    System.String field = entry.field;
    ByteParser parser = (ByteParser) entry.custom;
    if (parser == null)
    {
        return (wrapper.GetBytes(reader, field, Lucene.Net.Search.FieldCache_Fields.DEFAULT_BYTE_PARSER, state));
    }
    sbyte[] retArray = new sbyte[reader.MaxDoc];
    TermDocs termDocs = reader.TermDocs(state);
    TermEnum termEnum = reader.Terms(new Term(field), state);
    try
    {
        do
        {
            Term term = termEnum.Term;
            // Interned field names make reference comparison valid here.
            if (term == null || (System.Object) term.Field != (System.Object) field)
            {
                break;
            }
            sbyte termval = parser.ParseByte(term.Text);
            termDocs.Seek(termEnum, state);
            while (termDocs.Next(state))
            {
                retArray[termDocs.Doc] = termval;
            }
        }
        while (termEnum.Next(state));
    }
    catch (StopFillCacheException)
    {
        // Parser signalled early termination; partial results are valid.
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    return (retArray);
}
/// <summary> Verifies that the KeywordAnalyzer indexes each document's
/// "partnum" value as a single un-tokenized term.</summary>
public virtual void TestMutipleDocument()
{
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.Add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(new Field("partnum", "Q37", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    writer.Close();

    IndexReader reader = IndexReader.Open(dir);
    // BUGFIX: the TermDocs instances and the reader were previously never
    // closed, leaking index resources between tests.
    TermDocs td = reader.TermDocs(new Term("partnum", "Q36"));
    Assert.IsTrue(td.Next());
    td.Close();
    td = reader.TermDocs(new Term("partnum", "Q37"));
    Assert.IsTrue(td.Next());
    td.Close();
    reader.Close();
}
// inherit javadocs
/// <summary> Returns (building and caching on first use) a per-document int
/// array for the field, parsed from the field's term texts.</summary>
public virtual int[] GetInts(IndexReader reader, System.String field, IntParser parser)
{
    field = String.Intern(field);
    System.Object cached = Lookup(reader, field, parser);
    if (cached != null)
    {
        return (int[]) cached;
    }
    int[] values = new int[reader.MaxDoc()];
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field, ""));
    try
    {
        do
        {
            Term term = termEnum.Term();
            // Interned strings: != detects leaving the requested field.
            if (term == null || term.Field() != field)
            {
                break;
            }
            int parsed = parser.ParseInt(term.Text());
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                values[termDocs.Doc()] = parsed;
            }
        }
        while (termEnum.Next());
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    Store(reader, field, parser, values);
    return values;
}
// inherit javadocs
/// <summary> Returns (building and caching on first use) a per-document array
/// of the field's term texts.</summary>
public virtual System.String[] GetStrings(IndexReader reader, System.String field)
{
    field = String.Intern(field);
    System.Object cached = Lookup(reader, field, SortField.STRING, null);
    if (cached != null)
    {
        return (System.String[]) cached;
    }
    System.String[] values = new System.String[reader.MaxDoc()];
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field, ""));
    try
    {
        do
        {
            Term term = termEnum.Term();
            // Interned strings: != detects leaving the requested field.
            if (term == null || term.Field() != field)
            {
                break;
            }
            System.String text = term.Text();
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                values[termDocs.Doc()] = text;
            }
        }
        while (termEnum.Next());
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    Store(reader, field, SortField.STRING, null, values);
    return values;
}
/// <summary> Builds a <c>StringIndex</c> for the field: an ordinal per document
/// plus the ordered array of distinct term texts (slot 0 reserved for
/// documents with no term in the field).</summary>
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
{
    System.String field = StringHelper.Intern((System.String) entryKey.field);
    int[] retArray = new int[reader.MaxDoc()];
    System.String[] mterms = new System.String[reader.MaxDoc() + 1];
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field));
    int t = 0; // current term number
    // an entry for documents that have no terms in this field
    // should a document with no terms be at top or bottom?
    // this puts them at the top - if it is changed, FieldDocSortedHitQueue
    // needs to change as well.
    mterms[t++] = null;
    try
    {
        do
        {
            Term term = termEnum.Term();
            // Interned field names permit the reference comparison.
            if (term == null || (System.Object) term.Field() != (System.Object) field)
            {
                break;
            }
            // store term text
            // we expect that there is at most one term per document
            if (t >= mterms.Length)
            {
                throw new System.SystemException("there are more terms than " + "documents in field \"" + field + "\", but it's impossible to sort on " + "tokenized fields");
            }
            mterms[t] = term.Text();
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                retArray[termDocs.Doc()] = t;
            }
            t++;
        }
        while (termEnum.Next());
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }
    if (t == 0)
    {
        // if there are no terms, make the term array
        // have a single null entry
        // NOTE(review): t starts at 1 after the null sentinel above, so this
        // branch looks unreachable — confirm.
        mterms = new System.String[1];
    }
    else if (t < mterms.Length)
    {
        // if there are less terms than documents,
        // trim off the dead array space
        System.String[] terms = new System.String[t];
        Array.Copy(mterms, 0, terms, 0, t);
        mterms = terms;
    }
    StringIndex value_Renamed = new StringIndex(retArray, mterms);
    return (value_Renamed);
}
/// <summary> Constructor: stores the weight and eagerly obtains the value
/// source's values for this reader.</summary>
internal ValueSourceScorer(ValueSourceQuery enclosingInstance, Similarity similarity, IndexReader reader, ValueSourceWeight w)
    : base(similarity)
{
    InitBlock(enclosingInstance);
    this.qWeight = w.GetValue();
    this.weight = w;
    // this is when/where the values are first created.
    vals = Enclosing_Instance.valSrc.GetValues(reader);
    // A null term seeks across every document.
    termDocs = reader.TermDocs(null);
}
// inherit javadocs
/// <summary> Returns (building and caching on first use) a <c>StringIndex</c>
/// for the field: an ordinal per document plus the ordered distinct term texts,
/// with slot 0 reserved for documents that have no term in the field.</summary>
public virtual StringIndex GetStringIndex(IndexReader reader, System.String field)
{
    field = String.Intern(field);
    System.Object ret = Lookup(reader, field, Lucene.Net.Search.FieldCache_Fields.STRING_INDEX);
    if (ret == null)
    {
        int[] retArray = new int[reader.MaxDoc()];
        System.String[] mterms = new System.String[reader.MaxDoc() + 1];
        if (retArray.Length > 0)
        {
            TermDocs termDocs = reader.TermDocs();
            TermEnum termEnum = reader.Terms(new Term(field, ""));
            int t = 0; // current term number
            // an entry for documents that have no terms in this field
            // should a document with no terms be at top or bottom?
            // this puts them at the top - if it is changed, FieldDocSortedHitQueue
            // needs to change as well.
            mterms[t++] = null;
            try
            {
                if (termEnum.Term() == null)
                {
                    throw new System.SystemException("no terms in field " + field);
                }
                do
                {
                    Term term = termEnum.Term();
                    // Interned strings: != detects leaving the requested field.
                    if (term.Field() != field)
                    {
                        break;
                    }
                    // store term text
                    // we expect that there is at most one term per document
                    if (t >= mterms.Length)
                    {
                        throw new System.SystemException("there are more terms than " + "documents in field \"" + field + "\", but it's impossible to sort on " + "tokenized fields");
                    }
                    mterms[t] = term.Text();
                    termDocs.Seek(termEnum);
                    while (termDocs.Next())
                    {
                        retArray[termDocs.Doc()] = t;
                    }
                    t++;
                }
                while (termEnum.Next());
            }
            finally
            {
                termDocs.Close();
                termEnum.Close();
            }
            if (t == 0)
            {
                // if there are no terms, make the term array
                // have a single null entry
                // NOTE(review): t starts at 1 after the null sentinel above, so
                // this branch looks unreachable — confirm.
                mterms = new System.String[1];
            }
            else if (t < mterms.Length)
            {
                // if there are less terms than documents,
                // trim off the dead array space
                System.String[] terms = new System.String[t];
                Array.Copy(mterms, 0, terms, 0, t);
                mterms = terms;
            }
        }
        StringIndex value_Renamed = new StringIndex(retArray, mterms);
        Store(reader, field, Lucene.Net.Search.FieldCache_Fields.STRING_INDEX, value_Renamed);
        return (value_Renamed);
    }
    return ((StringIndex) ret);
}
/// <summary> Constructs the iterator; all field wiring is delegated to the
/// shared <c>InitBlock</c> helper.</summary>
public AnonymousClassDocIdSetIterator(Lucene.Net.Index.TermDocs termDocs, FieldCacheDocIdSet enclosingInstance)
{
    InitBlock(termDocs, enclosingInstance);
}
/// <summary> Stores the constructor arguments in their backing fields.</summary>
private void InitBlock(Lucene.Net.Index.TermDocs termDocs, FieldCacheDocIdSet enclosingInstance)
{
    this.enclosingInstance = enclosingInstance;
    this.termDocs = termDocs;
}
/// <summary> Produces a scoring explanation for <paramref name="doc"/>: the
/// query-weight component (boost * idf * queryNorm) combined with the
/// field-weight component (tf * idf * fieldNorm).</summary>
public override Explanation Explain(IndexReader reader, int doc)
{
    ComplexExplanation result = new ComplexExplanation();
    result.Description = "weight(" + Query + " in " + doc + "), product of:";
    Explanation expl = new Explanation(idf, idfExp.Explain());
    // explain query weight
    Explanation queryExpl = new Explanation();
    queryExpl.Description = "queryWeight(" + Query + "), product of:";
    Explanation boostExpl = new Explanation(Enclosing_Instance.Boost, "boost");
    if (Enclosing_Instance.Boost != 1.0f)
    {
        // Only surface the boost detail when it differs from the default of 1.
        queryExpl.AddDetail(boostExpl);
    }
    queryExpl.AddDetail(expl);
    Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);
    queryExpl.Value = boostExpl.Value * expl.Value * queryNormExpl.Value;
    result.AddDetail(queryExpl);
    // explain field weight
    string field = Enclosing_Instance.term.Field;
    ComplexExplanation fieldExpl = new ComplexExplanation();
    fieldExpl.Description = "fieldWeight(" + Enclosing_Instance.term + " in " + doc + "), product of:";
    Explanation tfExplanation = new Explanation();
    int tf = 0;
    TermDocs termDocs = reader.TermDocs(enclosingInstance.term);
    if (termDocs != null)
    {
        try
        {
            // Look up the term frequency for exactly this document.
            if (termDocs.SkipTo(doc) && termDocs.Doc == doc)
            {
                tf = termDocs.Freq;
            }
        }
        finally
        {
            termDocs.Close();
        }
        tfExplanation.Value = similarity.Tf(tf);
        tfExplanation.Description = "tf(termFreq(" + enclosingInstance.term + ")=" + tf + ")";
    }
    else
    {
        tfExplanation.Value = 0.0f;
        tfExplanation.Description = "no matching term";
    }
    fieldExpl.AddDetail(tfExplanation);
    fieldExpl.AddDetail(expl);
    Explanation fieldNormExpl = new Explanation();
    byte[] fieldNorms = reader.Norms(field);
    // Missing norms (e.g. omitNorms) fall back to a neutral 1.0 factor.
    float fieldNorm = fieldNorms != null ? Similarity.DecodeNorm(fieldNorms[doc]) : 1.0f;
    fieldNormExpl.Value = fieldNorm;
    fieldNormExpl.Description = "fieldNorm(field=" + field + ", doc=" + doc + ")";
    fieldExpl.AddDetail(fieldNormExpl);
    fieldExpl.Match = tfExplanation.IsMatch;
    fieldExpl.Value = tfExplanation.Value * expl.Value * fieldNormExpl.Value;
    result.AddDetail(fieldExpl);
    bool? tempAux = fieldExpl.Match;
    result.Match = tempAux;
    // combine them
    result.Value = queryExpl.Value * fieldExpl.Value;
    // When the query weight is exactly 1, the field explanation alone suffices.
    if (queryExpl.Value == 1.0f)
    {
        return (fieldExpl);
    }
    return (result);
}