Example #1
public override long NextOrd()
 {
     if (ordUpto == ordLimit)
     {
         return(NO_MORE_ORDS);
     }
     else
     {
         return(ords.Get(ordUpto++));
     }
 }
Example #2
            public override float Score(int doc, float freq)
            {
                // if there are no norms, we act as if b=0
                float norm = Norms == null ? OuterInstance.K1_Renamed : Cache[(sbyte)Norms.Get(doc) & 0xFF];

                return(WeightValue * freq / (freq + norm));
            }
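A note on the cache used above (a sketch under assumptions, not the library's code): the 256 possible encoded norm bytes are typically expanded up front into precomputed k1 * (1 - b + b * dl / avgdl) terms, so Score() only does one array lookup per document; when norms are omitted the cache is skipped and k1 alone is used, which is the b = 0 case the comment mentions. BuildNormCache, decodeNorm and avgFieldLength below are illustrative stand-ins, not the exact Lucene.NET members.
            // Sketch only: fill the 256-entry cache that Score() indexes with the
            // encoded norm byte. decodeNorm and avgFieldLength stand in for the
            // similarity's own decode table and collection statistics.
            private static float[] BuildNormCache(float k1, float b, float avgFieldLength, Func<byte, float> decodeNorm)
            {
                var cache = new float[256];
                for (int i = 0; i < cache.Length; i++)
                {
                    float docLength = decodeNorm((byte)i);                      // encoded norm -> approximate field length
                    cache[i] = k1 * ((1 - b) + b * docLength / avgFieldLength); // BM25 tf-normalization denominator term
                }
                return cache;
            }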
Example #3
            public override float Score(int doc, float freq)
            {
                // if there are no norms, we act as if b=0
                float norm = norms == null ? outerInstance.k1 : cache[(sbyte)norms.Get(doc) & 0xFF];

                return(weightValue * freq / (freq + norm));
            }
Example #4
        private static void AssertSorted(AtomicReader reader)
        {
            NumericDocValues ndv = reader.GetNumericDocValues("ndv");

            for (int i = 1; i < reader.MaxDoc; ++i)
            {
                assertTrue("ndv(" + (i - 1) + ")=" + ndv.Get(i - 1) + ",ndv(" + i + ")=" + ndv.Get(i), ndv.Get(i - 1) <= ndv.Get(i));
            }
        }
Example #5
        public virtual void TestNormValues()
        {
            NumericDocValues dv = reader.GetNormValues(NORMS_FIELD);
            int maxDoc          = reader.MaxDoc;

            for (int i = 0; i < maxDoc; i++)
            {
                assertEquals("incorrect norm value for doc " + i, sortedValues[i], dv.Get(i));
            }
        }
Example #6
        public virtual void TestTwoFieldsTwoFormats()
        {
            Analyzer analyzer = new MockAnalyzer(Random);

            Directory directory = NewDirectory();
            // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
            IndexWriterConfig iwc  = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
            DocValuesFormat   fast = DocValuesFormat.ForName("Lucene45");
            DocValuesFormat   slow = DocValuesFormat.ForName("SimpleText");

            iwc.SetCodec(new Lucene46CodecAnonymousClass(this, fast, slow));
            IndexWriter iwriter  = new IndexWriter(directory, iwc);
            Document    doc      = new Document();
            string      longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm";
            string      text     = "this is the text to be indexed. " + longTerm;

            doc.Add(NewTextField("fieldname", text, Field.Store.YES));
            doc.Add(new NumericDocValuesField("dv1", 5));
            doc.Add(new BinaryDocValuesField("dv2", new BytesRef("hello world")));
            iwriter.AddDocument(doc);
            iwriter.Dispose();

            // Now search the index:
            IndexReader   ireader   = DirectoryReader.Open(directory); // read-only=true
            IndexSearcher isearcher = NewSearcher(ireader);

            Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits);
            Query   query = new TermQuery(new Term("fieldname", "text"));
            TopDocs hits  = isearcher.Search(query, null, 1);

            Assert.AreEqual(1, hits.TotalHits);
            BytesRef scratch = new BytesRef();

            // Iterate through the results:
            for (int i = 0; i < hits.ScoreDocs.Length; i++)
            {
                Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
                Assert.AreEqual(text, hitDoc.Get("fieldname"));
                if (Debugging.AssertsEnabled)
                {
                    Debugging.Assert(ireader.Leaves.Count == 1);
                }
                NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv1");
                Assert.AreEqual(5, dv.Get(hits.ScoreDocs[i].Doc));
                BinaryDocValues dv2 = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv2");
                dv2.Get(hits.ScoreDocs[i].Doc, scratch);
                Assert.AreEqual(new BytesRef("hello world"), scratch);
            }

            ireader.Dispose();
            directory.Dispose();
        }
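The Lucene46CodecAnonymousClass referenced above is the piece that gives each DocValues field its own format, but its body is not shown. A hedged sketch of what it most likely does, assuming the standard GetDocValuesFormatForField override on Lucene46Codec:
        // Sketch, not the test's exact class: route "dv1" to the fast Lucene45
        // format and every other DocValues field to the slow SimpleText format.
        private sealed class TwoFormatsCodec : Lucene46Codec
        {
            private readonly DocValuesFormat fast;
            private readonly DocValuesFormat slow;

            public TwoFormatsCodec(DocValuesFormat fast, DocValuesFormat slow)
            {
                this.fast = fast;
                this.slow = slow;
            }

            public override DocValuesFormat GetDocValuesFormatForField(string field)
            {
                return "dv1".Equals(field, StringComparison.Ordinal) ? fast : slow;
            }
        }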
Example #7
        private IEnumerable <long?> GetMergeNumericFieldEnumerable(/*FieldInfo fieldinfo, // LUCENENET: Never read */ MergeState mergeState, IList <NumericDocValues> toMerge, IList <IBits> docsWithField)
        {
            int              readerUpto           = -1;
            int              docIDUpto            = 0;
            AtomicReader     currentReader        = null;
            NumericDocValues currentValues        = null;
            IBits            currentLiveDocs      = null;
            IBits            currentDocsWithField = null;

            while (true)
            {
                if (readerUpto == toMerge.Count)
                {
                    yield break;
                }

                if (currentReader == null || docIDUpto == currentReader.MaxDoc)
                {
                    readerUpto++;
                    if (readerUpto < toMerge.Count)
                    {
                        currentReader        = mergeState.Readers[readerUpto];
                        currentValues        = toMerge[readerUpto];
                        currentDocsWithField = docsWithField[readerUpto];
                        currentLiveDocs      = currentReader.LiveDocs;
                    }
                    docIDUpto = 0;
                    continue;
                }

                if (currentLiveDocs == null || currentLiveDocs.Get(docIDUpto))
                {
                    long? nextValue;
                    if (currentDocsWithField.Get(docIDUpto))
                    {
                        nextValue = currentValues.Get(docIDUpto);
                    }
                    else
                    {
                        nextValue = null;
                    }

                    docIDUpto++;
                    yield return(nextValue);

                    continue;
                }

                docIDUpto++;
            }
        }
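Because the enumerable above is lazy, a merge usually just hands it to the codec's DocValuesConsumer, which pulls one value (or null for a document with no value) per surviving document. A hedged usage sketch, assuming a consumer and fieldInfo are in scope in the surrounding merge code:
        // Sketch: the consumer drains the enumerable produced above, one entry per live doc.
        consumer.AddNumericField(fieldInfo, GetMergeNumericFieldEnumerable(mergeState, toMerge, docsWithField));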
Example #8
            /// <summary>
            /// Returns the value of the <code>weightField</code> for the current document.
            /// Retrieves the value for the <code>weightField</code> if it is stored (using <code>doc</code>)
            /// or if it is indexed as <seealso cref="NumericDocValues"/> (using <code>docId</code>) for the document.
            /// If no value is found, then the weight is 0.
            /// </summary>
            protected internal virtual long GetWeight(Document doc, int docId)
            {
                IndexableField weight = doc.GetField(outerInstance.weightField);

                if (weight != null) // found weight as stored
                {
                    return((weight.NumericValue != null) ? (long)weight.NumericValue : 0);
                } // found weight as NumericDocValue
                else if (weightValues != null)
                {
                    return(weightValues.Get(docId));
                } // fall back
                else
                {
                    return(0);
                }
            }
Example #9
            /// <summary>
            /// Returns the value of the <see cref="Weight"/> property for the current document.
            /// Retrieves the value for the <see cref="Weight"/> property if it is stored (using <paramref name="doc"/>)
            /// or if it is indexed as <see cref="NumericDocValues"/> (using <paramref name="docId"/>) for the document.
            /// If no value is found, then the weight is 0.
            /// </summary>
            protected internal virtual long GetWeight(Document doc, int docId)
            {
                IndexableField weight = doc.GetField(outerInstance.weightField);

                if (weight != null) // found weight as stored
                {
                    // LUCENENET TODO: See if we can make NumericValue into Decimal (which can be converted to any other type of number)
                    // rather than using object.
                    return((weight.NumericValue != null) ? Convert.ToInt64(weight.NumericValue) : 0);
                } // found weight as NumericDocValue
                else if (weightValues != null)
                {
                    return(weightValues.Get(docId));
                } // fall back
                else
                {
                    return(0);
                }
            }
Example #10
        public override void BeforeClass() // LUCENENET specific - renamed from BeforeClassSortingAtomicReaderTest() to ensure calling order vs base class
        {
            base.BeforeClass();

            // sort the index by id (as integer, in NUMERIC_DV_FIELD)
            Sort sort = new Sort(new SortField(NUMERIC_DV_FIELD, SortFieldType.INT32));

            Sorter.DocMap docMap = new Sorter(sort).Sort(reader);

            // Sorter.compute also sorts the values
            NumericDocValues dv = reader.GetNumericDocValues(NUMERIC_DV_FIELD);

            sortedValues = new int[reader.MaxDoc];
            for (int i = 0; i < reader.MaxDoc; ++i)
            {
                sortedValues[docMap.OldToNew(i)] = (int)dv.Get(i);
            }
            if (VERBOSE)
            {
                Console.WriteLine("docMap: " + docMap);
                Console.WriteLine("sortedValues: " + Arrays.ToString(sortedValues));
            }

            // sort the index by id (as integer, in NUMERIC_DV_FIELD)
            reader = SortingAtomicReader.Wrap(reader, sort);

            if (VERBOSE)
            {
                Console.WriteLine("mapped-deleted-docs: ");
                IBits mappedLiveDocs = reader.LiveDocs;
                for (int i = 0; i < mappedLiveDocs.Length; i++)
                {
                    if (!mappedLiveDocs.Get(i))
                    {
                        Console.WriteLine(i + " ");
                    }
                }
                Console.WriteLine();
            }

            TestUtil.CheckReader(reader);
        }
Example #11
        public static void BeforeClassSortingAtomicReaderTest()
        {
            // sort the index by id (as integer, in NUMERIC_DV_FIELD)
            Sort sort = new Sort(new SortField(NUMERIC_DV_FIELD, SortField.Type_e.INT));

            Sorter.DocMap docMap = new Sorter(sort).Sort(reader);

            // Sorter.compute also sorts the values
            NumericDocValues dv = reader.GetNumericDocValues(NUMERIC_DV_FIELD);

            sortedValues = new int[reader.MaxDoc];
            for (int i = 0; i < reader.MaxDoc; ++i)
            {
                sortedValues[docMap.OldToNew(i)] = (int)dv.Get(i);
            }
            if (VERBOSE)
            {
                Console.WriteLine("docMap: " + docMap);
                Console.WriteLine("sortedValues: " + Arrays.ToString(sortedValues));
            }

            // sort the index by id (as integer, in NUMERIC_DV_FIELD)
            reader = SortingAtomicReader.Wrap(reader, sort);

            if (VERBOSE)
            {
                Console.WriteLine("mapped-deleted-docs: ");
                Bits mappedLiveDocs = reader.LiveDocs;
                for (int i = 0; i < mappedLiveDocs.Length(); i++)
                {
                    if (!mappedLiveDocs.Get(i))
                    {
                        Console.WriteLine(i + " ");
                    }
                }
                Console.WriteLine();
            }

            TestUtil.CheckReader(reader);
        }
Example #12
        public virtual void TestBasics()
        {
            // sanity check of norms writer
            // TODO: generalize
            AtomicReader     slow     = SlowCompositeReaderWrapper.Wrap(reader);
            NumericDocValues fooNorms = slow.GetNormValues("foo");
            NumericDocValues barNorms = slow.GetNormValues("bar");

            for (int i = 0; i < slow.MaxDoc; i++)
            {
                Assert.IsFalse(fooNorms.Get(i) == barNorms.Get(i));
            }

            // sanity check of searching
            TopDocs foodocs = searcher.Search(new TermQuery(new Term("foo", "brown")), 10);

            Assert.IsTrue(foodocs.TotalHits > 0);
            TopDocs bardocs = searcher.Search(new TermQuery(new Term("bar", "brown")), 10);

            Assert.IsTrue(bardocs.TotalHits > 0);
            Assert.IsTrue(foodocs.ScoreDocs[0].Score < bardocs.ScoreDocs[0].Score);
        }
Example #13
        private IEnumerable <long?> GetMergeNumericFieldEnumerable(FieldInfo fieldinfo, MergeState mergeState, IList <NumericDocValues> toMerge)
        {
            int              readerUpto      = -1;
            int              docIDUpto       = 0;
            AtomicReader     currentReader   = null;
            NumericDocValues currentValues   = null;
            Bits             currentLiveDocs = null;

            while (true)
            {
                if (readerUpto == toMerge.Count)
                {
                    yield break;
                }

                if (currentReader == null || docIDUpto == currentReader.MaxDoc)
                {
                    readerUpto++;
                    if (readerUpto < toMerge.Count)
                    {
                        currentReader   = mergeState.Readers[readerUpto];
                        currentValues   = toMerge[readerUpto];
                        currentLiveDocs = currentReader.LiveDocs;
                    }
                    docIDUpto = 0;
                    continue;
                }

                if (currentLiveDocs == null || currentLiveDocs.Get(docIDUpto))
                {
                    yield return(currentValues.Get(docIDUpto++));

                    continue;
                }

                docIDUpto++;
            }
        }
Example #14
        private Explanation ExplainScore(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms)
        {
            Explanation result = new Explanation();

            result.Description = "score(doc=" + doc + ",freq=" + freq + "), product of:";

            Explanation boostExpl = new Explanation(stats.QueryBoost * stats.TopLevelBoost, "boost");

            if (boostExpl.Value != 1.0f)
            {
                result.AddDetail(boostExpl);
            }

            result.AddDetail(stats.Idf);

            Explanation tfNormExpl = new Explanation();

            tfNormExpl.Description = "tfNorm, computed from:";
            tfNormExpl.AddDetail(freq);
            tfNormExpl.AddDetail(new Explanation(k1, "parameter k1"));
            if (norms == null)
            {
                tfNormExpl.AddDetail(new Explanation(0, "parameter b (norms omitted for field)"));
                tfNormExpl.Value = (freq.Value * (k1 + 1)) / (freq.Value + k1);
            }
            else
            {
                float doclen = DecodeNormValue((byte)norms.Get(doc));
                tfNormExpl.AddDetail(new Explanation(b, "parameter b"));
                tfNormExpl.AddDetail(new Explanation(stats.Avgdl, "avgFieldLength"));
                tfNormExpl.AddDetail(new Explanation(doclen, "fieldLength"));
                tfNormExpl.Value = (freq.Value * (k1 + 1)) / (freq.Value + k1 * (1 - b + b * doclen / stats.Avgdl));
            }
            result.AddDetail(tfNormExpl);
            result.Value = boostExpl.Value * stats.Idf.Value * tfNormExpl.Value;
            return(result);
        }
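To make the norms branch concrete (illustrative numbers, not taken from any index): with freq = 2, k1 = 1.2, b = 0.75, a decoded field length of 8 and an average field length of 10, the tfNorm detail evaluates to roughly 1.457.
        // Worked example of the tfNorm formula used above, with made-up inputs.
        float freq = 2f, k1 = 1.2f, b = 0.75f, doclen = 8f, avgdl = 10f;
        float tfNorm = (freq * (k1 + 1)) / (freq + k1 * (1 - b + b * doclen / avgdl));
        // (2 * 2.2) / (2 + 1.2 * 0.85) = 4.4 / 3.02 ≈ 1.457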
Example #15
        private Explanation ExplainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms)
        {
            Explanation result = new Explanation();

            result.Description = "score(doc=" + doc + ",freq=" + freq + "), product of:";

            // explain query weight
            Explanation queryExpl = new Explanation();

            queryExpl.Description = "queryWeight, product of:";

            Explanation boostExpl = new Explanation(stats.QueryBoost, "boost");

            if (stats.QueryBoost != 1.0f)
            {
                queryExpl.AddDetail(boostExpl);
            }
            queryExpl.AddDetail(stats.Idf);

            Explanation queryNormExpl = new Explanation(stats.QueryNorm, "queryNorm");

            queryExpl.AddDetail(queryNormExpl);

            queryExpl.Value = boostExpl.Value * stats.Idf.Value * queryNormExpl.Value;

            result.AddDetail(queryExpl);

            // explain field weight
            Explanation fieldExpl = new Explanation();

            fieldExpl.Description = "fieldWeight in " + doc + ", product of:";

            Explanation tfExplanation = new Explanation();

            tfExplanation.Value       = Tf(freq.Value);
            tfExplanation.Description = "tf(freq=" + freq.Value + "), with freq of:";
            tfExplanation.AddDetail(freq);
            fieldExpl.AddDetail(tfExplanation);
            fieldExpl.AddDetail(stats.Idf);

            Explanation fieldNormExpl = new Explanation();
            float       fieldNorm     = norms != null ? DecodeNormValue(norms.Get(doc)) : 1.0f;

            fieldNormExpl.Value       = fieldNorm;
            fieldNormExpl.Description = "fieldNorm(doc=" + doc + ")";
            fieldExpl.AddDetail(fieldNormExpl);

            fieldExpl.Value = tfExplanation.Value * stats.Idf.Value * fieldNormExpl.Value;

            result.AddDetail(fieldExpl);

            // combine them
            result.Value = queryExpl.Value * fieldExpl.Value;

            if (queryExpl.Value == 1.0f)
            {
                return(fieldExpl);
            }

            return(result);
        }
Example #16
            public override float Score(int doc, float freq)
            {
                float raw = outerInstance.Tf(freq) * weightValue;                                 // compute tf(f)*weight

                return(norms == null ? raw : raw * outerInstance.DecodeNormValue(norms.Get(doc))); // normalize for field
            }
Example #17
 public override long Get(int docID)
 {
     return(@in.Get(docMap.NewToOld(docID)));
 }
Example #18
 public override int GetOrd(int docID)
 {
     return((int)docToOrd.Get(docID));
 }
Example #19
        private void DuellReaders(CompositeReader other, AtomicReader memIndexReader)
        {
            AtomicReader competitor = SlowCompositeReaderWrapper.Wrap(other);
            Fields       memFields  = memIndexReader.Fields;

            foreach (string field in competitor.Fields)
            {
                Terms memTerms = memFields.GetTerms(field);
                Terms iwTerms  = memIndexReader.GetTerms(field);
                if (iwTerms is null)
                {
                    assertNull(memTerms);
                }
                else
                {
                    NumericDocValues normValues    = competitor.GetNormValues(field);
                    NumericDocValues memNormValues = memIndexReader.GetNormValues(field);
                    if (normValues != null)
                    {
                        // mem idx always computes norms on the fly
                        assertNotNull(memNormValues);
                        assertEquals(normValues.Get(0), memNormValues.Get(0));
                    }

                    assertNotNull(memTerms);
                    assertEquals(iwTerms.DocCount, memTerms.DocCount);
                    assertEquals(iwTerms.SumDocFreq, memTerms.SumDocFreq);
                    assertEquals(iwTerms.SumTotalTermFreq, memTerms.SumTotalTermFreq);
                    TermsEnum iwTermsIter  = iwTerms.GetEnumerator();
                    TermsEnum memTermsIter = memTerms.GetEnumerator();
                    if (iwTerms.HasPositions)
                    {
                        bool offsets = iwTerms.HasOffsets && memTerms.HasOffsets;

                        while (iwTermsIter.MoveNext())
                        {
                            assertTrue(memTermsIter.MoveNext());
                            assertEquals(iwTermsIter.Term, memTermsIter.Term);
                            DocsAndPositionsEnum iwDocsAndPos  = iwTermsIter.DocsAndPositions(null, null);
                            DocsAndPositionsEnum memDocsAndPos = memTermsIter.DocsAndPositions(null, null);
                            while (iwDocsAndPos.NextDoc() != DocsAndPositionsEnum.NO_MORE_DOCS)
                            {
                                assertEquals(iwDocsAndPos.DocID, memDocsAndPos.NextDoc());
                                assertEquals(iwDocsAndPos.Freq, memDocsAndPos.Freq);
                                for (int i = 0; i < iwDocsAndPos.Freq; i++)
                                {
                                    assertEquals("term: " + iwTermsIter.Term.Utf8ToString(), iwDocsAndPos.NextPosition(), memDocsAndPos.NextPosition());
                                    if (offsets)
                                    {
                                        assertEquals(iwDocsAndPos.StartOffset, memDocsAndPos.StartOffset);
                                        assertEquals(iwDocsAndPos.EndOffset, memDocsAndPos.EndOffset);
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        while (iwTermsIter.MoveNext())
                        {
                            assertEquals(iwTermsIter.Term, memTermsIter.Term);
                            DocsEnum iwDocsAndPos  = iwTermsIter.Docs(null, null);
                            DocsEnum memDocsAndPos = memTermsIter.Docs(null, null);
                            while (iwDocsAndPos.NextDoc() != DocsAndPositionsEnum.NO_MORE_DOCS)
                            {
                                assertEquals(iwDocsAndPos.DocID, memDocsAndPos.NextDoc());
                                assertEquals(iwDocsAndPos.Freq, memDocsAndPos.Freq);
                            }
                        }
                    }
                }
            }
        }
Example #20
        public virtual void TestNumericDocValuesField()
        {
            NumericDocValues dv = reader.GetNumericDocValues(NUMERIC_DV_FIELD);
            int maxDoc          = reader.MaxDoc;

            for (int i = 0; i < maxDoc; i++)
            {
                assertEquals("incorrect numeric DocValues for doc " + i, sortedValues[i], dv.Get(i));
            }
        }
Example #21
 public override void SetDocument(int docID)
 {
     ordStart = ordUpto = (int)docToOrdAddress.Get(docID);
     ordLimit = (int)docToOrdAddress.Get(docID + 1);
 }
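SetDocument above positions ordStart/ordUpto/ordLimit for a single document, and the NextOrd override shown in Example #1 then walks that range until NO_MORE_ORDS. A hedged consumption sketch over a SortedSetDocValues instance (sortedSet, docID and the BytesRef reuse are assumptions for illustration):
 // Sketch: position on one document, then drain its ordinals in order.
 sortedSet.SetDocument(docID);
 long ord;
 while ((ord = sortedSet.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
 {
     var term = new BytesRef();
     sortedSet.LookupOrd(ord, term); // resolve the ordinal back to its term bytes
     // ... use the term ...
 }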
Example #22
        public void Test()
        {
            RandomIndexWriter writer;
            DirectoryReader   indexReader;
            int numParents        = AtLeast(200);
            IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

            cfg.SetMergePolicy(NewLogMergePolicy());
            using (writer = new RandomIndexWriter(Random(), NewDirectory(), cfg))
            {
                Document parentDoc = new Document();
                NumericDocValuesField parentVal = new NumericDocValuesField("parent_val", 0L);
                parentDoc.Add(parentVal);
                StringField parent = new StringField("parent", "true", Field.Store.YES);
                parentDoc.Add(parent);
                for (int i = 0; i < numParents; ++i)
                {
                    List <Document> documents   = new List <Document>();
                    int             numChildren = Random().nextInt(10);
                    for (int j = 0; j < numChildren; ++j)
                    {
                        Document childDoc = new Document();
                        childDoc.Add(new NumericDocValuesField("child_val", Random().nextInt(5)));
                        documents.Add(childDoc);
                    }
                    parentVal.SetInt64Value(Random().nextInt(50));
                    documents.Add(parentDoc);
                    writer.AddDocuments(documents);
                }
                writer.ForceMerge(1);
                indexReader = writer.Reader;
            }

            AtomicReader     reader        = GetOnlySegmentReader(indexReader);
            Filter           parentsFilter = new FixedBitSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("parent", "true"))));
            FixedBitSet      parentBits    = (FixedBitSet)parentsFilter.GetDocIdSet(reader.AtomicContext, null);
            NumericDocValues parentValues  = reader.GetNumericDocValues("parent_val");

            NumericDocValues childValues = reader.GetNumericDocValues("child_val");

            Sort parentSort = new Sort(new SortField("parent_val", SortFieldType.INT64));
            Sort childSort  = new Sort(new SortField("child_val", SortFieldType.INT64));

            Sort   sort   = new Sort(new SortField("custom", new BlockJoinComparerSource(parentsFilter, parentSort, childSort)));
            Sorter sorter = new Sorter(sort);

            Sorter.DocMap docMap = sorter.Sort(reader);
            assertEquals(reader.MaxDoc, docMap.Count);

            int[] children       = new int[1];
            int   numChildren2   = 0;
            int   previousParent = -1;

            for (int i = 0; i < docMap.Count; ++i)
            {
                int oldID = docMap.NewToOld(i);
                if (parentBits.Get(oldID))
                {
                    // check that we have the right children
                    for (int j = 0; j < numChildren2; ++j)
                    {
                        assertEquals(oldID, parentBits.NextSetBit(children[j]));
                    }
                    // check that children are sorted
                    for (int j = 1; j < numChildren2; ++j)
                    {
                        int doc1 = children[j - 1];
                        int doc2 = children[j];
                        if (childValues.Get(doc1) == childValues.Get(doc2))
                        {
                            assertTrue(doc1 < doc2); // sort is stable
                        }
                        else
                        {
                            assertTrue(childValues.Get(doc1) < childValues.Get(doc2));
                        }
                    }
                    // check that parents are sorted
                    if (previousParent != -1)
                    {
                        if (parentValues.Get(previousParent) == parentValues.Get(oldID))
                        {
                            assertTrue(previousParent < oldID);
                        }
                        else
                        {
                            assertTrue(parentValues.Get(previousParent) < parentValues.Get(oldID));
                        }
                    }
                    // reset
                    previousParent = oldID;
                    numChildren2   = 0;
                }
                else
                {
                    children = ArrayUtil.Grow(children, numChildren2 + 1);
                    children[numChildren2++] = oldID;
                }
            }
            indexReader.Dispose();
            writer.w.Directory.Dispose();
        }
Example #23
 /// <summary>
 /// NOTE: This was floatVal() in Lucene
 /// </summary>
 public override float SingleVal(int doc)
 {
     return(similarity.DecodeNormValue(norms.Get(doc)));
 }
Example #24
        private Explanation ExplainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms)
        {
            Explanation result = new Explanation();

            // LUCENENET specific - using freq.Value is a change that was made in Lucene 5.0, but is included
            // in 4.8.0 to remove annoying newlines from the output.
            // See: https://github.com/apache/lucene-solr/commit/f0bfcbc7d8fbc5bb2791da60af559e8b0ad6eed6
            result.Description = "score(doc=" + doc + ",freq=" + freq.Value + "), product of:";

            // explain query weight
            Explanation queryExpl = new Explanation();

            queryExpl.Description = "queryWeight, product of:";

            Explanation boostExpl = new Explanation(stats.QueryBoost, "boost");

            if (stats.QueryBoost != 1.0f)
            {
                queryExpl.AddDetail(boostExpl);
            }
            queryExpl.AddDetail(stats.Idf);

            Explanation queryNormExpl = new Explanation(stats.QueryNorm, "queryNorm");

            queryExpl.AddDetail(queryNormExpl);

            queryExpl.Value = boostExpl.Value * stats.Idf.Value * queryNormExpl.Value;

            result.AddDetail(queryExpl);

            // explain field weight
            Explanation fieldExpl = new Explanation();

            fieldExpl.Description = "fieldWeight in " + doc + ", product of:";

            Explanation tfExplanation = new Explanation();

            tfExplanation.Value       = Tf(freq.Value);
            tfExplanation.Description = "tf(freq=" + freq.Value + "), with freq of:";
            tfExplanation.AddDetail(freq);
            fieldExpl.AddDetail(tfExplanation);
            fieldExpl.AddDetail(stats.Idf);

            Explanation fieldNormExpl = new Explanation();
            float       fieldNorm     = norms != null ? DecodeNormValue(norms.Get(doc)) : 1.0f;

            fieldNormExpl.Value       = fieldNorm;
            fieldNormExpl.Description = "fieldNorm(doc=" + doc + ")";
            fieldExpl.AddDetail(fieldNormExpl);

            fieldExpl.Value = tfExplanation.Value * stats.Idf.Value * fieldNormExpl.Value;

            result.AddDetail(fieldExpl);

            // combine them
            result.Value = queryExpl.Value * fieldExpl.Value;

            if (queryExpl.Value == 1.0f)
            {
                return(fieldExpl);
            }

            return(result);
        }