public override void FinishTerm()
 {
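     // Check call-order state: all positions for the term must have been
     // consumed, and the doc, field, and term must still be in progress.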
     Debug.Assert(PositionCount == 0);
     Debug.Assert(DocStatus == Status.STARTED);
     Debug.Assert(FieldStatus == Status.STARTED);
     Debug.Assert(TermStatus == Status.STARTED);
     @in.FinishTerm();
     TermStatus = Status.FINISHED;
     --TermCount;
 }
 public override void FinishTerm()
 {
     Debug.Assert(positionCount == 0);
     Debug.Assert(docStatus == Status.STARTED);
     Debug.Assert(fieldStatus == Status.STARTED);
     Debug.Assert(termStatus == Status.STARTED);
     @in.FinishTerm();
     termStatus = Status.FINISHED;
     --termCount;
 }
 public override void FinishTerm()
 {
     if (Debugging.AssertsEnabled)
     {
         Debugging.Assert(positionCount == 0);
         Debugging.Assert(docStatus == Status.STARTED);
         Debugging.Assert(fieldStatus == Status.STARTED);
         Debugging.Assert(termStatus == Status.STARTED);
     }
     @in.FinishTerm();
     termStatus = Status.FINISHED;
     --termCount;
 }
Example #4
        internal void FinishDocument()
        {
            Debug.Assert(docState.TestPoint("TermVectorsTermsWriterPerField.finish start"));

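            // Number of unique terms indexed for this field in the current document.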
            int numPostings = termsHashPerField.bytesHash.Count;

            BytesRef flushTerm = termsWriter.flushTerm;

            Debug.Assert(numPostings >= 0);

            if (numPostings > maxNumPostings)
            {
                maxNumPostings = numPostings;
            }

            // this is called once, after inverting all occurrences
            // of a given field in the doc.  At this point we flush
            // our hash into the DocWriter.

            Debug.Assert(termsWriter.VectorFieldsInOrder(fieldInfo));

            TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray;
            TermVectorsWriter        tv       = termsWriter.writer;

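            // Sort the term IDs with the writer's comparer so terms are flushed
            // in the order the term vectors format expects.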
            int[] termIDs = termsHashPerField.SortPostings(tv.Comparer);

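            // Begin the field: tell the writer how many terms follow and whether
            // positions, offsets, and payloads were recorded.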
            tv.StartField(fieldInfo, numPostings, doVectorPositions, doVectorOffsets, hasPayloads);

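            // Byte-slice readers are only needed for data that was actually indexed.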
            ByteSliceReader posReader = doVectorPositions ? termsWriter.vectorSliceReaderPos : null;
            ByteSliceReader offReader = doVectorOffsets ? termsWriter.vectorSliceReaderOff : null;

            ByteBlockPool termBytePool = termsHashPerField.termBytePool;

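            // Flush each term: its bytes, its frequency, and optionally its positions and offsets.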
            for (int j = 0; j < numPostings; j++)
            {
                int termID = termIDs[j];
                int freq   = postings.freqs[termID];

                // Resolve this term's bytes from the shared term byte pool into flushTerm.
                termBytePool.SetBytesRef(flushTerm, postings.textStarts[termID]);
                tv.StartTerm(flushTerm, freq);

                if (doVectorPositions || doVectorOffsets)
                {
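                    // Positions were recorded into stream 0, offsets into stream 1.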
                    if (posReader != null)
                    {
                        termsHashPerField.InitReader(posReader, termID, 0);
                    }
                    if (offReader != null)
                    {
                        termsHashPerField.InitReader(offReader, termID, 1);
                    }
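                    // Write the freq position/offset entries for this term.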
                    tv.AddProx(freq, posReader, offReader);
                }
                tv.FinishTerm();
            }
            tv.FinishField();

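            // Reset the per-field terms hash so it can be reused for the next document.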
            termsHashPerField.Reset();

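            // Mark the field as storing term vectors.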
            fieldInfo.SetStoreTermVectors();
        }