/// <summary>
/// Holds per-field statistics and the file pointer at which this field's
/// term blocks begin within the terms dictionary.
/// </summary>
public FieldReader(BlockTermsReader outerInstance, FieldInfo fieldInfo, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize)
{
    // The writer never emits a field without at least one term.
    Debug.Assert(numTerms > 0);

    this.outerInstance = outerInstance;
    this.fieldInfo = fieldInfo;
    this.numTerms = numTerms;
    this.termsStartPointer = termsStartPointer;
    this.sumTotalTermFreq = sumTotalTermFreq;
    this.sumDocFreq = sumDocFreq;
    this.docCount = docCount;
    this.longsSize = longsSize;
}
/// <summary>
/// Captures the statistics and terms-dictionary start offset for a single
/// field, as read from the terms index header.
/// </summary>
public FieldReader(FieldInfo fieldInfo, BlockTermsReader blockTermsReader, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize)
{
    // Fields with zero terms are never written, so a violation indicates a corrupt header.
    Debug.Assert(numTerms > 0);

    _fieldInfo = fieldInfo;
    _blockTermsReader = blockTermsReader;

    // Per-field statistics carried over from the on-disk header.
    _numTerms = numTerms;
    _sumTotalTermFreq = sumTotalTermFreq;
    _sumDocFreq = sumDocFreq;
    _docCount = docCount;

    _termsStartPointer = termsStartPointer;
    _longsSize = longsSize;
}
/// <summary>
/// Creates a terms enumerator for one field, cloning the shared terms-dictionary
/// input and positioning it at the field's first term block.
/// </summary>
public SegmentTermsEnum(FieldReader fieldReader, BlockTermsReader blockTermsReader)
{
    _fieldReader = fieldReader;
    _blockTermsReader = blockTermsReader;

    // Clone the terms-dict input so this enum can seek independently,
    // then position it at the start of this field's blocks.
    _input = (IndexInput)blockTermsReader._input.Clone();
    _input.Seek(fieldReader._termsStartPointer);

    _indexEnum = blockTermsReader._indexReader.GetFieldEnum(fieldReader._fieldInfo);
    _doOrd = blockTermsReader._indexReader.SupportsOrd;
    _fieldTerm.Field = fieldReader._fieldInfo.Name;

    // Fresh postings state; -1 marks "not yet decoded".
    _state = blockTermsReader._postingsReader.NewTermState();
    _state.TotalTermFreq = -1;
    _state.Ord = -1;

    // Scratch buffers for block decoding; sizes are starting capacities.
    _termSuffixes = new byte[128];
    _docFreqBytes = new byte[64];
    _longs = new long[fieldReader._longsSize];
}
/// <summary>
/// Initializes a per-field terms enumerator over the block terms dictionary.
/// </summary>
public SegmentTermsEnum(FieldReader fieldReader, BlockTermsReader blockTermsReader)
{
    _blockTermsReader = blockTermsReader;
    _fieldReader = fieldReader;

    // Private clone of the dictionary input: cloning gives this enum its own
    // file position, which is then moved to the field's first block.
    _input = (IndexInput)_blockTermsReader._input.Clone();
    _input.Seek(_fieldReader._termsStartPointer);

    _doOrd = _blockTermsReader._indexReader.SupportsOrd;
    _indexEnum = _blockTermsReader._indexReader.GetFieldEnum(_fieldReader._fieldInfo);
    _fieldTerm.Field = _fieldReader._fieldInfo.Name;

    _state = _blockTermsReader._postingsReader.NewTermState();
    // Sentinels: statistics/ord are filled in lazily as terms are decoded.
    _state.TotalTermFreq = -1;
    _state.Ord = -1;

    // Initial scratch-buffer capacities for suffix and doc-freq bytes.
    _termSuffixes = new byte[128];
    _docFreqBytes = new byte[64];
    _longs = new long[_fieldReader._longsSize];
}
/// <summary>
/// Records one field's term statistics plus where its term blocks start
/// in the terms dictionary file.
/// </summary>
public FieldReader(FieldInfo fieldInfo, BlockTermsReader blockTermsReader, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize)
{
    // Empty fields are never written to the terms dictionary.
    Debug.Assert(numTerms > 0);

    _blockTermsReader = blockTermsReader;
    _fieldInfo = fieldInfo;
    _termsStartPointer = termsStartPointer;
    _longsSize = longsSize;

    // Aggregate statistics as stored in the field header.
    _numTerms = numTerms;
    _sumTotalTermFreq = sumTotalTermFreq;
    _sumDocFreq = sumDocFreq;
    _docCount = docCount;
}
/// <summary>
/// Builds the fields producer for a segment: a sep postings reader wrapped by a
/// fixed-gap terms index and a block terms dictionary. Intermediate readers own
/// open files, so each construction step disposes its predecessors on failure;
/// on success ownership transfers to the returned <see cref="BlockTermsReader"/>.
/// </summary>
public override FieldsProducer FieldsProducer(SegmentReadState state)
{
    // Created first; must be released if any later step throws.
    PostingsReaderBase postingsReader = new SepPostingsReader(state.Directory, state.FieldInfos, state.SegmentInfo, state.Context, new MockIntFactory(baseBlockSize), state.SegmentSuffix);

    TermsIndexReaderBase indexReader;
    try
    {
        indexReader = new FixedGapTermsIndexReader(state.Directory, state.FieldInfos, state.SegmentInfo.Name, state.TermsIndexDivisor, BytesRef.UTF8SortedAsUnicodeComparer, state.SegmentSuffix, state.Context);
    }
    catch
    {
        postingsReader.Dispose();
        throw;
    }

    try
    {
        // Success: BlockTermsReader now owns both postingsReader and indexReader.
        return new BlockTermsReader(indexReader, state.Directory, state.FieldInfos, state.SegmentInfo, postingsReader, state.Context, state.SegmentSuffix);
    }
    catch
    {
        // Dispose both even if the first Dispose throws.
        try
        {
            postingsReader.Dispose();
        }
        finally
        {
            indexReader.Dispose();
        }
        throw;
    }
}