/// <summary>
/// Disposes the terms-index reader, the terms-dictionary input, and the postings reader.
/// Nested <c>try</c>/<c>finally</c> blocks guarantee that the later resources are still
/// released when an earlier <c>Dispose</c> call throws.
/// </summary>
/// <param name="disposing">When <c>false</c> (finalizer path) nothing is released.</param>
protected override void Dispose(bool disposing)
{
    if (!disposing)
    {
        return;
    }
    try
    {
        try
        {
            indexReader?.Dispose();
        }
        finally
        {
            // Clear the reference so that an instance an app keeps alive after closing
            // (not GC-able, despite being closed) still frees most of its RAM.
            indexReader = null;
            input?.Dispose();
        }
    }
    finally
    {
        postingsReader?.Dispose();
    }
}
/// <summary>
/// Disposes the terms-index reader, the terms-dictionary input, and the postings reader.
/// Nested <c>try</c>/<c>finally</c> blocks guarantee that the later resources are still
/// released when an earlier <c>Dispose</c> call throws.
/// </summary>
public override void Dispose()
{
    try
    {
        try
        {
            _indexReader?.Dispose();
        }
        finally
        {
            // Clear the reference so that an instance an app keeps alive after closing
            // (not GC-able, despite being closed) still frees most of its RAM.
            _indexReader = null;
            _input?.Dispose();
        }
    }
    finally
    {
        _postingsReader?.Dispose();
    }
}
/// <summary>
/// Opens the segment's terms-dictionary file (<c>BlockTermsWriter.TERMS_EXTENSION</c>),
/// reads its header and the per-field metadata table, validates each field's statistics,
/// and registers one <c>FieldReader</c> per field. The postings reader is given a chance
/// to read its own header from the same input. If anything fails before the metadata is
/// fully parsed, the input is disposed so the file handle does not leak; on success the
/// supplied terms-index reader is retained.
/// </summary>
/// <param name="indexReader">Terms-index reader; stored only after parsing succeeds.</param>
/// <param name="dir">Directory to open the terms-dictionary file from.</param>
/// <param name="fieldInfos">Used to resolve each on-disk field number to a <c>FieldInfo</c>.</param>
/// <param name="info">Segment being opened; <c>DocCount</c> bounds the per-field doc counts.</param>
/// <param name="postingsReader">Initialized against the same input; disposed by <c>Dispose</c>.</param>
/// <param name="context">IO context passed to <c>OpenInput</c>.</param>
/// <param name="segmentSuffix">Suffix used to build the terms file name.</param>
/// <exception cref="CorruptIndexException">On any inconsistent on-disk statistic.</exception>
public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldInfos fieldInfos, SegmentInfo info,
    PostingsReaderBase postingsReader, IOContext context, string segmentSuffix)
{
    this.postingsReader = postingsReader;

    input = dir.OpenInput(IndexFileNames.SegmentFileName(info.Name, segmentSuffix, BlockTermsWriter.TERMS_EXTENSION),
        context);

    bool success = false;
    try
    {
        version = ReadHeader(input);

        // Have PostingsReader init itself
        postingsReader.Init(input);

        // Read per-field details
        SeekDir(input, dirOffset);

        int numFields = input.ReadVInt32();
        if (numFields < 0)
        {
            throw new CorruptIndexException("invalid number of fields: " + numFields + " (resource=" + input + ")");
        }
        for (int i = 0; i < numFields; i++)
        {
            int field = input.ReadVInt32();
            long numTerms = input.ReadVInt64();
            Debug.Assert(numTerms >= 0);
            long termsStartPointer = input.ReadVInt64();
            FieldInfo fieldInfo = fieldInfos.FieldInfo(field);
            // DOCS_ONLY fields write no total-term-freq; -1 marks it absent.
            long sumTotalTermFreq = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ? -1 : input.ReadVInt64();
            long sumDocFreq = input.ReadVInt64();
            int docCount = input.ReadVInt32();
            // longsSize exists in the on-disk format only from VERSION_META_ARRAY onward.
            int longsSize = version >= BlockTermsWriter.VERSION_META_ARRAY ? input.ReadVInt32() : 0;
            if (docCount < 0 || docCount > info.DocCount)
            {
                // #docs with field must be <= #docs
                throw new CorruptIndexException("invalid docCount: " + docCount + " maxDoc: " + info.DocCount + " (resource=" + input + ")");
            }
            if (sumDocFreq < docCount)
            {
                // #postings must be >= #docs with field
                throw new CorruptIndexException("invalid sumDocFreq: " + sumDocFreq + " docCount: " + docCount + " (resource=" + input + ")");
            }
            if (sumTotalTermFreq != -1 && sumTotalTermFreq < sumDocFreq)
            {
                // #positions must be >= #postings
                throw new CorruptIndexException("invalid sumTotalTermFreq: " + sumTotalTermFreq + " sumDocFreq: " + sumDocFreq + " (resource=" + input + ")");
            }
            // Put returns the previously mapped reader; any non-null result means the
            // file listed the same field twice, which is corruption.
            FieldReader previous = fields.Put(fieldInfo.Name,
                new FieldReader(this, fieldInfo, numTerms, termsStartPointer, sumTotalTermFreq, sumDocFreq,
                    docCount, longsSize));
            if (previous != null)
            {
                throw new CorruptIndexException("duplicate fields: " + fieldInfo.Name + " (resource=" + input + ")");
            }
        }
        success = true;
    }
    finally
    {
        if (!success)
        {
            // Parsing failed part-way through: release the file handle we opened above.
            input.Dispose();
        }
    }
    this.indexReader = indexReader;
}
/// <summary>
/// Opens the segment's terms-dictionary file (<c>BlockTermsWriter.TERMS_EXTENSION</c>),
/// reads its header and the per-field metadata table, validates each field's statistics,
/// and registers one <c>FieldReader</c> per field. The postings reader is given a chance
/// to read its own header from the same input. If anything fails before the metadata is
/// fully parsed, the input is disposed so the file handle does not leak; on success the
/// supplied terms-index reader is retained.
/// NOTE(review): an identical constructor appears again later in this file; duplicate
/// members will not compile — one copy should be removed.
/// </summary>
/// <exception cref="CorruptIndexException">On any inconsistent on-disk statistic.</exception>
public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldInfos fieldInfos, SegmentInfo info,
    PostingsReaderBase postingsReader, IOContext context, String segmentSuffix)
{
    _postingsReader = postingsReader;
    _input = dir.OpenInput(
        IndexFileNames.SegmentFileName(info.Name, segmentSuffix, BlockTermsWriter.TERMS_EXTENSION),
        context);
    var success = false;
    try
    {
        _version = ReadHeader(_input);

        // Have PostingsReader init itself
        postingsReader.Init(_input);

        // Read per-field details
        SeekDir(_input, _dirOffset);
        int numFields = _input.ReadVInt();
        if (numFields < 0)
        {
            throw new CorruptIndexException(String.Format("Invalid number of fields: {0}, Resource: {1}", numFields,
                _input));
        }
        for (var i = 0; i < numFields; i++)
        {
            var field = _input.ReadVInt();
            var numTerms = _input.ReadVLong();
            Debug.Assert(numTerms >= 0);
            var termsStartPointer = _input.ReadVLong();
            var fieldInfo = fieldInfos.FieldInfo(field);
            // DOCS_ONLY fields write no total-term-freq; -1 marks it absent.
            var sumTotalTermFreq = fieldInfo.FieldIndexOptions == FieldInfo.IndexOptions.DOCS_ONLY
                ? -1
                : _input.ReadVLong();
            var sumDocFreq = _input.ReadVLong();
            var docCount = _input.ReadVInt();
            // longsSize exists in the on-disk format only from VERSION_META_ARRAY onward.
            var longsSize = _version >= BlockTermsWriter.VERSION_META_ARRAY ? _input.ReadVInt() : 0;
            if (docCount < 0 || docCount > info.DocCount)
            {
                // #docs with field must be <= #docs
                throw new CorruptIndexException(
                    String.Format("Invalid DocCount: {0}, MaxDoc: {1}, Resource: {2}", docCount, info.DocCount,
                        _input));
            }
            if (sumDocFreq < docCount)
            {
                // #postings must be >= #docs with field
                throw new CorruptIndexException(
                    String.Format("Invalid sumDocFreq: {0}, DocCount: {1}, Resource: {2}", sumDocFreq, docCount,
                        _input));
            }
            if (sumTotalTermFreq != -1 && sumTotalTermFreq < sumDocFreq)
            {
                // #positions must be >= #postings
                throw new CorruptIndexException(
                    String.Format("Invalid sumTotalTermFreq: {0}, sumDocFreq: {1}, Resource: {2}", sumTotalTermFreq,
                        sumDocFreq, _input));
            }
            try
            {
                _fields.Add(fieldInfo.Name,
                    new FieldReader(fieldInfo, this, numTerms, termsStartPointer, sumTotalTermFreq, sumDocFreq,
                        docCount, longsSize));
            }
            catch (ArgumentException)
            {
                // Add throws when the key already exists: the file listed the same field
                // twice, which we surface as index corruption.
                throw new CorruptIndexException(String.Format("Duplicate fields: {0}, Resource: {1}", fieldInfo.Name,
                    _input));
            }
        }
        success = true;
    }
    finally
    {
        if (!success)
        {
            // Parsing failed part-way through: release the file handle we opened above.
            _input.Dispose();
        }
    }
    _indexReader = indexReader;
}
/// <summary>
/// Opens the segment's terms-dictionary file (<c>BlockTermsWriter.TERMS_EXTENSION</c>),
/// reads its header and the per-field metadata table, validates each field's statistics,
/// and registers one <c>FieldReader</c> per field. The postings reader is given a chance
/// to read its own header from the same input. If anything fails before the metadata is
/// fully parsed, the input is disposed so the file handle does not leak; on success the
/// supplied terms-index reader is retained.
/// NOTE(review): this constructor is a byte-for-byte duplicate of an identical constructor
/// earlier in this file; duplicate members will not compile — one copy should be removed.
/// </summary>
/// <exception cref="CorruptIndexException">On any inconsistent on-disk statistic.</exception>
public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldInfos fieldInfos, SegmentInfo info,
    PostingsReaderBase postingsReader, IOContext context, String segmentSuffix)
{
    _postingsReader = postingsReader;
    _input = dir.OpenInput(
        IndexFileNames.SegmentFileName(info.Name, segmentSuffix, BlockTermsWriter.TERMS_EXTENSION),
        context);
    var success = false;
    try
    {
        _version = ReadHeader(_input);

        // Have PostingsReader init itself
        postingsReader.Init(_input);

        // Read per-field details
        SeekDir(_input, _dirOffset);
        int numFields = _input.ReadVInt();
        if (numFields < 0)
        {
            throw new CorruptIndexException(String.Format("Invalid number of fields: {0}, Resource: {1}", numFields,
                _input));
        }
        for (var i = 0; i < numFields; i++)
        {
            var field = _input.ReadVInt();
            var numTerms = _input.ReadVLong();
            Debug.Assert(numTerms >= 0);
            var termsStartPointer = _input.ReadVLong();
            var fieldInfo = fieldInfos.FieldInfo(field);
            // DOCS_ONLY fields write no total-term-freq; -1 marks it absent.
            var sumTotalTermFreq = fieldInfo.FieldIndexOptions == FieldInfo.IndexOptions.DOCS_ONLY
                ? -1
                : _input.ReadVLong();
            var sumDocFreq = _input.ReadVLong();
            var docCount = _input.ReadVInt();
            // longsSize exists in the on-disk format only from VERSION_META_ARRAY onward.
            var longsSize = _version >= BlockTermsWriter.VERSION_META_ARRAY ? _input.ReadVInt() : 0;
            if (docCount < 0 || docCount > info.DocCount)
            {
                // #docs with field must be <= #docs
                throw new CorruptIndexException(
                    String.Format("Invalid DocCount: {0}, MaxDoc: {1}, Resource: {2}", docCount, info.DocCount,
                        _input));
            }
            if (sumDocFreq < docCount)
            {
                // #postings must be >= #docs with field
                throw new CorruptIndexException(
                    String.Format("Invalid sumDocFreq: {0}, DocCount: {1}, Resource: {2}", sumDocFreq, docCount,
                        _input));
            }
            if (sumTotalTermFreq != -1 && sumTotalTermFreq < sumDocFreq)
            {
                // #positions must be >= #postings
                throw new CorruptIndexException(
                    String.Format("Invalid sumTotalTermFreq: {0}, sumDocFreq: {1}, Resource: {2}", sumTotalTermFreq,
                        sumDocFreq, _input));
            }
            try
            {
                _fields.Add(fieldInfo.Name,
                    new FieldReader(fieldInfo, this, numTerms, termsStartPointer, sumTotalTermFreq, sumDocFreq,
                        docCount, longsSize));
            }
            catch (ArgumentException)
            {
                // Add throws when the key already exists: the file listed the same field
                // twice, which we surface as index corruption.
                throw new CorruptIndexException(String.Format("Duplicate fields: {0}, Resource: {1}", fieldInfo.Name,
                    _input));
            }
        }
        success = true;
    }
    finally
    {
        if (!success)
        {
            // Parsing failed part-way through: release the file handle we opened above.
            _input.Dispose();
        }
    }
    _indexReader = indexReader;
}
/// <summary>
/// Disposes the terms-index reader, the terms-dictionary input, and the postings reader;
/// nested <c>try</c>/<c>finally</c> blocks guarantee the later resources are still released
/// when an earlier <c>Dispose</c> call throws.
/// NOTE(review): this file defines a second parameterless <c>Dispose()</c> with the same
/// body; duplicate members will not compile — one copy should be removed.
/// </summary>
public override void Dispose()
{
    try
    {
        try
        {
            if (_indexReader != null) _indexReader.Dispose();
        }
        finally
        {
            // null so if an app hangs on to us (ie, we are not
            // GCable, despite being closed) we still free most
            // ram
            _indexReader = null;
            if (_input != null) _input.Dispose();
        }
    }
    finally
    {
        if (_postingsReader != null) _postingsReader.Dispose();
    }
}