public static void ReadLegacyInfos(SegmentInfos infos, Directory directory, IndexInput input, int format)
{
    infos.Version = input.ReadInt64(); // read version
    infos.Counter = input.ReadInt32(); // read counter
    Lucene3xSegmentInfoReader reader = new Lucene3xSegmentInfoReader();
    for (int i = input.ReadInt32(); i > 0; i--) // read segmentInfos
    {
        SegmentCommitInfo siPerCommit = reader.ReadLegacySegmentInfo(directory, format, input);
        SegmentInfo si = siPerCommit.Info;

        if (si.Version == null)
        {
            // Could be a 3.0 - try to open the doc stores - if it fails, it's a
            // 2.x segment, and an IndexFormatTooOldException will be thrown,
            // which is what we want.
            Directory dir = directory;
            if (Lucene3xSegmentInfoFormat.GetDocStoreOffset(si) != -1)
            {
                if (Lucene3xSegmentInfoFormat.GetDocStoreIsCompoundFile(si))
                {
                    dir = new CompoundFileDirectory(dir, IndexFileNames.SegmentFileName(Lucene3xSegmentInfoFormat.GetDocStoreSegment(si), "", Lucene3xCodec.COMPOUND_FILE_STORE_EXTENSION), IOContext.READ_ONCE, false);
                }
            }
            else if (si.UseCompoundFile)
            {
                dir = new CompoundFileDirectory(dir, IndexFileNames.SegmentFileName(si.Name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), IOContext.READ_ONCE, false);
            }

            try
            {
                Lucene3xStoredFieldsReader.CheckCodeVersion(dir, Lucene3xSegmentInfoFormat.GetDocStoreSegment(si));
            }
            finally
            {
                // If we opened the directory, close it
                if (dir != directory)
                {
                    dir.Dispose();
                }
            }

            // Above call succeeded, so it's a 3.0 segment. Upgrade it so the next
            // time the segment is read, its version won't be null and we won't
            // need to open FieldsReader every time for each such segment.
            si.Version = "3.0";
        }
        else if (si.Version.Equals("2.x", StringComparison.Ordinal))
        {
            // If it's a 3x index touched by 3.1+ code, then segments record their
            // version, whether they are 2.x ones or not. We detect that and throw
            // appropriate exception.
            throw new IndexFormatTooOldException("segment " + si.Name + " in resource " + input, si.Version);
        }
        infos.Add(siPerCommit);
    }

    infos.UserData = input.ReadStringStringMap();
}
public override void Init(IndexInput termsIn)
{
    // Make sure we are talking to the matching past writer
    CodecUtil.CheckHeader(termsIn, SepPostingsWriter.CODEC, SepPostingsWriter.VERSION_START, SepPostingsWriter.VERSION_START);
    skipInterval = termsIn.ReadInt32();
    maxSkipLevels = termsIn.ReadInt32();
    skipMinimum = termsIn.ReadInt32();
}
/// <summary>
/// Read as a d-gaps list. </summary>
private void ReadSetDgaps(IndexInput input)
{
    size = input.ReadInt32(); // (re)read size
    count = input.ReadInt32(); // read count
    bits = new byte[GetNumBytes(size)]; // allocate bits
    int last = 0;
    int n = Count();
    while (n > 0)
    {
        last += input.ReadVInt32();
        bits[last] = input.ReadByte();
        n -= BitUtil.BitCount(bits[last]);
        Debug.Assert(n >= 0);
    }
}
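// Illustrative sketch, not the Lucene.NET writer: the d-gaps format read above
// stores only the non-zero bytes of the bit vector as (gap, byte) pairs, where
// each gap is the VInt-encoded distance from the previous non-zero byte. A
// hypothetical write side matching ReadSetDgaps could look like this:
private void WriteSetDgapsSketch(IndexOutput output)
{
    output.WriteInt32(size);  // size in bits
    output.WriteInt32(count); // number of set bits
    int last = 0;
    for (int i = 0; i < bits.Length; i++)
    {
        if (bits[i] != 0)
        {
            output.WriteVInt32(i - last); // gap from the previous non-zero byte
            output.WriteByte(bits[i]);
            last = i;
        }
    }
}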
public override Int32IndexInput OpenInput(Directory dir, string fileName, IOContext context)
{
    IndexInput input = dir.OpenInput(fileName, context);
    int baseBlockSize = input.ReadInt32();
    return new VariableInt32BlockIndexInputAnonymousClass(input, baseBlockSize);
}
private static void ValidateFooter(IndexInput @in)
{
    int magic = @in.ReadInt32();
    if (magic != FOOTER_MAGIC)
    {
        throw new IOException("codec footer mismatch: actual footer=" + magic + " vs expected footer=" + FOOTER_MAGIC + " (resource: " + @in + ")");
    }
    int algorithmID = @in.ReadInt32();
    if (algorithmID != 0)
    {
        throw new IOException("codec footer mismatch: unknown algorithmID: " + algorithmID);
    }
}
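// For context, the footer validated above is the trailer CodecUtil writes at
// the end of every codec file: a magic int, an algorithm ID (0 is the only
// defined value, meaning CRC32), and the 8-byte checksum. A minimal sketch of
// the write side, assuming the output exposes its running CRC via the
// Checksum property as IndexOutput does in 4.8:
private static void WriteFooterSketch(IndexOutput @out)
{
    @out.WriteInt32(FOOTER_MAGIC); // fixed magic marking the start of the footer
    @out.WriteInt32(0);            // algorithm ID: 0 = CRC32
    @out.WriteInt64(@out.Checksum); // checksum over everything written so far
}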
private BinaryDocValues LoadBytesFixedStraight(FieldInfo field)
{
    string fileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name + "_" + Convert.ToString(field.Number, CultureInfo.InvariantCulture), segmentSuffix, "dat");
    IndexInput input = dir.OpenInput(fileName, state.Context);
    bool success = false;
    try
    {
        CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.BYTES_FIXED_STRAIGHT_CODEC_NAME, Lucene40DocValuesFormat.BYTES_FIXED_STRAIGHT_VERSION_START, Lucene40DocValuesFormat.BYTES_FIXED_STRAIGHT_VERSION_CURRENT);
        int fixedLength = input.ReadInt32();
        var bytes = new PagedBytes(16);
        bytes.Copy(input, fixedLength * (long)state.SegmentInfo.DocCount);
        PagedBytes.Reader bytesReader = bytes.Freeze(true);
        CodecUtil.CheckEOF(input);
        success = true;
        ramBytesUsed.AddAndGet(bytes.RamBytesUsed());
        return new BinaryDocValuesAnonymousClass(fixedLength, bytesReader);
    }
    finally
    {
        if (success)
        {
            IOUtils.Dispose(input);
        }
        else
        {
            IOUtils.DisposeWhileHandlingException(input);
        }
    }
}
public SegmentTermEnum(IndexInput i, FieldInfos fis, bool isi)
{
    input = i;
    fieldInfos = fis;
    isIndex = isi;
    maxSkipLevels = 1; // use single-level skip lists for formats > -3

    int firstInt = input.ReadInt32();
    if (firstInt >= 0)
    {
        // original-format file, without explicit format version number
        format = 0;
        size = firstInt;

        // back-compatible settings
        indexInterval = 128;
        skipInterval = int.MaxValue; // switch off skipTo optimization
    }
    else
    {
        // we have a format version number
        format = firstInt;

        // check that it is a format we can understand
        if (format > FORMAT_MINIMUM)
        {
            throw new IndexFormatTooOldException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
        if (format < FORMAT_CURRENT)
        {
            throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
        size = input.ReadInt64(); // read the size

        indexInterval = input.ReadInt32();
        skipInterval = input.ReadInt32();
        maxSkipLevels = input.ReadInt32();
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(indexInterval > 0, "indexInterval={0} is negative; must be > 0", indexInterval);
            Debugging.Assert(skipInterval > 0, "skipInterval={0} is negative; must be > 0", skipInterval);
        }
    }
}
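// Note on the comparisons above: 3.x format numbers are negative and count
// downward as the format evolves (newer = more negative), so "format >
// FORMAT_MINIMUM" means the file is *older* than the oldest supported format,
// and "format < FORMAT_CURRENT" means it is *newer* than this code understands.
// A tiny illustration with assumed example values:
//   FORMAT_MINIMUM = -3, FORMAT_CURRENT = -4
//   format == -2  =>  -2 > -3  => IndexFormatTooOldException
//   format == -5  =>  -5 < -4  => IndexFormatTooNewException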
private void ReadField(StoredFieldVisitor visitor, FieldInfo info, int bits)
{
    int numeric = bits & Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK;
    if (numeric != 0)
    {
        switch (numeric)
        {
            case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_INT:
                visitor.Int32Field(info, fieldsStream.ReadInt32());
                return;

            case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_LONG:
                visitor.Int64Field(info, fieldsStream.ReadInt64());
                return;

            case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_FLOAT:
                visitor.SingleField(info, J2N.BitConversion.Int32BitsToSingle(fieldsStream.ReadInt32()));
                return;

            case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_DOUBLE:
                visitor.DoubleField(info, J2N.BitConversion.Int64BitsToDouble(fieldsStream.ReadInt64()));
                return;

            default:
                throw new CorruptIndexException("Invalid numeric type: " + numeric.ToString("x"));
        }
    }
    else
    {
        int length = fieldsStream.ReadVInt32();
        var bytes = new byte[length];
        fieldsStream.ReadBytes(bytes, 0, length);
        if ((bits & Lucene40StoredFieldsWriter.FIELD_IS_BINARY) != 0)
        {
            visitor.BinaryField(info, bytes);
        }
        else
        {
#pragma warning disable 612, 618
            visitor.StringField(info, IOUtils.CHARSET_UTF_8.GetString(bytes));
#pragma warning restore 612, 618
        }
    }
}
private BinaryEntry ReadBinaryEntry(IndexInput meta)
{
    var entry = new BinaryEntry();
    entry.offset = meta.ReadInt64();
    entry.numBytes = meta.ReadInt32();
    entry.count = meta.ReadInt32();
    entry.missingOffset = meta.ReadInt64();
    if (entry.missingOffset != -1)
    {
        entry.missingBytes = meta.ReadInt64();
    }
    else
    {
        entry.missingBytes = 0;
    }
    return entry;
}
/// <summary>
/// NOTE: This was loadFloatField() in Lucene.
/// </summary>
private NumericDocValues LoadSingleField(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input)
{
    CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.FLOATS_CODEC_NAME, Lucene40DocValuesFormat.FLOATS_VERSION_START, Lucene40DocValuesFormat.FLOATS_VERSION_CURRENT);
    int valueSize = input.ReadInt32();
    if (valueSize != 4)
    {
        throw new CorruptIndexException("invalid valueSize: " + valueSize);
    }
    int maxDoc = state.SegmentInfo.DocCount;
    int[] values = new int[maxDoc];
    for (int i = 0; i < values.Length; i++)
    {
        values[i] = input.ReadInt32();
    }
    ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
    return new NumericDocValuesAnonymousClass7(values);
}
private static BinaryEntry ReadBinaryEntry(IndexInput meta) // LUCENENET: CA1822: Mark members as static
{
    var entry = new BinaryEntry();
    entry.offset = meta.ReadInt64();
    entry.numBytes = meta.ReadInt32();
    entry.count = meta.ReadInt32();
    entry.missingOffset = meta.ReadInt64();
    if (entry.missingOffset != -1)
    {
        entry.missingBytes = meta.ReadInt64();
    }
    else
    {
        entry.missingBytes = 0;
    }
    return entry;
}
/// <summary>
/// Read as a d-gaps cleared bits list. </summary>
private void ReadClearedDgaps(IndexInput input)
{
    size = input.ReadInt32(); // (re)read size
    count = input.ReadInt32(); // read count
    bits = new byte[GetNumBytes(size)]; // allocate bits
    for (int i = 0; i < bits.Length; ++i)
    {
        bits[i] = 0xff;
    }
    ClearUnusedBits();
    int last = 0;
    int numCleared = Length - Count();
    while (numCleared > 0)
    {
        last += input.ReadVInt32();
        bits[last] = input.ReadByte();
        numCleared -= 8 - BitUtil.BitCount(bits[last]);
        Debug.Assert(numCleared >= 0 || (last == (bits.Length - 1) && numCleared == -(8 - (size & 7))));
    }
}
private int CheckValidFormat(IndexInput @in)
{
    int format = @in.ReadInt32();
    if (format < FORMAT_MINIMUM)
    {
        throw new IndexFormatTooOldException(@in, format, FORMAT_MINIMUM, FORMAT_CURRENT);
    }
    if (format > FORMAT_CURRENT)
    {
        throw new IndexFormatTooNewException(@in, format, FORMAT_MINIMUM, FORMAT_CURRENT);
    }
    return format;
}
public virtual void TestReadNestedCFP()
{
    Directory newDir = NewDirectory();
    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), true);
    CompoundFileDirectory nested = new CompoundFileDirectory(newDir, "b.cfs", NewIOContext(Random), true);
    IndexOutput @out = nested.CreateOutput("b.xyz", NewIOContext(Random));
    IndexOutput out1 = nested.CreateOutput("b_1.xyz", NewIOContext(Random));
    @out.WriteInt32(0);
    out1.WriteInt32(1);
    @out.Dispose();
    out1.Dispose();
    nested.Dispose();
    newDir.Copy(csw, "b.cfs", "b.cfs", NewIOContext(Random));
    newDir.Copy(csw, "b.cfe", "b.cfe", NewIOContext(Random));
    newDir.DeleteFile("b.cfs");
    newDir.DeleteFile("b.cfe");
    csw.Dispose();

    Assert.AreEqual(2, newDir.ListAll().Length);
    csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), false);

    Assert.AreEqual(2, csw.ListAll().Length);
    nested = new CompoundFileDirectory(csw, "b.cfs", NewIOContext(Random), false);

    Assert.AreEqual(2, nested.ListAll().Length);
    IndexInput openInput = nested.OpenInput("b.xyz", NewIOContext(Random));
    Assert.AreEqual(0, openInput.ReadInt32());
    openInput.Dispose();
    openInput = nested.OpenInput("b_1.xyz", NewIOContext(Random));
    Assert.AreEqual(1, openInput.ReadInt32());
    openInput.Dispose();
    nested.Dispose();
    csw.Dispose();
    newDir.Dispose();
}
private NumericDocValues LoadByteField(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input)
{
    CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_START, Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
    int valueSize = input.ReadInt32();
    if (valueSize != 1)
    {
        throw new CorruptIndexException("invalid valueSize: " + valueSize);
    }
    int maxDoc = state.SegmentInfo.DocCount;
    var values = new byte[maxDoc];
    input.ReadBytes(values, 0, values.Length);
    ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
    return new NumericDocValuesAnonymousClass3(values);
}
public virtual void TestAppend()
{
    Directory newDir = NewDirectory();
    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), true);
    int size = 5 + Random.Next(128);
    for (int j = 0; j < 2; j++)
    {
        IndexOutput os = csw.CreateOutput("seg_" + j + "_foo.txt", NewIOContext(Random));
        for (int i = 0; i < size; i++)
        {
            os.WriteInt32(i * j);
        }
        os.Dispose();
        string[] listAll = newDir.ListAll();
        Assert.AreEqual(1, listAll.Length);
        Assert.AreEqual("d.cfs", listAll[0]);
    }
    CreateSequenceFile(Dir, "d1", (sbyte)0, 15);
    Dir.Copy(csw, "d1", "d1", NewIOContext(Random));
    string[] listAll_ = newDir.ListAll();
    Assert.AreEqual(1, listAll_.Length);
    Assert.AreEqual("d.cfs", listAll_[0]);
    csw.Dispose();
    CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), false);
    for (int j = 0; j < 2; j++)
    {
        IndexInput openInput = csr.OpenInput("seg_" + j + "_foo.txt", NewIOContext(Random));
        Assert.AreEqual(size * 4, openInput.Length);
        for (int i = 0; i < size; i++)
        {
            Assert.AreEqual(i * j, openInput.ReadInt32());
        }
        openInput.Dispose();
    }
    IndexInput expected = Dir.OpenInput("d1", NewIOContext(Random));
    IndexInput actual = csr.OpenInput("d1", NewIOContext(Random));
    AssertSameStreams("d1", expected, actual);
    AssertSameSeekBehavior("d1", expected, actual);
    expected.Dispose();
    actual.Dispose();
    csr.Dispose();
    newDir.Dispose();
}
private SortedDocValues LoadBytesFixedSorted(/*FieldInfo field, // LUCENENET: Never read */ IndexInput data, IndexInput index)
{
    CodecUtil.CheckHeader(data, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_START, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT);
    CodecUtil.CheckHeader(index, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_START, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT);

    int fixedLength = data.ReadInt32();
    int valueCount = index.ReadInt32();

    PagedBytes bytes = new PagedBytes(16);
    bytes.Copy(data, fixedLength * (long)valueCount);
    PagedBytes.Reader bytesReader = bytes.Freeze(true);
    PackedInt32s.Reader reader = PackedInt32s.GetReader(index);
    ramBytesUsed.AddAndGet(bytes.RamBytesUsed() + reader.RamBytesUsed());

    return CorrectBuggyOrds(new SortedDocValuesAnonymousClass(fixedLength, valueCount, bytesReader, reader));
}
private NumericEntry ReadNumericEntry(IndexInput meta)
{
    var entry = new NumericEntry
    {
        offset = meta.ReadInt64(),
        count = meta.ReadInt32(),
        missingOffset = meta.ReadInt64()
    };
    if (entry.missingOffset != -1)
    {
        entry.missingBytes = meta.ReadInt64();
    }
    else
    {
        entry.missingBytes = 0;
    }
    entry.byteWidth = meta.ReadByte();
    return entry;
}
private static NumericEntry ReadNumericEntry(IndexInput meta) // LUCENENET: CA1822: Mark members as static
{
    var entry = new NumericEntry
    {
        offset = meta.ReadInt64(),
        count = meta.ReadInt32(),
        missingOffset = meta.ReadInt64()
    };
    if (entry.missingOffset != -1)
    {
        entry.missingBytes = meta.ReadInt64();
    }
    else
    {
        entry.missingBytes = 0;
    }
    entry.byteWidth = meta.ReadByte();
    return entry;
}
private NumericDocValues LoadDoubleField(FieldInfo field, IndexInput input)
{
    CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.FLOATS_CODEC_NAME, Lucene40DocValuesFormat.FLOATS_VERSION_START, Lucene40DocValuesFormat.FLOATS_VERSION_CURRENT);
    int valueSize = input.ReadInt32();
    if (valueSize != 8)
    {
        throw new CorruptIndexException("invalid valueSize: " + valueSize);
    }
    int maxDoc = state.SegmentInfo.DocCount;
    long[] values = new long[maxDoc];
    for (int i = 0; i < values.Length; i++)
    {
        values[i] = input.ReadInt64();
    }
    ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
    return new NumericDocValuesAnonymousInnerClassHelper8(values);
}
private SegmentInfo ReadUpgradedSegmentInfo(string name, Directory dir, IndexInput input)
{
    CodecUtil.CheckHeader(input, Lucene3xSegmentInfoFormat.UPGRADED_SI_CODEC_NAME, Lucene3xSegmentInfoFormat.UPGRADED_SI_VERSION_START, Lucene3xSegmentInfoFormat.UPGRADED_SI_VERSION_CURRENT);
    string version = input.ReadString();
    int docCount = input.ReadInt32();

    IDictionary<string, string> attributes = input.ReadStringStringMap();

    bool isCompoundFile = input.ReadByte() == SegmentInfo.YES;

    IDictionary<string, string> diagnostics = input.ReadStringStringMap();

    ISet<string> files = input.ReadStringSet();

    SegmentInfo info = new SegmentInfo(dir, version, name, docCount, isCompoundFile, null, diagnostics, Collections.UnmodifiableMap(attributes));
    info.SetFiles(files);
    return info;
}
public override SegmentInfo Read(Directory dir, string segment, IOContext context)
{
    string fileName = IndexFileNames.SegmentFileName(segment, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
    IndexInput input = dir.OpenInput(fileName, context);
    bool success = false;
    try
    {
        CodecUtil.CheckHeader(input, Lucene40SegmentInfoFormat.CODEC_NAME, Lucene40SegmentInfoFormat.VERSION_START, Lucene40SegmentInfoFormat.VERSION_CURRENT);
        string version = input.ReadString();
        int docCount = input.ReadInt32();
        if (docCount < 0)
        {
            throw new CorruptIndexException("invalid docCount: " + docCount + " (resource=" + input + ")");
        }
        bool isCompoundFile = input.ReadByte() == SegmentInfo.YES;
        IDictionary<string, string> diagnostics = input.ReadStringStringMap();
        input.ReadStringStringMap(); // read deprecated attributes
        ISet<string> files = input.ReadStringSet();

        CodecUtil.CheckEOF(input);

        SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics);
        si.SetFiles(files);

        success = true;

        return si;
    }
    finally
    {
        if (!success)
        {
            IOUtils.DisposeWhileHandlingException(input);
        }
        else
        {
            input.Dispose();
        }
    }
}
private void CheckHeaders(Directory dir)
{
    foreach (string file in dir.ListAll())
    {
        if (file.Equals(IndexWriter.WRITE_LOCK_NAME, StringComparison.Ordinal))
        {
            continue; // write.lock has no header, that's ok
        }
        if (file.Equals(IndexFileNames.SEGMENTS_GEN, StringComparison.Ordinal))
        {
            continue; // segments.gen has no header, that's ok
        }
        if (file.EndsWith(IndexFileNames.COMPOUND_FILE_EXTENSION, StringComparison.Ordinal))
        {
            CompoundFileDirectory cfsDir = new CompoundFileDirectory(dir, file, NewIOContext(Random), false);
            CheckHeaders(cfsDir); // recurse into cfs
            cfsDir.Dispose();
        }
        IndexInput @in = null;
        bool success = false;
        try
        {
            @in = dir.OpenInput(file, NewIOContext(Random));
            int val = @in.ReadInt32();
            Assert.AreEqual(CodecUtil.CODEC_MAGIC, val, file + " has no codec header, instead found: " + val);
            success = true;
        }
        finally
        {
            if (success)
            {
                IOUtils.Dispose(@in);
            }
            else
            {
                IOUtils.DisposeWhileHandlingException(@in);
            }
        }
    }
}
/// <summary>
/// Sole constructor. </summary>
public MonotonicBlockPackedReader(IndexInput @in, int packedIntsVersion, int blockSize, long valueCount, bool direct)
{
    this.valueCount = valueCount;
    blockShift = PackedInt32s.CheckBlockSize(blockSize, AbstractBlockPackedWriter.MIN_BLOCK_SIZE, AbstractBlockPackedWriter.MAX_BLOCK_SIZE);
    blockMask = blockSize - 1;
    int numBlocks = PackedInt32s.NumBlocks(valueCount, blockSize);
    minValues = new long[numBlocks];
    averages = new float[numBlocks];
    subReaders = new PackedInt32s.Reader[numBlocks];
    for (int i = 0; i < numBlocks; ++i)
    {
        minValues[i] = @in.ReadVInt64();
        averages[i] = J2N.BitConversion.Int32BitsToSingle(@in.ReadInt32());
        int bitsPerValue = @in.ReadVInt32();
        if (bitsPerValue > 64)
        {
            throw new IOException("Corrupted");
        }
        if (bitsPerValue == 0)
        {
            subReaders[i] = new PackedInt32s.NullReader(blockSize);
        }
        else
        {
            int size = (int)Math.Min(blockSize, valueCount - (long)i * blockSize);
            if (direct)
            {
                long pointer = @in.GetFilePointer();
                subReaders[i] = PackedInt32s.GetDirectReaderNoHeader(@in, PackedInt32s.Format.PACKED, packedIntsVersion, size, bitsPerValue);
                @in.Seek(pointer + PackedInt32s.Format.PACKED.ByteCount(packedIntsVersion, size, bitsPerValue));
            }
            else
            {
                subReaders[i] = PackedInt32s.GetReaderNoHeader(@in, PackedInt32s.Format.PACKED, packedIntsVersion, size, bitsPerValue);
            }
        }
    }
}
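// The constructor above loads, per block, a minimum value, an average slope,
// and a packed array of per-value corrections. A sketch of how a lookup would
// reconstruct the original monotonic value from those three pieces (mirroring
// this reader's Get; the method name here is illustrative only):
public long GetSketch(long index)
{
    int block = (int)((ulong)index >> blockShift); // which block the index falls in
    int idx = (int)(index & blockMask);            // position within the block
    // value ~= min + slope * position, corrected by the stored packed delta
    return minValues[block] + (long)(idx * averages[block]) + subReaders[block].Get(idx);
}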
private BinaryDocValues LoadBytesFixedDeref(FieldInfo field)
{
    string dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name + "_" + Convert.ToString(field.Number, CultureInfo.InvariantCulture), segmentSuffix, "dat");
    string indexName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name + "_" + Convert.ToString(field.Number, CultureInfo.InvariantCulture), segmentSuffix, "idx");
    IndexInput data = null;
    IndexInput index = null;
    bool success = false;
    try
    {
        data = dir.OpenInput(dataName, state.Context);
        CodecUtil.CheckHeader(data, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_VERSION_START, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_VERSION_CURRENT);
        index = dir.OpenInput(indexName, state.Context);
        CodecUtil.CheckHeader(index, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_VERSION_START, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_VERSION_CURRENT);

        int fixedLength = data.ReadInt32();
        int valueCount = index.ReadInt32();

        PagedBytes bytes = new PagedBytes(16);
        bytes.Copy(data, fixedLength * (long)valueCount);
        PagedBytes.Reader bytesReader = bytes.Freeze(true);
        PackedInt32s.Reader reader = PackedInt32s.GetReader(index);
        CodecUtil.CheckEOF(data);
        CodecUtil.CheckEOF(index);
        ramBytesUsed.AddAndGet(bytes.RamBytesUsed() + reader.RamBytesUsed());
        success = true;
        return new BinaryDocValuesAnonymousClass3(fixedLength, bytesReader, reader);
    }
    finally
    {
        if (success)
        {
            IOUtils.Dispose(data, index);
        }
        else
        {
            IOUtils.DisposeWhileHandlingException(data, index);
        }
    }
}
/// <summary>
/// Verifies that the code version which wrote the segment is supported. </summary>
public static void CheckCodeVersion(Directory dir, string segment)
{
    string indexStreamFN = IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION);
    IndexInput idxStream = dir.OpenInput(indexStreamFN, IOContext.DEFAULT);

    try
    {
        int format = idxStream.ReadInt32();
        if (format < FORMAT_MINIMUM)
        {
            throw new IndexFormatTooOldException(idxStream, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
        if (format > FORMAT_CURRENT)
        {
            throw new IndexFormatTooNewException(idxStream, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
    }
    finally
    {
        idxStream.Dispose();
    }
}
public virtual void TestDoubleClose()
{
    Directory newDir = NewDirectory();
    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), true);
    IndexOutput @out = csw.CreateOutput("d.xyz", NewIOContext(Random));
    @out.WriteInt32(0);
    @out.Dispose();

    csw.Dispose();
    // close a second time - must have no effect according to IDisposable
    csw.Dispose();

    csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), false);
    IndexInput openInput = csw.OpenInput("d.xyz", NewIOContext(Random));
    Assert.AreEqual(0, openInput.ReadInt32());
    openInput.Dispose();
    csw.Dispose();
    // close a second time - must have no effect according to IDisposable
    csw.Dispose();

    newDir.Dispose();
}
protected internal VariableInt32BlockIndexInput(IndexInput input)
{
    this.input = input;
    m_maxBlockSize = input.ReadInt32();
}
public Lucene3xStoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IOContext context)
{
    string segment = Lucene3xSegmentInfoFormat.GetDocStoreSegment(si);
    int docStoreOffset = Lucene3xSegmentInfoFormat.GetDocStoreOffset(si);
    int size = si.DocCount;
    bool success = false;
    fieldInfos = fn;
    try
    {
        if (docStoreOffset != -1 && Lucene3xSegmentInfoFormat.GetDocStoreIsCompoundFile(si))
        {
            d = storeCFSReader = new CompoundFileDirectory(si.Dir, IndexFileNames.SegmentFileName(segment, "", Lucene3xCodec.COMPOUND_FILE_STORE_EXTENSION), context, false);
        }
        else
        {
            storeCFSReader = null;
        }
        fieldsStream = d.OpenInput(IndexFileNames.SegmentFileName(segment, "", FIELDS_EXTENSION), context);
        string indexStreamFN = IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION);
        indexStream = d.OpenInput(indexStreamFN, context);

        format = indexStream.ReadInt32();

        if (format < FORMAT_MINIMUM)
        {
            throw new IndexFormatTooOldException(indexStream, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
        if (format > FORMAT_CURRENT)
        {
            throw new IndexFormatTooNewException(indexStream, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }

        long indexSize = indexStream.Length - FORMAT_SIZE;

        if (docStoreOffset != -1)
        {
            // We read only a slice out of this shared fields file
            this.docStoreOffset = docStoreOffset;
            this.size = size;

            // Verify the file is long enough to hold all of our
            // docs
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(((int)(indexSize / 8)) >= size + this.docStoreOffset, "indexSize={0} size={1} docStoreOffset={2}", indexSize, size, docStoreOffset);
            }
        }
        else
        {
            this.docStoreOffset = 0;
            this.size = (int)(indexSize >> 3);
            // Verify two sources of "maxDoc" agree:
            if (this.size != si.DocCount)
            {
                throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + this.size + " but segmentInfo shows " + si.DocCount);
            }
        }

        numTotalDocs = (int)(indexSize >> 3);
        success = true;
    }
    finally
    {
        // With lock-less commits, it's entirely possible (and
        // fine) to hit a FileNotFound exception above. In
        // this case, we want to explicitly close any subset
        // of things that were opened so that we don't have to
        // wait for a GC to do so.
        if (!success)
        {
            try
            {
                Dispose();
            }
            catch (Exception)
            {
                // keep our original exception
            }
        }
    }
}
private void ReadBits(IndexInput input)
{
    count = input.ReadInt32(); // read count
    bits = new byte[GetNumBytes(size)]; // allocate bits
    input.ReadBytes(bits, 0, bits.Length);
}