public /*internal*/ Document Doc(int n, FieldSelector fieldSelector, IState state)
{
    SeekIndex(n, state);
    long position = indexStream.ReadLong(state);
    fieldsStream.Seek(position, state);

    var doc = new Document();
    int numFields = fieldsStream.ReadVInt(state);
    for (int i = 0; i < numFields; i++)
    {
        int fieldNumber = fieldsStream.ReadVInt(state);
        FieldInfo fi = fieldInfos.FieldInfo(fieldNumber);
        FieldSelectorResult acceptField = fieldSelector == null ? FieldSelectorResult.LOAD : fieldSelector.Accept(fi.name);

        byte bits = fieldsStream.ReadByte(state);
        System.Diagnostics.Debug.Assert(bits <= FieldsWriter.FIELD_IS_COMPRESSED + FieldsWriter.FIELD_IS_TOKENIZED + FieldsWriter.FIELD_IS_BINARY);

        bool compressed = (bits & FieldsWriter.FIELD_IS_COMPRESSED) != 0;
        System.Diagnostics.Debug.Assert(
            !compressed || format < FieldsWriter.FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS,
            "compressed fields are only allowed in indexes of version <= 2.9");
        bool tokenize = (bits & FieldsWriter.FIELD_IS_TOKENIZED) != 0;
        bool binary = (bits & FieldsWriter.FIELD_IS_BINARY) != 0;

        // TODO: Find an alternative approach here if this list continues to grow beyond the
        // list of 5 or 6 currently here. See Lucene 762 for discussion.
        if (acceptField.Equals(FieldSelectorResult.LOAD))
        {
            AddField(doc, fi, binary, compressed, tokenize, state);
        }
        else if (acceptField.Equals(FieldSelectorResult.LOAD_AND_BREAK))
        {
            AddField(doc, fi, binary, compressed, tokenize, state);
            break; // Get out of this loop
        }
        else if (acceptField.Equals(FieldSelectorResult.LAZY_LOAD))
        {
            AddFieldLazy(doc, fi, binary, compressed, tokenize, state);
        }
        else if (acceptField.Equals(FieldSelectorResult.SIZE))
        {
            SkipField(binary, compressed, AddFieldSize(doc, fi, binary, compressed, state), state);
        }
        else if (acceptField.Equals(FieldSelectorResult.SIZE_AND_BREAK))
        {
            AddFieldSize(doc, fi, binary, compressed, state);
            break;
        }
        else
        {
            SkipField(binary, compressed, state);
        }
    }

    return doc;
}
private void Read(IndexInput input, String fileName, IState state)
{
    int firstInt = input.ReadVInt(state);

    if (firstInt < 0)
    {
        // This is a real format
        format = firstInt;
    }
    else
    {
        format = FORMAT_PRE;
    }

    if (format != FORMAT_PRE && format != FORMAT_START)
    {
        throw new CorruptIndexException("unrecognized format " + format + " in file \"" + fileName + "\"");
    }

    int size;
    if (format == FORMAT_PRE)
    {
        size = firstInt;
    }
    else
    {
        size = input.ReadVInt(state); // read in the size
    }

    for (int i = 0; i < size; i++)
    {
        String name = StringHelper.Intern(input.ReadString(state));
        byte bits = input.ReadByte(state);
        bool isIndexed = (bits & IS_INDEXED) != 0;
        bool storeTermVector = (bits & STORE_TERMVECTOR) != 0;
        bool storePositionsWithTermVector = (bits & STORE_POSITIONS_WITH_TERMVECTOR) != 0;
        bool storeOffsetWithTermVector = (bits & STORE_OFFSET_WITH_TERMVECTOR) != 0;
        bool omitNorms = (bits & OMIT_NORMS) != 0;
        bool storePayloads = (bits & STORE_PAYLOADS) != 0;
        bool omitTermFreqAndPositions = (bits & OMIT_TERM_FREQ_AND_POSITIONS) != 0;

        AddInternal(name, isIndexed, storeTermVector, storePositionsWithTermVector, storeOffsetWithTermVector, omitNorms, storePayloads, omitTermFreqAndPositions);
    }

    if (input.FilePointer(state) != input.Length(state))
    {
        throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.FilePointer(state) + " vs size " + input.Length(state));
    }
}
/// <summary>Read as a d-gaps list.</summary>
private void ReadDgaps(IndexInput input, IState state)
{
    size = input.ReadInt(state);      // (re)read size
    count = input.ReadInt(state);     // read count
    bits = new byte[(size >> 3) + 1]; // allocate bits
    int last = 0;
    int n = Count();
    while (n > 0)
    {
        last += input.ReadVInt(state);
        bits[last] = input.ReadByte(state);
        n -= BYTE_COUNTS[bits[last] & 0xFF];
    }
}
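A minimal, self-contained sketch of the d-gap idea that ReadDgaps decodes: only the non-zero bytes of the bit array are stored, each prefixed by the gap (index delta) from the previously stored byte. The class and method names below are illustrative only, not part of the Lucene.Net API.

// Hypothetical illustration of d-gap coding; not the actual Lucene.Net BitVector writer.
using System.Collections.Generic;

internal static class DgapSketch
{
    // Encode: emit an (indexGap, value) pair for every non-zero byte of the bit array.
    public static List<(int Gap, byte Value)> Encode(byte[] bits)
    {
        var pairs = new List<(int, byte)>();
        int last = 0;
        for (int i = 0; i < bits.Length; i++)
        {
            if (bits[i] != 0)
            {
                pairs.Add((i - last, bits[i]));
                last = i;
            }
        }
        return pairs;
    }

    // Decode: mirrors ReadDgaps above, accumulating gaps back into absolute byte indices.
    public static byte[] Decode(IEnumerable<(int Gap, byte Value)> pairs, int length)
    {
        var decoded = new byte[length];
        int last = 0;
        foreach (var (gap, value) in pairs)
        {
            last += gap;
            decoded[last] = value;
        }
        return decoded;
    }
}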
/// <summary>
/// Sole constructor.
/// </summary>
public BlockPackedReader(IndexInput @in, int packedIntsVersion, int blockSize, long valueCount, bool direct)
{
    this.ValueCount = valueCount;
    BlockShift = PackedInts.CheckBlockSize(blockSize, AbstractBlockPackedWriter.MIN_BLOCK_SIZE, AbstractBlockPackedWriter.MAX_BLOCK_SIZE);
    BlockMask = blockSize - 1;
    int numBlocks = PackedInts.NumBlocks(valueCount, blockSize);
    long[] minValues = null;
    SubReaders = new PackedInts.Reader[numBlocks];
    for (int i = 0; i < numBlocks; ++i)
    {
        int token = @in.ReadByte() & 0xFF;
        int bitsPerValue = (int)((uint)token >> AbstractBlockPackedWriter.BPV_SHIFT);
        if (bitsPerValue > 64)
        {
            throw new Exception("Corrupted");
        }
        if ((token & AbstractBlockPackedWriter.MIN_VALUE_EQUALS_0) == 0)
        {
            if (minValues == null)
            {
                minValues = new long[numBlocks];
            }
            minValues[i] = BlockPackedReaderIterator.ZigZagDecode(1L + BlockPackedReaderIterator.ReadVLong(@in));
        }
        if (bitsPerValue == 0)
        {
            SubReaders[i] = new PackedInts.NullReader(blockSize);
        }
        else
        {
            int size = (int)Math.Min(blockSize, valueCount - (long)i * blockSize);
            if (direct)
            {
                long pointer = @in.FilePointer;
                SubReaders[i] = PackedInts.GetDirectReaderNoHeader(@in, PackedInts.Format.PACKED, packedIntsVersion, size, bitsPerValue);
                @in.Seek(pointer + PackedInts.Format.PACKED.ByteCount(packedIntsVersion, size, bitsPerValue));
            }
            else
            {
                SubReaders[i] = PackedInts.GetReaderNoHeader(@in, PackedInts.Format.PACKED, packedIntsVersion, size, bitsPerValue);
            }
        }
    }
    this.MinValues = minValues;
}
public virtual void TestDirectInstantiation()
{
    DirectoryInfo path = CreateTempDir("testDirectInstantiation");

    byte[] largeBuffer = new byte[Random().Next(256 * 1024)], largeReadBuffer = new byte[largeBuffer.Length];
    for (int i = 0; i < largeBuffer.Length; i++)
    {
        largeBuffer[i] = (byte)i; // automatically loops with modulo
    }

    var dirs = new FSDirectory[]
    {
        new SimpleFSDirectory(path, null),
        new NIOFSDirectory(path, null),
        new MMapDirectory(path, null)
    };

    for (int i = 0; i < dirs.Length; i++)
    {
        FSDirectory dir = dirs[i];
        dir.EnsureOpen();
        string fname = "foo." + i;
        string lockname = "foo" + i + ".lck";
        IndexOutput @out = dir.CreateOutput(fname, NewIOContext(Random()));
        @out.WriteByte((byte)(sbyte)i);
        @out.WriteBytes(largeBuffer, largeBuffer.Length);
        @out.Dispose();

        for (int j = 0; j < dirs.Length; j++)
        {
            FSDirectory d2 = dirs[j];
            d2.EnsureOpen();
            Assert.IsTrue(SlowFileExists(d2, fname));
            Assert.AreEqual(1 + largeBuffer.Length, d2.FileLength(fname));

            // don't do read tests if unmapping is not supported!
            if (d2 is MMapDirectory && !((MMapDirectory)d2).UseUnmap)
            {
                continue;
            }

            IndexInput input = d2.OpenInput(fname, NewIOContext(Random()));
            Assert.AreEqual((byte)i, input.ReadByte());

            // read array with buffering enabled
            Arrays.Fill(largeReadBuffer, (byte)0);
            input.ReadBytes(largeReadBuffer, 0, largeReadBuffer.Length, true);
            Assert.AreEqual(largeBuffer, largeReadBuffer);

            // read again without using buffer
            input.Seek(1L);
            Arrays.Fill(largeReadBuffer, (byte)0);
            input.ReadBytes(largeReadBuffer, 0, largeReadBuffer.Length, false);
            Assert.AreEqual(largeBuffer, largeReadBuffer);

            input.Dispose();
        }

        // delete with a different dir
        dirs[(i + 1) % dirs.Length].DeleteFile(fname);

        for (int j = 0; j < dirs.Length; j++)
        {
            FSDirectory d2 = dirs[j];
            Assert.IsFalse(SlowFileExists(d2, fname));
        }

        Lock @lock = dir.MakeLock(lockname);
        Assert.IsTrue(@lock.Obtain());

        for (int j = 0; j < dirs.Length; j++)
        {
            FSDirectory d2 = dirs[j];
            Lock lock2 = d2.MakeLock(lockname);
            try
            {
                Assert.IsFalse(lock2.Obtain(1));
            }
            catch (LockObtainFailedException)
            {
                // OK
            }
        }

        @lock.Dispose();

        // now lock with different dir
        @lock = dirs[(i + 1) % dirs.Length].MakeLock(lockname);
        Assert.IsTrue(@lock.Obtain());
        @lock.Dispose();
    }

    for (int i = 0; i < dirs.Length; i++)
    {
        FSDirectory dir = dirs[i];
        dir.EnsureOpen();
        dir.Dispose();
        Assert.IsFalse(dir.IsOpen);
    }
}
/// <summary>
/// Skip the next block of data.
/// </summary>
/// <param name="in"> the input where to read data </param>
/// <exception cref="IOException"> If there is a low-level I/O error </exception>
public void SkipBlock(IndexInput @in)
{
    int numBits = @in.ReadByte();
    if (numBits == ALL_VALUES_EQUAL)
    {
        @in.ReadVInt();
        return;
    }
    Debug.Assert(numBits > 0 && numBits <= 32, numBits.ToString());
    int encodedSize = EncodedSizes[numBits];
    @in.Seek(@in.FilePointer + encodedSize);
}
/// <summary>
/// Read the next block of data (<code>For</code> format).
/// </summary>
/// <param name="in"> the input to use to read data </param>
/// <param name="encoded"> a buffer that can be used to store encoded data </param>
/// <param name="decoded"> where to write decoded data </param>
/// <exception cref="IOException"> If there is a low-level I/O error </exception>
public void ReadBlock(IndexInput @in, sbyte[] encoded, int[] decoded)
{
    int numBits = @in.ReadByte();
    Debug.Assert(numBits <= 32, numBits.ToString());

    if (numBits == ALL_VALUES_EQUAL)
    {
        int value = @in.ReadVInt();
        CollectionsHelper.Fill(decoded, 0, Lucene41PostingsFormat.BLOCK_SIZE, value);
        return;
    }

    int encodedSize = EncodedSizes[numBits];
    @in.ReadBytes(encoded, 0, encodedSize);

    PackedInts.Decoder decoder = Decoders[numBits];
    int iters = Iterations[numBits];
    Debug.Assert(iters * decoder.ByteValueCount() >= Lucene41PostingsFormat.BLOCK_SIZE);

    decoder.Decode(encoded, 0, decoded, 0, iters);
}
public virtual void TestCopyBytesMem()
{
    int num = AtLeast(10);
    for (int iter = 0; iter < num; iter++)
    {
        Directory dir = NewDirectory();
        if (VERBOSE)
        {
            Console.WriteLine("TEST: iter=" + iter + " dir=" + dir);
        }

        // make random file
        IndexOutput @out = dir.CreateOutput("test", NewIOContext(Random()));
        var bytes = new byte[TestUtil.NextInt(Random(), 1, 77777)];
        int size = TestUtil.NextInt(Random(), 1, 1777777);
        int upto = 0;
        int byteUpto = 0;
        while (upto < size)
        {
            bytes[byteUpto++] = Value(upto);
            upto++;
            if (byteUpto == bytes.Length)
            {
                @out.WriteBytes(bytes, 0, bytes.Length);
                byteUpto = 0;
            }
        }

        @out.WriteBytes(bytes, 0, byteUpto);
        Assert.AreEqual(size, @out.GetFilePointer());
        @out.Dispose();
        Assert.AreEqual(size, dir.FileLength("test"));

        // copy from test -> test2
        IndexInput @in = dir.OpenInput("test", NewIOContext(Random()));
        @out = dir.CreateOutput("test2", NewIOContext(Random()));
        upto = 0;
        while (upto < size)
        {
            if (Random().NextBoolean())
            {
                @out.WriteByte(@in.ReadByte());
                upto++;
            }
            else
            {
                int chunk = Math.Min(TestUtil.NextInt(Random(), 1, bytes.Length), size - upto);
                @out.CopyBytes(@in, chunk);
                upto += chunk;
            }
        }
        Assert.AreEqual(size, upto);
        @out.Dispose();
        @in.Dispose();

        // verify
        IndexInput in2 = dir.OpenInput("test2", NewIOContext(Random()));
        upto = 0;
        while (upto < size)
        {
            if (Random().NextBoolean())
            {
                var v = in2.ReadByte();
                Assert.AreEqual(Value(upto), v);
                upto++;
            }
            else
            {
                int limit = Math.Min(TestUtil.NextInt(Random(), 1, bytes.Length), size - upto);
                in2.ReadBytes(bytes, 0, limit);
                for (int byteIdx = 0; byteIdx < limit; byteIdx++)
                {
                    Assert.AreEqual(Value(upto), bytes[byteIdx]);
                    upto++;
                }
            }
        }
        in2.Dispose();

        dir.DeleteFile("test");
        dir.DeleteFile("test2");
        dir.Dispose();
    }
}
private void ReadFields(IndexInput meta)
{
    int fieldNumber = meta.ReadVInt();
    while (fieldNumber != -1)
    {
        int fieldType = meta.ReadByte();
        if (fieldType == NUMBER)
        {
            numerics[fieldNumber] = ReadNumericEntry(meta);
        }
        else if (fieldType == BYTES)
        {
            binaries[fieldNumber] = ReadBinaryEntry(meta);
        }
        else if (fieldType == SORTED)
        {
            sorteds[fieldNumber] = ReadSortedEntry(meta);
        }
        else if (fieldType == SORTED_SET)
        {
            sortedSets[fieldNumber] = ReadSortedSetEntry(meta);
        }
        else
        {
            throw new CorruptIndexException("invalid entry type: " + fieldType + ", input=" + meta);
        }
        fieldNumber = meta.ReadVInt();
    }
}
private static NumericEntry ReadNumericEntry(IndexInput meta)
{
    var entry = new NumericEntry
    {
        offset = meta.ReadLong(),
        count = meta.ReadInt(),
        missingOffset = meta.ReadLong()
    };
    if (entry.missingOffset != -1)
    {
        entry.missingBytes = meta.ReadLong();
    }
    else
    {
        entry.missingBytes = 0;
    }
    entry.byteWidth = meta.ReadByte();
    return entry;
}
private void ReadFields(IndexInput meta, FieldInfos infos)
{
    int fieldNumber = meta.ReadVInt();
    while (fieldNumber != -1)
    {
        int fieldType = meta.ReadByte();
        if (fieldType == NUMBER)
        {
            var entry = new NumericEntry { offset = meta.ReadLong(), missingOffset = meta.ReadLong() };
            if (entry.missingOffset != -1)
            {
                entry.missingBytes = meta.ReadLong();
            }
            else
            {
                entry.missingBytes = 0;
            }
            entry.format = meta.ReadByte();
            switch (entry.format)
            {
                case DELTA_COMPRESSED:
                case TABLE_COMPRESSED:
                case GCD_COMPRESSED:
                case UNCOMPRESSED:
                    break;
                default:
                    throw new CorruptIndexException("Unknown format: " + entry.format + ", input=" + meta);
            }
            if (entry.format != UNCOMPRESSED)
            {
                entry.packedIntsVersion = meta.ReadVInt();
            }
            numerics[fieldNumber] = entry;
        }
        else if (fieldType == BYTES)
        {
            var entry = new BinaryEntry
            {
                offset = meta.ReadLong(),
                numBytes = meta.ReadLong(),
                missingOffset = meta.ReadLong()
            };
            if (entry.missingOffset != -1)
            {
                entry.missingBytes = meta.ReadLong();
            }
            else
            {
                entry.missingBytes = 0;
            }
            entry.minLength = meta.ReadVInt();
            entry.maxLength = meta.ReadVInt();
            if (entry.minLength != entry.maxLength)
            {
                entry.packedIntsVersion = meta.ReadVInt();
                entry.blockSize = meta.ReadVInt();
            }
            binaries[fieldNumber] = entry;
        }
        else if (fieldType == FST)
        {
            var entry = new FSTEntry { offset = meta.ReadLong(), numOrds = meta.ReadVLong() };
            fsts[fieldNumber] = entry;
        }
        else
        {
            throw new CorruptIndexException("invalid entry type: " + fieldType + ", input=" + meta);
        }
        fieldNumber = meta.ReadVInt();
    }
}
public override byte ReadByte()
{
    EnsureOpen();
    return @delegate.ReadByte();
}
private void ReadFields(IndexInput meta, FieldInfos infos)
{
    int fieldNumber = meta.ReadVInt();
    while (fieldNumber != -1)
    {
        // check should be: infos.fieldInfo(fieldNumber) != null, which incorporates negative check
        // but docvalues updates are currently buggy here (loading extra stuff, etc): LUCENE-5616
        if (fieldNumber < 0)
        {
            // trickier to validate more: because we re-use for norms, because we use multiple entries
            // for "composite" types like sortedset, etc.
            throw new CorruptIndexException("Invalid field number: " + fieldNumber + ", input=" + meta);
        }

        int fieldType = meta.ReadByte();
        if (fieldType == NUMBER)
        {
            var entry = new NumericEntry { Offset = meta.ReadLong(), Format = (sbyte)meta.ReadByte() };
            switch (entry.Format)
            {
                case DELTA_COMPRESSED:
                case TABLE_COMPRESSED:
                case GCD_COMPRESSED:
                case UNCOMPRESSED:
                    break;
                default:
                    throw new CorruptIndexException("Unknown format: " + entry.Format + ", input=" + meta);
            }
            if (entry.Format != UNCOMPRESSED)
            {
                entry.PackedIntsVersion = meta.ReadVInt();
            }
            Numerics[fieldNumber] = entry;
        }
        else if (fieldType == BYTES)
        {
            BinaryEntry entry = new BinaryEntry();
            entry.Offset = meta.ReadLong();
            entry.NumBytes = meta.ReadLong();
            entry.MinLength = meta.ReadVInt();
            entry.MaxLength = meta.ReadVInt();
            if (entry.MinLength != entry.MaxLength)
            {
                entry.PackedIntsVersion = meta.ReadVInt();
                entry.BlockSize = meta.ReadVInt();
            }
            Binaries[fieldNumber] = entry;
        }
        else if (fieldType == FST)
        {
            FSTEntry entry = new FSTEntry();
            entry.Offset = meta.ReadLong();
            entry.NumOrds = meta.ReadVLong();
            Fsts[fieldNumber] = entry;
        }
        else
        {
            throw new CorruptIndexException("invalid entry type: " + fieldType + ", input=" + meta);
        }
        fieldNumber = meta.ReadVInt();
    }
}
/// <summary>
/// Construct a new SegmentInfo instance by reading a
/// previously saved SegmentInfo from input.
/// </summary>
/// <param name="dir">directory to load from</param>
/// <param name="format">format of the segments info file</param>
/// <param name="input">input handle to read segment info from</param>
internal SegmentInfo(Directory dir, int format, IndexInput input, IState state)
{
    this.dir = dir;
    name = input.ReadString(state);
    docCount = input.ReadInt(state);
    if (format <= SegmentInfos.FORMAT_LOCKLESS)
    {
        delGen = input.ReadLong(state);
        if (format <= SegmentInfos.FORMAT_SHARED_DOC_STORE)
        {
            docStoreOffset = input.ReadInt(state);
            if (docStoreOffset != -1)
            {
                docStoreSegment = input.ReadString(state);
                docStoreIsCompoundFile = (1 == input.ReadByte(state));
            }
            else
            {
                docStoreSegment = name;
                docStoreIsCompoundFile = false;
            }
        }
        else
        {
            docStoreOffset = -1;
            docStoreSegment = name;
            docStoreIsCompoundFile = false;
        }
        if (format <= SegmentInfos.FORMAT_SINGLE_NORM_FILE)
        {
            hasSingleNormFile = (1 == input.ReadByte(state));
        }
        else
        {
            hasSingleNormFile = false;
        }
        int numNormGen = input.ReadInt(state);
        if (numNormGen == NO)
        {
            normGen = null;
        }
        else
        {
            normGen = new long[numNormGen];
            for (int j = 0; j < numNormGen; j++)
            {
                normGen[j] = input.ReadLong(state);
            }
        }
        isCompoundFile = (sbyte)input.ReadByte(state);
        preLockless = (isCompoundFile == CHECK_DIR);
        if (format <= SegmentInfos.FORMAT_DEL_COUNT)
        {
            delCount = input.ReadInt(state);
            System.Diagnostics.Debug.Assert(delCount <= docCount);
        }
        else
        {
            delCount = -1;
        }
        if (format <= SegmentInfos.FORMAT_HAS_PROX)
        {
            hasProx = input.ReadByte(state) == 1;
        }
        else
        {
            hasProx = true;
        }
        if (format <= SegmentInfos.FORMAT_DIAGNOSTICS)
        {
            diagnostics = input.ReadStringStringMap(state);
        }
        else
        {
            diagnostics = new Dictionary<string, string>();
        }
    }
    else
    {
        delGen = CHECK_DIR;
        normGen = null;
        isCompoundFile = (sbyte)CHECK_DIR;
        preLockless = true;
        hasSingleNormFile = false;
        docStoreOffset = -1;
        docStoreIsCompoundFile = false;
        docStoreSegment = null;
        delCount = -1;
        hasProx = true;
        diagnostics = new Dictionary<string, string>();
    }
}
/// <summary>
/// Helper method that reads CFS entries from an input stream.
/// </summary>
private static IDictionary<string, FileEntry> ReadEntries(IndexInputSlicer handle, Directory dir, string name)
{
    IOException priorE = null;
    IndexInput stream = null;
    ChecksumIndexInput entriesStream = null;
    // read the first VInt. If it is negative, it's the version number
    // otherwise it's the count (pre-3.1 indexes)
    try
    {
        IDictionary<string, FileEntry> mapping;
#pragma warning disable 612, 618
        stream = handle.OpenFullSlice();
#pragma warning restore 612, 618
        int firstInt = stream.ReadVInt32();
        // impossible for 3.0 to have 63 files in a .cfs, CFS writer was not visible
        // and separate norms/etc are outside of cfs.
        if (firstInt == CODEC_MAGIC_BYTE1)
        {
            sbyte secondByte = (sbyte)stream.ReadByte();
            sbyte thirdByte = (sbyte)stream.ReadByte();
            sbyte fourthByte = (sbyte)stream.ReadByte();
            if (secondByte != CODEC_MAGIC_BYTE2 || thirdByte != CODEC_MAGIC_BYTE3 || fourthByte != CODEC_MAGIC_BYTE4)
            {
                throw new CorruptIndexException("Illegal/impossible header for CFS file: " + secondByte + "," + thirdByte + "," + fourthByte);
            }

            int version = CodecUtil.CheckHeaderNoMagic(stream, CompoundFileWriter.DATA_CODEC, CompoundFileWriter.VERSION_START, CompoundFileWriter.VERSION_CURRENT);
            string entriesFileName = IndexFileNames.SegmentFileName(IndexFileNames.StripExtension(name), "", IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION);
            entriesStream = dir.OpenChecksumInput(entriesFileName, IOContext.READ_ONCE);
            CodecUtil.CheckHeader(entriesStream, CompoundFileWriter.ENTRY_CODEC, CompoundFileWriter.VERSION_START, CompoundFileWriter.VERSION_CURRENT);
            int numEntries = entriesStream.ReadVInt32();
            mapping = new Dictionary<string, FileEntry>(numEntries);
            for (int i = 0; i < numEntries; i++)
            {
                FileEntry fileEntry = new FileEntry();
                string id = entriesStream.ReadString();
                FileEntry previous = mapping.Put(id, fileEntry);
                if (previous != null)
                {
                    throw new CorruptIndexException("Duplicate cfs entry id=" + id + " in CFS: " + entriesStream);
                }
                fileEntry.Offset = entriesStream.ReadInt64();
                fileEntry.Length = entriesStream.ReadInt64();
            }
            if (version >= CompoundFileWriter.VERSION_CHECKSUM)
            {
                CodecUtil.CheckFooter(entriesStream);
            }
            else
            {
#pragma warning disable 612, 618
                CodecUtil.CheckEOF(entriesStream);
#pragma warning restore 612, 618
            }
        }
        else
        {
            // TODO remove once 3.x is not supported anymore
            mapping = ReadLegacyEntries(stream, firstInt);
        }
        return mapping;
    }
    catch (IOException ioe)
    {
        priorE = ioe;
    }
    finally
    {
        IOUtils.DisposeWhileHandlingException(priorE, stream, entriesStream);
    }
    // this is needed until Java 7's real try-with-resources:
    throw new InvalidOperationException("impossible to get here");
}
public virtual void TestDirectInstantiation()
{
    System.IO.DirectoryInfo path = new System.IO.DirectoryInfo(AppSettings.Get("tempDir", System.IO.Path.GetTempPath()));

    int sz = 2;
    Directory[] dirs = new Directory[sz];

    dirs[0] = new SimpleFSDirectory(path, null);
    // dirs[1] = new NIOFSDirectory(path, null);
    System.Console.WriteLine("Skipping NIOFSDirectory() test under Lucene.Net");
    dirs[1] = new MMapDirectory(path, null);

    for (int i = 0; i < sz; i++)
    {
        Directory dir = dirs[i];
        dir.EnsureOpen();
        System.String fname = "foo." + i;
        System.String lockname = "foo" + i + ".lck";
        IndexOutput out_Renamed = dir.CreateOutput(fname, null);
        out_Renamed.WriteByte((byte)i);
        out_Renamed.Close();

        for (int j = 0; j < sz; j++)
        {
            Directory d2 = dirs[j];
            d2.EnsureOpen();
            Assert.IsTrue(d2.FileExists(fname, null));
            Assert.AreEqual(1, d2.FileLength(fname, null));

            // don't test read on MMapDirectory, since it can't really be
            // closed and will cause a failure to delete the file.
            if (d2 is MMapDirectory)
            {
                continue;
            }

            IndexInput input = d2.OpenInput(fname, null);
            Assert.AreEqual((byte)i, input.ReadByte(null));
            input.Close();
        }

        // delete with a different dir
        dirs[(i + 1) % sz].DeleteFile(fname, null);

        for (int j = 0; j < sz; j++)
        {
            Directory d2 = dirs[j];
            Assert.IsFalse(d2.FileExists(fname, null));
        }

        Lock lock_Renamed = dir.MakeLock(lockname);
        Assert.IsTrue(lock_Renamed.Obtain());

        for (int j = 0; j < sz; j++)
        {
            Directory d2 = dirs[j];
            Lock lock2 = d2.MakeLock(lockname);
            try
            {
                Assert.IsFalse(lock2.Obtain(1));
            }
            catch (LockObtainFailedException)
            {
                // OK
            }
        }

        lock_Renamed.Release();

        // now lock with different dir
        lock_Renamed = dirs[(i + 1) % sz].MakeLock(lockname);
        Assert.IsTrue(lock_Renamed.Obtain());
        lock_Renamed.Release();
    }

    for (int i = 0; i < sz; i++)
    {
        Directory dir = dirs[i];
        dir.EnsureOpen();
        dir.Close();
        Assert.IsFalse(dir.isOpen_ForNUnit);
    }
}
/// <summary>Reads a term vector for the given field from the tvf stream and feeds it to the mapper.</summary>
/// <param name="field">The field to read in</param>
/// <param name="tvfPointer">The pointer within the tvf file where we should start reading</param>
/// <param name="mapper">The mapper used to map the TermVector</param>
/// <throws> IOException </throws>
private void ReadTermVector(System.String field, long tvfPointer, TermVectorMapper mapper, IState state)
{
    // Now read the data from the specified position.
    // We don't need to offset by the FORMAT here since the pointer already includes the offset.
    tvf.Seek(tvfPointer, state);

    int numTerms = tvf.ReadVInt(state);
    //System.out.println("Num Terms: " + numTerms);
    // If no terms - return a constant empty termvector. However, this should never occur!
    if (numTerms == 0)
    {
        return;
    }

    bool storePositions;
    bool storeOffsets;

    if (format >= FORMAT_VERSION)
    {
        byte bits = tvf.ReadByte(state);
        storePositions = (bits & STORE_POSITIONS_WITH_TERMVECTOR) != 0;
        storeOffsets = (bits & STORE_OFFSET_WITH_TERMVECTOR) != 0;
    }
    else
    {
        tvf.ReadVInt(state);
        storePositions = false;
        storeOffsets = false;
    }
    mapper.SetExpectations(field, numTerms, storeOffsets, storePositions);

    int start = 0;
    int deltaLength = 0;
    int totalLength = 0;
    byte[] byteBuffer;
    char[] charBuffer;
    bool preUTF8 = format < FORMAT_UTF8_LENGTH_IN_BYTES;

    // init the buffers
    if (preUTF8)
    {
        charBuffer = new char[10];
        byteBuffer = null;
    }
    else
    {
        charBuffer = null;
        byteBuffer = new byte[20];
    }

    for (int i = 0; i < numTerms; i++)
    {
        start = tvf.ReadVInt(state);
        deltaLength = tvf.ReadVInt(state);
        totalLength = start + deltaLength;

        System.String term;

        if (preUTF8)
        {
            // Term stored as java chars
            if (charBuffer.Length < totalLength)
            {
                char[] newCharBuffer = new char[(int)(1.5 * totalLength)];
                Array.Copy(charBuffer, 0, newCharBuffer, 0, start);
                charBuffer = newCharBuffer;
            }
            tvf.ReadChars(charBuffer, start, deltaLength, state);
            term = new System.String(charBuffer, 0, totalLength);
        }
        else
        {
            // Term stored as utf8 bytes
            if (byteBuffer.Length < totalLength)
            {
                byte[] newByteBuffer = new byte[(int)(1.5 * totalLength)];
                Array.Copy(byteBuffer, 0, newByteBuffer, 0, start);
                byteBuffer = newByteBuffer;
            }
            tvf.ReadBytes(byteBuffer, start, deltaLength, state);
            term = System.Text.Encoding.UTF8.GetString(byteBuffer, 0, totalLength);
        }

        int freq = tvf.ReadVInt(state);
        int[] positions = null;
        if (storePositions)
        {
            // read in the positions
            // does the mapper even care about positions?
            if (mapper.IsIgnoringPositions == false)
            {
                positions = new int[freq];
                int prevPosition = 0;
                for (int j = 0; j < freq; j++)
                {
                    positions[j] = prevPosition + tvf.ReadVInt(state);
                    prevPosition = positions[j];
                }
            }
            else
            {
                // we need to skip over the positions. Since these are VInts, I don't believe
                // there is any way to know for sure how far to skip
                for (int j = 0; j < freq; j++)
                {
                    tvf.ReadVInt(state);
                }
            }
        }

        TermVectorOffsetInfo[] offsets = null;
        if (storeOffsets)
        {
            // does the mapper even care about offsets?
            if (mapper.IsIgnoringOffsets == false)
            {
                offsets = new TermVectorOffsetInfo[freq];
                int prevOffset = 0;
                for (int j = 0; j < freq; j++)
                {
                    int startOffset = prevOffset + tvf.ReadVInt(state);
                    int endOffset = startOffset + tvf.ReadVInt(state);
                    offsets[j] = new TermVectorOffsetInfo(startOffset, endOffset);
                    prevOffset = endOffset;
                }
            }
            else
            {
                for (int j = 0; j < freq; j++)
                {
                    tvf.ReadVInt(state);
                    tvf.ReadVInt(state);
                }
            }
        }

        mapper.Map(term, freq, offsets, positions);
    }
}
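The term loop above relies on shared-prefix coding: `start` is the number of leading characters the current term shares with the previous term, and `deltaLength` is the length of the new suffix, which is why the previous term's buffer is reused. A hedged, self-contained sketch of that decoding idea follows; the class and method names are illustrative, not the Lucene.Net API.

// Hypothetical sketch of shared-prefix term decoding; not the actual TermVectorsReader code.
using System.Collections.Generic;

internal static class PrefixCodedTermsSketch
{
    // Each encoded term is (prefixLength, suffix): reuse prefixLength chars of the
    // previous term and append the new suffix, just as ReadTermVector reuses its buffer.
    public static IEnumerable<string> Decode(IEnumerable<(int PrefixLength, string Suffix)> encoded)
    {
        string previous = string.Empty;
        foreach (var (prefixLength, suffix) in encoded)
        {
            string term = previous.Substring(0, prefixLength) + suffix;
            previous = term;
            yield return term;
        }
    }
}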