/// <summary>
/// Advances to the next live document in this plain-text postings stream and
/// returns its doc id, or <c>NO_MORE_DOCS</c> when the term's postings are
/// exhausted. Scans the text file line by line, recognizing the DOC / FREQ /
/// POS / START_OFFSET / END_OFFSET / PAYLOAD markers written by
/// <c>SimpleTextFieldsWriter</c>.
/// </summary>
public override int NextDoc()
{
    // Already exhausted on a prior call.
    if (_docId == NO_MORE_DOCS)
    {
        return(_docId);
    }
    bool first = true;   // true until the first DOC line has been parsed
    int termFreq = 0;    // freq accumulated for the doc currently being scanned
    while (true)
    {
        // Remember where this line starts so we can rewind to it when the
        // previously-parsed doc turns out to be the one to return.
        long lineStart = _in.GetFilePointer();
        SimpleTextUtil.ReadLine(_in, _scratch);
        if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.DOC))
        {
            // A new DOC line ends the previous doc's entry; if that previous
            // doc is live (or there are no deletions), rewind and return it.
            if (!first && (_liveDocs == null || _liveDocs.Get(_docId)))
            {
                _in.Seek(lineStart);
                if (!_omitTf)
                {
                    _tf = termFreq;
                }
                return(_docId);
            }
            // Parse the doc id that follows the DOC prefix.
            UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.DOC.Length, _scratch.Length - SimpleTextFieldsWriter.DOC.Length, _scratchUtf16);
            _docId = ArrayUtil.ParseInt32(_scratchUtf16.Chars, 0, _scratchUtf16.Length);
            termFreq = 0;
            first = false;
        }
        else if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FREQ))
        {
            // Explicit FREQ line overrides any POS-derived count.
            UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.FREQ.Length, _scratch.Length - SimpleTextFieldsWriter.FREQ.Length, _scratchUtf16);
            termFreq = ArrayUtil.ParseInt32(_scratchUtf16.Chars, 0, _scratchUtf16.Length);
        }
        else if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.POS))
        {
            // No FREQ line in this format variant: count positions instead.
            termFreq++;
        }
        else if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.START_OFFSET))
        {
            // skip — offsets are not needed for doc iteration
        }
        else if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END_OFFSET))
        {
            // skip
        }
        else if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.PAYLOAD))
        {
            // skip
        }
        else
        {
            // Anything else must be the start of the next term/field or the
            // end marker — i.e. the end of this term's postings.
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(
                    StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM)
                    || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD)
                    || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END),
                    "scratch={0}", _scratch.Utf8ToString());
            }
            // Return the final pending doc if it is live.
            if (!first && (_liveDocs == null || _liveDocs.Get(_docId)))
            {
                _in.Seek(lineStart);
                if (!_omitTf)
                {
                    _tf = termFreq;
                }
                return(_docId);
            }
            return(_docId = NO_MORE_DOCS);
        }
    }
}
/// <summary>
/// Sole constructor. Opens the stored-fields index (<c>.fdx</c>-style) and data
/// (<c>.fdt</c>-style) files for the given segment, validates their codec
/// headers and (for checksummed versions) footers, and reads the data-file
/// metadata (chunk size, packed-ints version).
/// </summary>
/// <exception cref="CorruptIndexException">
/// if the data file is truncated relative to the index's max pointer, or the
/// index and data file versions disagree.
/// </exception>
public CompressingStoredFieldsReader(Directory d, SegmentInfo si, string segmentSuffix, FieldInfos fn, IOContext context, string formatName, CompressionMode compressionMode)
{
    this.compressionMode = compressionMode;
    string segment = si.Name;
    bool success = false;   // set true only after full init; gates cleanup in finally
    fieldInfos = fn;
    numDocs = si.DocCount;
    ChecksumIndexInput indexStream = null;
    try
    {
        string indexStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION);
        string fieldsStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_EXTENSION);
        // Load the index into memory
        indexStream = d.OpenChecksumInput(indexStreamFN, context);
        string codecNameIdx = formatName + CompressingStoredFieldsWriter.CODEC_SFX_IDX;
        version = CodecUtil.CheckHeader(indexStream, codecNameIdx, CompressingStoredFieldsWriter.VERSION_START, CompressingStoredFieldsWriter.VERSION_CURRENT);
        Debug.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer());
        indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);
        long maxPointer = -1;
        if (version >= CompressingStoredFieldsWriter.VERSION_CHECKSUM)
        {
            // Newer versions record the data file's logical end and a footer checksum.
            maxPointer = indexStream.ReadVInt64();
            CodecUtil.CheckFooter(indexStream);
        }
        else
        {
#pragma warning disable 612, 618
            CodecUtil.CheckEOF(indexStream);
#pragma warning restore 612, 618
        }
        indexStream.Dispose();
        // Null out so the finally block does not double-dispose on later failure.
        indexStream = null;
        // Open the data file and read metadata
        fieldsStream = d.OpenInput(fieldsStreamFN, context);
        if (version >= CompressingStoredFieldsWriter.VERSION_CHECKSUM)
        {
            // The data file must be exactly maxPointer + footer long, else it was truncated.
            if (maxPointer + CodecUtil.FooterLength() != fieldsStream.Length)
            {
                throw new CorruptIndexException("Invalid fieldsStream maxPointer (file truncated?): maxPointer=" + maxPointer + ", length=" + fieldsStream.Length);
            }
        }
        else
        {
            maxPointer = fieldsStream.Length;
        }
        this.maxPointer = maxPointer;
        string codecNameDat = formatName + CompressingStoredFieldsWriter.CODEC_SFX_DAT;
        int fieldsVersion = CodecUtil.CheckHeader(fieldsStream, codecNameDat, CompressingStoredFieldsWriter.VERSION_START, CompressingStoredFieldsWriter.VERSION_CURRENT);
        // Index and data were written together; a version mismatch means corruption.
        if (version != fieldsVersion)
        {
            throw new CorruptIndexException("Version mismatch between stored fields index and data: " + version + " != " + fieldsVersion);
        }
        Debug.Assert(CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer());
        if (version >= CompressingStoredFieldsWriter.VERSION_BIG_CHUNKS)
        {
            chunkSize = fieldsStream.ReadVInt32();
        }
        else
        {
            // Sentinel: pre-BIG_CHUNKS files have no recorded chunk size.
            chunkSize = -1;
        }
        packedIntsVersion = fieldsStream.ReadVInt32();
        decompressor = compressionMode.NewDecompressor();
        this.bytes = new BytesRef();
        success = true;
    }
    finally
    {
        if (!success)
        {
            // Dispose whatever was opened (this closes fieldsStream via this)
            // without masking the original exception.
            IOUtils.DisposeWhileHandlingException(this, indexStream);
        }
    }
}
// LUCENENET NOTE: Changed from public to internal because the class had to be made public, but is not for public use.
/// <summary>
/// Opens the plain-text doc-values file for the segment and reads all field
/// headers, recording for each field its type-specific metadata (min value,
/// pattern, max length, value count) and the file pointer where its data
/// begins. The per-document data itself is skipped over via <c>Seek</c> and
/// read lazily later.
/// </summary>
internal SimpleTextDocValuesReader(SegmentReadState state, string ext)
{
    data = state.Directory.OpenInput(
        IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, ext), state.Context);
    maxDoc = state.SegmentInfo.DocCount;
    while (true)
    {
        ReadLine();
        // END marker terminates the whole file.
        if (scratch.Equals(SimpleTextDocValuesWriter.END))
        {
            break;
        }
        Debug.Assert(StartsWith(SimpleTextDocValuesWriter.FIELD), scratch.Utf8ToString());
        var fieldName = StripPrefix(SimpleTextDocValuesWriter.FIELD);
        var field = new OneField();
        fields[fieldName] = field;
        ReadLine();
        Debug.Assert(StartsWith(SimpleTextDocValuesWriter.TYPE), scratch.Utf8ToString());
        var dvType = (DocValuesType) Enum.Parse(typeof(DocValuesType), StripPrefix(SimpleTextDocValuesWriter.TYPE));
        if (dvType == DocValuesType.NUMERIC)
        {
            ReadLine();
            Debug.Assert(StartsWith(SimpleTextDocValuesWriter.MINVALUE), "got " + scratch.Utf8ToString() + " field=" + fieldName + " ext=" + ext);
            field.MinValue = Convert.ToInt64(StripPrefix(SimpleTextDocValuesWriter.MINVALUE), CultureInfo.InvariantCulture);
            ReadLine();
            Debug.Assert(StartsWith(SimpleTextDocValuesWriter.PATTERN));
            field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN);
            field.DataStartFilePointer = data.GetFilePointer();
            // Skip over maxDoc fixed-width entries to reach the next field header.
            // NOTE(review): the per-entry width constants (1 + ..., 9 + ... below)
            // mirror what SimpleTextDocValuesWriter emits — confirm against the
            // writer if the format changes. The multiplication is int arithmetic;
            // presumably fine for SimpleText-sized segments, but could overflow
            // for extreme maxDoc/pattern sizes — TODO confirm.
            data.Seek(data.GetFilePointer() + (1 + field.Pattern.Length + 2) * maxDoc);
        }
        else if (dvType == DocValuesType.BINARY)
        {
            ReadLine();
            Debug.Assert(StartsWith(SimpleTextDocValuesWriter.MAXLENGTH));
            field.MaxLength = Convert.ToInt32(StripPrefix(SimpleTextDocValuesWriter.MAXLENGTH), CultureInfo.InvariantCulture);
            ReadLine();
            Debug.Assert(StartsWith(SimpleTextDocValuesWriter.PATTERN));
            field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN);
            field.DataStartFilePointer = data.GetFilePointer();
            // Skip per-doc entries: length line plus max-length payload per document.
            data.Seek(data.GetFilePointer() + (9 + field.Pattern.Length + field.MaxLength + 2) * maxDoc);
        }
        else if (dvType == DocValuesType.SORTED || dvType == DocValuesType.SORTED_SET)
        {
            ReadLine();
            Debug.Assert(StartsWith(SimpleTextDocValuesWriter.NUMVALUES));
            field.NumValues = Convert.ToInt64(StripPrefix(SimpleTextDocValuesWriter.NUMVALUES), CultureInfo.InvariantCulture);
            ReadLine();
            Debug.Assert(StartsWith(SimpleTextDocValuesWriter.MAXLENGTH));
            field.MaxLength = Convert.ToInt32(StripPrefix(SimpleTextDocValuesWriter.MAXLENGTH), CultureInfo.InvariantCulture);
            ReadLine();
            Debug.Assert(StartsWith(SimpleTextDocValuesWriter.PATTERN));
            field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN);
            ReadLine();
            Debug.Assert(StartsWith(SimpleTextDocValuesWriter.ORDPATTERN));
            field.OrdPattern = StripPrefix(SimpleTextDocValuesWriter.ORDPATTERN);
            field.DataStartFilePointer = data.GetFilePointer();
            // Skip the shared value dictionary (NumValues entries) plus the
            // per-document ordinal section (maxDoc entries).
            data.Seek(data.GetFilePointer() + (9 + field.Pattern.Length + field.MaxLength) * field.NumValues + (1 + field.OrdPattern.Length) * maxDoc);
        }
        else
        {
            // Unrecognized DocValuesType in the file header.
            throw new ArgumentOutOfRangeException();
        }
    }
    // We should only be called from above if at least one
    // field has DVs:
    Debug.Assert(fields.Count > 0);
}
/// <summary>
/// Returns the next position of the current document, decoding lazily from
/// the prox stream. Handles catching up when earlier documents were iterated
/// without their positions being consumed, and skips over any payload bytes
/// that were never retrieved.
/// </summary>
public override int NextPosition()
{
    // First access since the last doc advance: jump to the deferred prox offset.
    if (lazyProxPointer != -1)
    {
        proxIn.Seek(lazyProxPointer);
        lazyProxPointer = -1;
    }
    if (payloadPending && payloadLength > 0)
    {
        // payload of last position was never retrieved -- skip it
        proxIn.Seek(proxIn.GetFilePointer() + payloadLength);
        payloadPending = false;
    }
    // scan over any docs that were iterated without their positions
    while (posPendingCount > freq)
    {
        int code = proxIn.ReadVInt32();
        if (storePayloads)
        {
            // Low bit set means a new payload length precedes the delta.
            if ((code & 1) != 0)
            {
                // new payload length
                payloadLength = proxIn.ReadVInt32();
                if (Debugging.AssertsEnabled)
                {
                    Debugging.Assert(payloadLength >= 0);
                }
            }
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(payloadLength != -1);
            }
        }
        if (storeOffsets)
        {
            if ((proxIn.ReadVInt32() & 1) != 0)
            {
                // new offset length
                offsetLength = proxIn.ReadVInt32();
            }
        }
        if (storePayloads)
        {
            // Skip the payload bytes of this position we are fast-forwarding past.
            proxIn.Seek(proxIn.GetFilePointer() + payloadLength);
        }
        posPendingCount--;
        // Deltas restart at zero for each document.
        position = 0;
        startOffset = 0;
        payloadPending = false;
        //System.out.println("StandardR.D&PE skipPos");
    }
    // read next position
    if (payloadPending && payloadLength > 0)
    {
        // payload wasn't retrieved for last position
        proxIn.Seek(proxIn.GetFilePointer() + payloadLength);
    }
    int code_ = proxIn.ReadVInt32();
    if (storePayloads)
    {
        if ((code_ & 1) != 0)
        {
            // new payload length
            payloadLength = proxIn.ReadVInt32();
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(payloadLength >= 0);
            }
        }
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(payloadLength != -1);
        }
        payloadPending = true;
        // Remaining bits (after the payload-length flag) hold the position delta.
        code_ = (int)((uint)code_ >> 1);
    }
    position += code_;
    if (storeOffsets)
    {
        int offsetCode = proxIn.ReadVInt32();
        if ((offsetCode & 1) != 0)
        {
            // new offset length
            offsetLength = proxIn.ReadVInt32();
        }
        startOffset += (int)((uint)offsetCode >> 1);
    }
    posPendingCount--;
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(posPendingCount >= 0, "NextPosition() was called too many times (more than Freq times) posPendingCount={0}", posPendingCount);
    }
    //System.out.println("StandardR.D&PE nextPos return pos=" + position);
    return(position);
}
/// <summary>
/// Reads the stored fields of document <paramref name="docID"/> and hands each
/// one to <paramref name="visitor"/>. Seeks to the document's compressed chunk,
/// decodes the per-document field counts and lengths (either uniform or
/// packed), decompresses the relevant slice, and then dispatches each field
/// according to <see cref="StoredFieldVisitor.NeedsField"/>.
/// </summary>
/// <exception cref="CorruptIndexException">
/// if the chunk header is inconsistent with <paramref name="docID"/>, a
/// bits-per-value exceeds 31, or the length/field-count invariants are violated.
/// </exception>
public override void VisitDocument(int docID, StoredFieldVisitor visitor)
{
    fieldsStream.Seek(indexReader.GetStartPointer(docID));

    int docBase = fieldsStream.ReadVInt32();
    int chunkDocs = fieldsStream.ReadVInt32();
    if (docID < docBase || docID >= docBase + chunkDocs || docBase + chunkDocs > numDocs)
    {
        throw new CorruptIndexException("Corrupted: docID=" + docID + ", docBase=" + docBase + ", chunkDocs=" + chunkDocs + ", numDocs=" + numDocs + " (resource=" + fieldsStream + ")");
    }

    int numStoredFields, offset, length, totalLength;
    if (chunkDocs == 1)
    {
        // Single-doc chunk: counts and lengths are stored directly.
        numStoredFields = fieldsStream.ReadVInt32();
        offset = 0;
        length = fieldsStream.ReadVInt32();
        totalLength = length;
    }
    else
    {
        // Per-doc field counts: 0 bits means all docs share one count.
        int bitsPerStoredFields = fieldsStream.ReadVInt32();
        if (bitsPerStoredFields == 0)
        {
            numStoredFields = fieldsStream.ReadVInt32();
        }
        else if (bitsPerStoredFields > 31)
        {
            throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields + " (resource=" + fieldsStream + ")");
        }
        else
        {
            long filePointer = fieldsStream.GetFilePointer();
            PackedInt32s.Reader reader = PackedInt32s.GetDirectReaderNoHeader(fieldsStream, PackedInt32s.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerStoredFields);
            numStoredFields = (int)(reader.Get(docID - docBase));
            // Skip past the whole packed array; the direct reader does not advance the stream.
            fieldsStream.Seek(filePointer + PackedInt32s.Format.PACKED.ByteCount(packedIntsVersion, chunkDocs, bitsPerStoredFields));
        }

        // Per-doc uncompressed lengths: 0 bits means all docs share one length.
        int bitsPerLength = fieldsStream.ReadVInt32();
        if (bitsPerLength == 0)
        {
            length = fieldsStream.ReadVInt32();
            offset = (docID - docBase) * length;
            totalLength = chunkDocs * length;
        }
        // BUGFIX: this previously tested bitsPerStoredFields (copy-paste from the
        // block above), so a corrupt bitsPerLength was never validated here.
        else if (bitsPerLength > 31)
        {
            throw new CorruptIndexException("bitsPerLength=" + bitsPerLength + " (resource=" + fieldsStream + ")");
        }
        else
        {
            // Sum lengths before our doc (-> offset), our doc's length, and the
            // remainder (-> totalLength of the chunk's uncompressed payload).
            PackedInt32s.IReaderIterator it = PackedInt32s.GetReaderIteratorNoHeader(fieldsStream, PackedInt32s.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerLength, 1);
            int off = 0;
            for (int i = 0; i < docID - docBase; ++i)
            {
                off += (int)it.Next();
            }
            offset = off;
            length = (int)it.Next();
            off += length;
            for (int i = docID - docBase + 1; i < chunkDocs; ++i)
            {
                off += (int)it.Next();
            }
            totalLength = off;
        }
    }

    // A doc has bytes iff it has fields (and vice versa).
    if ((length == 0) != (numStoredFields == 0))
    {
        throw new CorruptIndexException("length=" + length + ", numStoredFields=" + numStoredFields + " (resource=" + fieldsStream + ")");
    }
    if (numStoredFields == 0)
    {
        // nothing to do
        return;
    }

    DataInput documentInput;
    if (version >= CompressingStoredFieldsWriter.VERSION_BIG_CHUNKS && totalLength >= 2 * chunkSize)
    {
        // Big chunk: decompress incrementally via the anonymous DataInput wrapper.
        Debug.Assert(chunkSize > 0);
        Debug.Assert(offset < chunkSize);
        decompressor.Decompress(fieldsStream, chunkSize, offset, Math.Min(length, chunkSize - offset), bytes);
        documentInput = new DataInputAnonymousInnerClassHelper(this, offset, length);
    }
    else
    {
        // Small chunk: decompress the document's slice in one shot, reusing the
        // shared buffer only for modest sizes to avoid pinning large arrays.
        BytesRef bytes = totalLength <= BUFFER_REUSE_THRESHOLD ? this.bytes : new BytesRef();
        decompressor.Decompress(fieldsStream, totalLength, offset, length, bytes);
        Debug.Assert(bytes.Length == length);
        documentInput = new ByteArrayDataInput(bytes.Bytes, bytes.Offset, bytes.Length);
    }

    for (int fieldIDX = 0; fieldIDX < numStoredFields; fieldIDX++)
    {
        // Each field is prefixed by (fieldNumber << TYPE_BITS) | typeBits.
        long infoAndBits = documentInput.ReadVInt64();
        int fieldNumber = (int)((long)((ulong)infoAndBits >> CompressingStoredFieldsWriter.TYPE_BITS));
        FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);

        int bits = (int)(infoAndBits & CompressingStoredFieldsWriter.TYPE_MASK);
        Debug.Assert(bits <= CompressingStoredFieldsWriter.NUMERIC_DOUBLE, "bits=" + bits.ToString("x"));

        switch (visitor.NeedsField(fieldInfo))
        {
            case StoredFieldVisitor.Status.YES:
                ReadField(documentInput, visitor, fieldInfo, bits);
                break;

            case StoredFieldVisitor.Status.NO:
                // Visitor doesn't want this field, but we must still consume its bytes.
                SkipField(documentInput, bits);
                break;

            case StoredFieldVisitor.Status.STOP:
                return;
        }
    }
}
/// <summary>Returns the current read position of the wrapped index input.</summary>
public override long GetFilePointer() => _indexInput.GetFilePointer();
// Does initial decode of next block of terms; this
// doesn't actually decode the docFreq, totalTermFreq,
// postings details (frq/prx offset, etc.) metadata;
// it just loads them as byte[] blobs which are then
// decoded on-demand if the metadata is ever requested
// for any term in this block. This enables terms-only
// intensive consumes (eg certain MTQs, respelling) to
// not pay the price of decoding metadata they won't
// use.
/// <summary>
/// Loads the next on-disk term block: reads the term count, shared prefix
/// length, and three length-prefixed byte blobs (term suffixes, doc/freq
/// stats, metadata), growing the reusable buffers as needed. Returns
/// <c>false</c> when a zero term count marks the end of the field's blocks.
/// </summary>
private bool NextBlock()
{
    // TODO: we still lazy-decode the byte[] for each
    // term (the suffix), but, if we decoded
    // all N terms up front then seeking could do a fast
    // bsearch w/in the block...

    //System.out.println("BTR.nextBlock() fp=" + in.getFilePointer() + " this=" + this);
    state.BlockFilePointer = input.GetFilePointer();
    blockTermCount = input.ReadVInt32();
    //System.out.println("  blockTermCount=" + blockTermCount);
    if (blockTermCount == 0)
    {
        return(false);
    }
    termBlockPrefix = input.ReadVInt32();

    // term suffixes:
    int len = input.ReadVInt32();
    if (termSuffixes.Length < len)
    {
        // Oversize to amortize future growth.
        termSuffixes = new byte[ArrayUtil.Oversize(len, 1)];
    }
    //System.out.println("  termSuffixes len=" + len);
    input.ReadBytes(termSuffixes, 0, len);
    termSuffixesReader.Reset(termSuffixes, 0, len);

    // docFreq, totalTermFreq
    len = input.ReadVInt32();
    if (docFreqBytes.Length < len)
    {
        docFreqBytes = new byte[ArrayUtil.Oversize(len, 1)];
    }
    //System.out.println("  freq bytes len=" + len);
    input.ReadBytes(docFreqBytes, 0, len);
    freqReader.Reset(docFreqBytes, 0, len);

    // metadata
    len = input.ReadVInt32();
    if (bytes == null)
    {
        // First block: allocate both the buffer and its reader lazily.
        bytes = new byte[ArrayUtil.Oversize(len, 1)];
        bytesReader = new ByteArrayDataInput();
    }
    else if (bytes.Length < len)
    {
        bytes = new byte[ArrayUtil.Oversize(len, 1)];
    }
    input.ReadBytes(bytes, 0, len);
    bytesReader.Reset(bytes, 0, len);

    // Reset lazy-decode cursors for the new block.
    metaDataUpto = 0;
    state.TermBlockOrd = 0;

    blocksSinceSeek++;
    // After reading Divisor-many blocks past the last index seek, the term
    // index position is no longer in sync with our stream position.
    indexIsCurrent = indexIsCurrent && (blocksSinceSeek < outerInstance.outerInstance.indexReader.Divisor);
    //System.out.println("  indexIsCurrent=" + indexIsCurrent);

    return(true);
}
/// <summary>
/// Advances to the next live document (docs-and-positions variant) and
/// returns its doc id, or <c>NO_MORE_DOCS</c> when exhausted. Resumes scanning
/// at <c>_nextDocStart</c>, and on returning a doc leaves the main stream
/// positioned at <c>posStart</c> (just after the FREQ line) so positions can
/// be read next, remembering the following line in <c>_nextDocStart</c>.
/// </summary>
public override int NextDoc()
{
    bool first = true;   // true until the first DOC line has been parsed
    _in.Seek(_nextDocStart);
    long posStart = 0;   // file pointer just after the current doc's FREQ line
    while (true)
    {
        long lineStart = _in.GetFilePointer();
        SimpleTextUtil.ReadLine(_in, _scratch);
        //System.out.println("NEXT DOC: " + scratch.utf8ToString());
        if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.DOC))
        {
            // A new DOC line ends the previous doc's entry; return the
            // previous doc if it is live (or there are no deletions).
            if (!first && (_liveDocs == null || _liveDocs.Get(_docId)))
            {
                _nextDocStart = lineStart;
                _in.Seek(posStart);
                return(_docId);
            }
            UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.DOC.Length, _scratch.Length - SimpleTextFieldsWriter.DOC.Length, _scratchUtf16);
            _docId = ArrayUtil.ParseInt32(_scratchUtf16.Chars, 0, _scratchUtf16.Length);
            _tf = 0;
            first = false;
        }
        else if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FREQ))
        {
            UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.FREQ.Length, _scratch.Length - SimpleTextFieldsWriter.FREQ.Length, _scratchUtf16);
            _tf = ArrayUtil.ParseInt32(_scratchUtf16.Chars, 0, _scratchUtf16.Length);
            // Positions begin right after the FREQ line.
            posStart = _in.GetFilePointer();
        }
        else if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.POS))
        {
            // skip — positions are consumed later by NextPosition()
        }
        else if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.START_OFFSET))
        {
            // skip
        }
        else if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END_OFFSET))
        {
            // skip
        }
        else if (StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.PAYLOAD))
        {
            // skip
        }
        else
        {
            // Anything else is the start of the next term/field or the end
            // marker — the end of this term's postings.
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END));
            }
            if (!first && (_liveDocs == null || _liveDocs.Get(_docId)))
            {
                _nextDocStart = lineStart;
                _in.Seek(posStart);
                return(_docId);
            }
            return(_docId = NO_MORE_DOCS);
        }
    }
}
/// <summary>
/// Verifies that cloned inputs over a compound file maintain independent file
/// pointers: seeking/reading one input (or its clone) must never move any
/// other input, including across the buffer boundary.
/// </summary>
public virtual void TestRandomAccessClones()
{
    SetUp_2();
    CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);

    // Open two files
    IndexInput e1 = cr.OpenInput("f11", NewIOContext(Random()));
    IndexInput e2 = cr.OpenInput("f3", NewIOContext(Random()));

    IndexInput a1 = (IndexInput)e1.Clone();
    IndexInput a2 = (IndexInput)e2.Clone();

    // Asserts both the original and its clone sit at expectedPointer, then
    // reads one byte from each (advancing both by one) and compares them.
    void AssertSameByteAt(IndexInput expected, IndexInput clone, long expectedPointer)
    {
        Assert.AreEqual(expectedPointer, expected.GetFilePointer());
        Assert.AreEqual(expectedPointer, clone.GetFilePointer());
        byte be = expected.ReadByte();
        byte ba = clone.ReadByte();
        Assert.AreEqual(be, ba);
    }

    // Seeks both the original and its clone to pos and verifies the byte there.
    void SeekBothAndCheck(IndexInput expected, IndexInput clone, long pos)
    {
        expected.Seek(pos);
        clone.Seek(pos);
        AssertSameByteAt(expected, clone, pos);
    }

    // Seek the first pair
    SeekBothAndCheck(e1, a1, 100);

    // Now seek the second pair
    SeekBothAndCheck(e2, a2, 1027);

    // Now make sure the first one didn't move (the reads left it at 101)
    AssertSameByteAt(e1, a1, 101);

    // Now move the first one again, past the buffer length
    SeekBothAndCheck(e1, a1, 1910);

    // Now make sure the second set didn't move
    AssertSameByteAt(e2, a2, 1028);

    // Move the second set back, again crossing the buffer size
    SeekBothAndCheck(e2, a2, 17);

    // Finally, make sure the first set didn't move
    AssertSameByteAt(e1, a1, 1911);

    e1.Dispose();
    e2.Dispose();
    a1.Dispose();
    a2.Dispose();
    cr.Dispose();
}
/// <summary>
/// Round-trips random integer blocks through <c>ForUtil</c>: encodes
/// <c>iterations</c> blocks to a RAM directory, then decodes them (randomly
/// skipping some) and verifies both the restored values and that the final
/// read pointer matches the encoder's end pointer.
/// </summary>
public virtual void TestEncodeDecode()
{
    int iterations = RandomInts.RandomInt32Between(Random, 1, 1000);
    // Random overhead ratio exercises the different packed formats ForUtil may select.
    float acceptableOverheadRatio = (float)Random.NextDouble();
    int[] values = new int[(iterations - 1) * Lucene41PostingsFormat.BLOCK_SIZE + ForUtil.MAX_DATA_SIZE];
    for (int i = 0; i < iterations; ++i)
    {
        int bpv = Random.Next(32);
        if (bpv == 0)
        {
            // bpv == 0: fill the block with a single repeated value.
            int value = RandomInts.RandomInt32Between(Random, 0, int.MaxValue);
            for (int j = 0; j < Lucene41PostingsFormat.BLOCK_SIZE; ++j)
            {
                values[i * Lucene41PostingsFormat.BLOCK_SIZE + j] = value;
            }
        }
        else
        {
            // Random values constrained to bpv bits.
            for (int j = 0; j < Lucene41PostingsFormat.BLOCK_SIZE; ++j)
            {
                values[i * Lucene41PostingsFormat.BLOCK_SIZE + j] = RandomInts.RandomInt32Between(Random, 0, (int)PackedInt32s.MaxValue(bpv));
            }
        }
    }

    Directory d = new RAMDirectory();
    long endPointer;

    {
        // encode
        IndexOutput @out = d.CreateOutput("test.bin", IOContext.DEFAULT);
        ForUtil forUtil = new ForUtil(acceptableOverheadRatio, @out);
        for (int i = 0; i < iterations; ++i)
        {
            forUtil.WriteBlock(Arrays.CopyOfRange(values, i * Lucene41PostingsFormat.BLOCK_SIZE, values.Length), new byte[Lucene41.ForUtil.MAX_ENCODED_SIZE], @out);
        }
        endPointer = @out.GetFilePointer();
        @out.Dispose();
    }

    {
        // decode
        IndexInput @in = d.OpenInput("test.bin", IOContext.READ_ONCE);
        ForUtil forUtil = new ForUtil(@in);
        for (int i = 0; i < iterations; ++i)
        {
            if (Random.NextBoolean())
            {
                // SkipBlock must advance the pointer exactly as ReadBlock would.
                forUtil.SkipBlock(@in);
                continue;
            }
            int[] restored = new int[Lucene41.ForUtil.MAX_DATA_SIZE];
            forUtil.ReadBlock(@in, new byte[Lucene41.ForUtil.MAX_ENCODED_SIZE], restored);
            Assert.AreEqual(Arrays.CopyOfRange(values, i * Lucene41PostingsFormat.BLOCK_SIZE, (i + 1) * Lucene41PostingsFormat.BLOCK_SIZE), Arrays.CopyOf(restored, Lucene41PostingsFormat.BLOCK_SIZE));
        }
        // Consistency: use Assert.AreEqual like the rest of this method
        // (was the lone assertEquals alias call).
        Assert.AreEqual(endPointer, @in.GetFilePointer());
        @in.Dispose();
    }
}