/// <summary>
/// Reads per-field docvalues metadata entries from <paramref name="meta"/> until the
/// end-of-fields marker (-1). Each entry is a field number followed by a type byte,
/// then type-specific entry data that is stored in the matching per-type map.
/// </summary>
/// <exception cref="CorruptIndexException">If an unknown entry type byte is encountered.</exception>
private void ReadFields(IndexInput meta)
{
    for (int fieldNumber = meta.ReadVInt32(); fieldNumber != -1; fieldNumber = meta.ReadVInt32())
    {
        int entryType = meta.ReadByte();
        if (entryType == NUMBER)
        {
            numerics[fieldNumber] = ReadNumericEntry(meta);
        }
        else if (entryType == BYTES)
        {
            binaries[fieldNumber] = ReadBinaryEntry(meta);
        }
        else if (entryType == SORTED)
        {
            sorteds[fieldNumber] = ReadSortedEntry(meta);
        }
        else if (entryType == SORTED_SET)
        {
            sortedSets[fieldNumber] = ReadSortedSetEntry(meta);
        }
        else
        {
            throw new CorruptIndexException("invalid entry type: " + entryType + ", input=" + meta);
        }
    }
}
/// <summary>
/// Visits the stored fields of document <paramref name="n"/>: seeks the fields stream
/// to the document's start (via the index stream), then walks each stored field,
/// letting the <paramref name="visitor"/> decide whether to read, skip, or stop.
/// </summary>
public override void VisitDocument(int n, StoredFieldVisitor visitor)
{
    // Position the fields stream at the start of document n using the per-doc index.
    SeekIndex(n);
    fieldsStream.Seek(indexStream.ReadInt64());

    int fieldCount = fieldsStream.ReadVInt32();
    for (int i = 0; i < fieldCount; i++)
    {
        int fieldNumber = fieldsStream.ReadVInt32();
        FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);

        // One flags byte per field: numeric-type mask and/or binary bit.
        int bits = fieldsStream.ReadByte() & 0xFF;
        Debug.Assert(bits <= (Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK | Lucene40StoredFieldsWriter.FIELD_IS_BINARY), "bits=" + bits.ToString("x"));

        StoredFieldVisitor.Status status = visitor.NeedsField(fieldInfo);
        if (status == StoredFieldVisitor.Status.YES)
        {
            ReadField(visitor, fieldInfo, bits);
        }
        else if (status == StoredFieldVisitor.Status.NO)
        {
            SkipField(bits);
        }
        else if (status == StoredFieldVisitor.Status.STOP)
        {
            return;
        }
    }
}
/// <summary>
/// Reads all per-field docvalues metadata entries from <paramref name="meta"/> until the
/// end-of-fields marker (-1). Each entry is a field number, a type byte, and the
/// type-specific entry data; SORTED and SORTED_SET are "composite" types that pull in
/// additional BINARY/NUMERIC sub-entries for the same field number.
/// </summary>
/// <exception cref="CorruptIndexException">If the metadata is malformed.</exception>
private void ReadFields(IndexInput meta, FieldInfos infos)
{
    int fieldNumber = meta.ReadVInt32();
    while (fieldNumber != -1)
    {
        // check should be: infos.fieldInfo(fieldNumber) != null, which incorporates negative check
        // but docvalues updates are currently buggy here (loading extra stuff, etc): LUCENE-5616
        if (fieldNumber < 0)
        {
            // trickier to validate more: because we re-use for norms, because we use multiple entries
            // for "composite" types like sortedset, etc.
            // LUCENENET: was "throw new Exception"; use CorruptIndexException so callers that
            // distinguish index corruption (as in the other readers in this file) still can.
            throw new CorruptIndexException("Invalid field number: " + fieldNumber + " (resource=" + meta + ")");
        }
        byte type = meta.ReadByte();
        if (type == Lucene45DocValuesFormat.NUMERIC)
        {
            numerics[fieldNumber] = ReadNumericEntry(meta);
        }
        else if (type == Lucene45DocValuesFormat.BINARY)
        {
            BinaryEntry b = ReadBinaryEntry(meta);
            binaries[fieldNumber] = b;
        }
        else if (type == Lucene45DocValuesFormat.SORTED)
        {
            ReadSortedField(fieldNumber, meta, infos);
        }
        else if (type == Lucene45DocValuesFormat.SORTED_SET)
        {
            SortedSetEntry ss = ReadSortedSetEntry(meta);
            sortedSets[fieldNumber] = ss;
            if (ss.Format == Lucene45DocValuesConsumer.SORTED_SET_WITH_ADDRESSES)
            {
                // LUCENENET: ReadSortedSetFieldWithAddresses takes (fieldNumber, meta) only;
                // the former FieldInfos parameter was removed (never read).
                ReadSortedSetFieldWithAddresses(fieldNumber, meta);
            }
            else if (ss.Format == Lucene45DocValuesConsumer.SORTED_SET_SINGLE_VALUED_SORTED)
            {
                // A single-valued sorted set is stored as an ordinary SORTED entry;
                // validate the embedded field number and type byte before recursing.
                if (meta.ReadVInt32() != fieldNumber)
                {
                    throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
                }
                if (meta.ReadByte() != Lucene45DocValuesFormat.SORTED)
                {
                    throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
                }
                ReadSortedField(fieldNumber, meta, infos);
            }
            else
            {
                // LUCENENET: was a message-less "throw new Exception()"; include the
                // offending format value for diagnostics.
                throw new CorruptIndexException("invalid SORTED_SET format: " + ss.Format + " (resource=" + meta + ")");
            }
        }
        else
        {
            throw new CorruptIndexException("invalid type: " + type + ", resource=" + meta);
        }
        fieldNumber = meta.ReadVInt32();
    }
}
/// <summary>
/// Reads the composite metadata for a SORTED field: a BINARY entry (the term bytes)
/// followed by a NUMERIC entry (the per-document ordinals). Each sub-entry is preceded
/// by the field number and a type byte, which are validated against expectations.
/// </summary>
/// <exception cref="CorruptIndexException">If either sub-entry header does not match.</exception>
private void ReadSortedField(int fieldNumber, IndexInput meta, FieldInfos infos)
{
    // sorted = binary + numeric
    // LUCENENET: these were "throw new Exception"; use CorruptIndexException to match
    // the corruption reporting used by the other readers in this file.
    if (meta.ReadVInt32() != fieldNumber)
    {
        throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
    }
    if (meta.ReadByte() != Lucene45DocValuesFormat.BINARY)
    {
        throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
    }
    BinaryEntry b = ReadBinaryEntry(meta);
    binaries[fieldNumber] = b;

    if (meta.ReadVInt32() != fieldNumber)
    {
        throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
    }
    if (meta.ReadByte() != Lucene45DocValuesFormat.NUMERIC)
    {
        throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
    }
    NumericEntry n = ReadNumericEntry(meta);
    ords[fieldNumber] = n;
}
/// <summary>
/// Increments the enumeration to the next element. True if one exists.
/// Saves the current term into <c>prevBuffer</c> before advancing, then reads the
/// next term's bytes and its <c>TermInfo</c> (doc freq plus delta-encoded freq/prox
/// pointers) from the input.
/// </summary>
public bool Next()
{
    // Remember the current term so callers can compute shared prefixes.
    prevBuffer.Set(termBuffer);
    //System.out.println("  ste setPrev=" + prev() + " this=" + this);

    // position is the 0-based index of the current term; stop at the last one.
    if (position++ >= size - 1)
    {
        termBuffer.Reset();
        //System.out.println("    EOF");
        return (false);
    }

    termBuffer.Read(input, fieldInfos);
    newSuffixStart = termBuffer.newSuffixStart;

    termInfo.DocFreq = input.ReadVInt32(); // read doc freq
    // Pointers are delta-encoded relative to the previous term's values.
    termInfo.FreqPointer += input.ReadVInt64(); // read freq pointer
    termInfo.ProxPointer += input.ReadVInt64(); // read prox pointer

    // A skip offset is only written when the doc freq reaches the skip interval.
    if (termInfo.DocFreq >= skipInterval)
    {
        termInfo.SkipOffset = input.ReadVInt32();
    }

    // Index segments additionally store a delta-encoded pointer into the main terms file.
    if (isIndex)
    {
        indexPointer += input.ReadVInt64(); // read index pointer
    }

    //System.out.println("  ste ret term=" + term());
    return (true);
}
/// <summary>
/// Reads one skip entry at the given skip <paramref name="level"/>: the doc delta
/// (with an embedded payload-length-changed flag when the field stores payloads)
/// followed by delta-encoded freq and prox file-pointer increments.
/// </summary>
/// <returns>The decoded document delta for this skip entry.</returns>
protected override int ReadSkipData(int level, IndexInput skipStream)
{
    int delta = skipStream.ReadVInt32();
    if (currentFieldStoresPayloads)
    {
        // Payload-bearing field: the low bit of the delta flags that the payload
        // length changed and is written right after the delta.
        if ((delta & 1) != 0)
        {
            payloadLength[level] = skipStream.ReadVInt32();
        }
        delta = delta.TripleShift(1); // drop the flag bit
    }

    freqPointer[level] += skipStream.ReadVInt32();
    proxPointer[level] += skipStream.ReadVInt32();

    return delta;
}
//private readonly string segment; // LUCENENET: Not used

/// <summary>
/// Opens the variable-gap terms index file for the given segment, verifies its
/// checksum (when the version supports it), and reads the field directory:
/// one (field number, index start pointer) pair per field.
/// When <paramref name="indexDivisor"/> is positive the index is loaded eagerly and
/// the input is disposed here; -1 means the file is kept open for lazy loading.
/// </summary>
public VariableGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string segment, int indexDivisor, string segmentSuffix, IOContext context)
{
    input = dir.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, VariableGapTermsIndexWriter.TERMS_INDEX_EXTENSION), new IOContext(context, true));
    //this.segment = segment; // LUCENENET: Not used
    bool success = false;

    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(indexDivisor == -1 || indexDivisor > 0);
    }

    try
    {
        version = ReadHeader(input);
        this.indexDivisor = indexDivisor;

        // Checksum verification was added in VERSION_CHECKSUM.
        if (version >= VariableGapTermsIndexWriter.VERSION_CHECKSUM)
        {
            CodecUtil.ChecksumEntireFile(input);
        }

        SeekDir(input, dirOffset);

        // Read directory
        int numFields = input.ReadVInt32();
        if (numFields < 0)
        {
            throw new CorruptIndexException("invalid numFields: " + numFields + " (resource=" + input + ")");
        }

        for (int i = 0; i < numFields; i++)
        {
            int field = input.ReadVInt32();
            long indexStart = input.ReadVInt64();
            FieldInfo fieldInfo = fieldInfos.FieldInfo(field);
            // Put returns the prior value; a non-null result means the same field
            // appeared twice in the directory, which indicates corruption.
            FieldIndexData previous = fields.Put(fieldInfo, new FieldIndexData(this, /* fieldInfo, // LUCENENET: Not referenced */ indexStart));
            if (previous != null)
            {
                throw new CorruptIndexException("duplicate field: " + fieldInfo.Name + " (resource=" + input + ")");
            }
        }
        success = true;
    }
    finally
    {
        // Eager load (indexDivisor > 0): the file has been fully consumed, so close it.
        // indexLoaded is only set when the directory was read without error.
        if (indexDivisor > 0)
        {
            input.Dispose();
            input = null;
            if (success)
            {
                indexLoaded = true;
            }
        }
    }
}
/// <summary>
/// Reads one block of vint-encoded values into <c>buffer</c>. The first value acts
/// as a header: values &lt;= 3 mark a base-size block, anything larger a double-size
/// block. Returns the total number of values read, including the header.
/// </summary>
public virtual int ReadBlock()
{
    buffer[0] = input.ReadVInt32();
    int remaining = (buffer[0] <= 3) ? (baseBlockSize - 1) : (2 * baseBlockSize - 1);
    Debug.Assert(buffer.Length >= remaining, "buffer.length=" + buffer.Length + " count=" + remaining);

    int idx = 1;
    while (idx <= remaining)
    {
        buffer[idx] = input.ReadVInt32();
        idx++;
    }
    return remaining + 1;
}
/// <summary>
/// Opens the FST terms dictionary file for the segment, verifies its checksum
/// (when the version supports it), initializes the postings reader, and reads the
/// per-field summaries (term/doc counts and statistics), building one
/// <c>TermsReader</c> per field.
/// </summary>
public FSTTermsReader(SegmentReadState state, PostingsReaderBase postingsReader)
{
    string termsFileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, FSTTermsWriter.TERMS_EXTENSION);

    this.postingsReader = postingsReader;
    IndexInput @in = state.Directory.OpenInput(termsFileName, state.Context);

    bool success = false;
    try
    {
        version = ReadHeader(@in);
        // Checksum verification was added in TERMS_VERSION_CHECKSUM.
        if (version >= FSTTermsWriter.TERMS_VERSION_CHECKSUM)
        {
            CodecUtil.ChecksumEntireFile(@in);
        }
        this.postingsReader.Init(@in);
        SeekDir(@in);

        FieldInfos fieldInfos = state.FieldInfos;
        int numFields = @in.ReadVInt32();
        for (int i = 0; i < numFields; i++)
        {
            int fieldNumber = @in.ReadVInt32();
            FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
            long numTerms = @in.ReadVInt64();
            // sumTotalTermFreq is not written for DOCS_ONLY fields; -1 marks "absent".
            long sumTotalTermFreq = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ? -1 : @in.ReadVInt64();
            long sumDocFreq = @in.ReadVInt64();
            int docCount = @in.ReadVInt32();
            int longsSize = @in.ReadVInt32();
            TermsReader current = new TermsReader(this, fieldInfo, @in, numTerms, sumTotalTermFreq, sumDocFreq, docCount, longsSize);
            TermsReader previous;
            // LUCENENET NOTE: This simulates a put operation in Java,
            // getting the prior value first before setting it.
            fields.TryGetValue(fieldInfo.Name, out previous);
            fields[fieldInfo.Name] = current;
            // A non-null previous value means a duplicate field summary; validated here.
            CheckFieldSummary(state.SegmentInfo, @in, current, previous);
        }
        success = true;
    }
    finally
    {
        // Dispose the input either way; suppress secondary exceptions if we are
        // already unwinding from a failure.
        if (success)
        {
            IOUtils.Dispose(@in);
        }
        else
        {
            IOUtils.DisposeWhileHandlingException(@in);
        }
    }
}
/// <summary>
/// Advances to the next document in this posting list. Seeks the postings input to
/// the next document's offset, reads its frequency, and resets the per-document
/// position state. Returns the doc id, or <c>NO_MORE_DOCS</c> when exhausted.
/// </summary>
public override int NextDoc()
{
    docIt++;
    if (docIt >= upto)
    {
        return DocIdSetIterator.NO_MORE_DOCS;
    }

    postingInput.Seek(offsets[docIt]);
    currFreq = postingInput.ReadVInt32();

    // reset variables used in nextPosition
    pos = 0;
    endOffset = 0;

    return docs[docIt];
}
/// <summary>
/// Decodes one BINARY docvalues metadata entry: the common header (format, missing
/// offset, min/max length, count, data offset) plus format-specific addressing data.
/// </summary>
/// <exception cref="CorruptIndexException">If the format byte is unrecognized.</exception>
internal static BinaryEntry ReadBinaryEntry(IndexInput meta)
{
    BinaryEntry entry = new BinaryEntry();
    entry.format = meta.ReadVInt32();
    entry.missingOffset = meta.ReadInt64();
    entry.minLength = meta.ReadVInt32();
    entry.maxLength = meta.ReadVInt32();
    entry.Count = meta.ReadVInt64();
    entry.offset = meta.ReadInt64();
    switch (entry.format)
    {
        case Lucene45DocValuesConsumer.BINARY_FIXED_UNCOMPRESSED:
            // Fixed-length values need no addressing data.
            break;

        case Lucene45DocValuesConsumer.BINARY_PREFIX_COMPRESSED:
            entry.AddressInterval = meta.ReadVInt32();
            entry.AddressesOffset = meta.ReadInt64();
            entry.PackedInt32sVersion = meta.ReadVInt32();
            entry.BlockSize = meta.ReadVInt32();
            break;

        case Lucene45DocValuesConsumer.BINARY_VARIABLE_UNCOMPRESSED:
            entry.AddressesOffset = meta.ReadInt64();
            entry.PackedInt32sVersion = meta.ReadVInt32();
            entry.BlockSize = meta.ReadVInt32();
            break;

        default:
            // LUCENENET: was "throw new Exception"; report as index corruption like
            // the other readers in this file.
            throw new CorruptIndexException("Unknown format: " + entry.format + ", input=" + meta);
    }
    return entry;
}
/// <summary>
/// Advances to the next term. Each term is stored as a shared-prefix length, a
/// suffix length, and the suffix bytes, which are spliced onto the previous term
/// in <c>termBuffer</c>. Returns null when the values are exhausted.
/// </summary>
private BytesRef DoNext()
{
    currentOrd++;
    if (currentOrd >= outerInstance.numValues)
    {
        return null;
    }

    int sharedPrefix = input.ReadVInt32();
    int suffixLength = input.ReadVInt32();
    // Overwrite only the suffix portion; the shared prefix is already in the buffer.
    input.ReadBytes(termBuffer.Bytes, sharedPrefix, suffixLength);
    termBuffer.Length = sharedPrefix + suffixLength;
    return termBuffer;
}
/// <summary>
/// Reads one block of vint-encoded values into <c>buffer</c>. The first value acts
/// as a header: values &lt;= 3 mark a base-size block, anything larger a double-size
/// block. Returns the total number of values read, including the header.
/// </summary>
public virtual int ReadBlock()
{
    buffer[0] = input.ReadVInt32();
    int remaining = (buffer[0] <= 3) ? (baseBlockSize - 1) : (2 * baseBlockSize - 1);
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(buffer.Length >= remaining, "buffer.length={0} count={1}", buffer.Length, remaining);
    }

    int idx = 1;
    while (idx <= remaining)
    {
        buffer[idx] = input.ReadVInt32();
        idx++;
    }
    return remaining + 1;
}
/// <summary>
/// Reads the next term from <paramref name="input"/>: the shared-prefix length
/// (<c>newSuffixStart</c>), the new suffix bytes (spliced onto the previous term's
/// bytes), and the field number. The field name is re-resolved (and interned) only
/// when the field number changes; -1 marks the empty field.
/// </summary>
public void Read(IndexInput input, FieldInfos fieldInfos)
{
    this.term = null; // invalidate cache
    newSuffixStart = input.ReadVInt32();
    int length = input.ReadVInt32();
    int totalLength = newSuffixStart + length;
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(totalLength <= ByteBlockPool.BYTE_BLOCK_SIZE - 2, () => "termLength=" + totalLength + ",resource=" + input);
    }
    if (bytes.Bytes.Length < totalLength)
    {
        bytes.Grow(totalLength);
    }
    bytes.Length = totalLength;
    // Only the suffix is stored; the first newSuffixStart bytes carry over.
    input.ReadBytes(bytes.Bytes, newSuffixStart, length);

    int fieldNumber = input.ReadVInt32();
    if (fieldNumber != currentFieldNumber)
    {
        currentFieldNumber = fieldNumber;
        // NOTE: too much sneakiness here, seriously this is a negative vint?!
        if (currentFieldNumber == -1) // lucene 3.0 indexes write a -1 for the empty field
        {
            field = "";
        }
        else
        {
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(fieldInfos.FieldInfo(currentFieldNumber) != null, currentFieldNumber.ToString);
            }
            field = fieldInfos.FieldInfo(currentFieldNumber).Name.Intern();
        }
    }
    else
    {
        if (Debugging.AssertsEnabled)
        {
            // LUCENENET: fixed an operator-precedence bug here: '+' binds tighter than
            // '==', so the original compared the concatenated message string to null
            // (always false), silently dropping the message prefix and risking an NRE
            // on the .Name access the null check was meant to guard. The conditional
            // is now parenthesized so the FieldInfo itself is null-checked.
            Debugging.Assert(field.Equals(fieldInfos.FieldInfo(fieldNumber).Name, StringComparison.Ordinal), () => "currentFieldNumber=" + currentFieldNumber + " field=" + field + " vs " + (fieldInfos.FieldInfo(fieldNumber) == null ? "null" : fieldInfos.FieldInfo(fieldNumber).Name));
        }
    }
}
/// <summary>
/// Fills the entire <c>buffer</c> with vint-encoded values read from the input.
/// </summary>
public void ReadBlock()
{
    int i = 0;
    while (i < buffer.Length)
    {
        buffer[i] = @in.ReadVInt32();
        i++;
    }
}
/// <summary>
/// Returns the next position for the current document. The prox stream is seeked
/// lazily on first use, and positions belonging to documents that were advanced
/// past without consuming their positions are skipped first.
/// </summary>
public override int NextPosition()
{
    if (lazyProxPointer != -1)
    {
        // First position access since the last doc seek: position the prox stream now.
        proxIn.Seek(lazyProxPointer);
        lazyProxPointer = -1;
    }

    // scan over any docs that were iterated without their positions
    if (posPendingCount > freq)
    {
        position = 0;
        while (posPendingCount != freq)
        {
            // Skip one vint-encoded position delta byte-by-byte; a byte with the
            // high bit clear terminates a vint, so each such byte ends one value.
            if ((proxIn.ReadByte() & 0x80) == 0)
            {
                posPendingCount--;
            }
        }
    }

    // Positions are delta-encoded within a document.
    position += proxIn.ReadVInt32();

    posPendingCount--;

    Debug.Assert(posPendingCount >= 0, "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount);

    return (position);
}
/// <summary>
/// Scans forward through the postings until a document &gt;= <paramref name="target"/>
/// is found, decoding doc deltas (and frequencies unless the field omits term
/// frequencies) as it goes. Updates <c>m_accum</c>/<c>m_freq</c>/<c>m_ord</c> and
/// returns the matching doc, or <c>NO_MORE_DOCS</c> when the list is exhausted.
/// </summary>
protected internal override int ScanTo(int target)
{
    int doc = m_accum;
    int currentFreq = 1;
    IndexInput stream = this.freqIn;
    bool docsOnly = m_indexOmitsTF;
    int end = m_limit;

    int ord = m_ord;
    while (ord < end)
    {
        int code = stream.ReadVInt32();
        if (docsOnly)
        {
            doc += code;
        }
        else
        {
            doc += code.TripleShift(1); // shift off low bit
            currentFreq = ReadFreq(stream, code);
        }
        ord++;

        if (doc >= target)
        {
            m_freq = currentFreq;
            m_ord = ord;
            return m_accum = doc;
        }
    }

    m_ord = m_limit;
    m_freq = currentFreq;
    m_accum = doc;
    return NO_MORE_DOCS;
}
// LUCENENET specific - factored out DoNext() and made into MoveNext()
/// <summary>
/// Advances to the next term. Each term is stored as a shared-prefix length, a
/// suffix length, and the suffix bytes, which are spliced onto the previous term
/// in <c>termBuffer</c>. Returns false when the values are exhausted.
/// </summary>
public override bool MoveNext()
{
    currentOrd++;
    if (currentOrd >= outerInstance.numValues)
    {
        return false;
    }

    int sharedPrefix = input.ReadVInt32();
    int suffixLength = input.ReadVInt32();
    // Overwrite only the suffix portion; the shared prefix is already in the buffer.
    input.ReadBytes(termBuffer.Bytes, sharedPrefix, suffixLength);
    termBuffer.Length = sharedPrefix + suffixLength;
    SetTerm();
    return true;
}
/// <summary>
/// Read the next block of data (<c>For</c> format).
/// </summary>
/// <param name="in"> The input to use to read data. </param>
/// <param name="encoded"> A buffer that can be used to store encoded data. </param>
/// <param name="decoded"> Where to write decoded data. </param>
/// <exception cref="IOException"> If there is a low-level I/O error. </exception>
internal void ReadBlock(IndexInput @in, byte[] encoded, int[] decoded)
{
    int numBits = @in.ReadByte();
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(numBits <= 32, numBits.ToString);
    }

    // Degenerate block: a single vint value repeated for the whole block.
    if (numBits == ALL_VALUES_EQUAL)
    {
        int value = @in.ReadVInt32();
        Arrays.Fill(decoded, 0, Lucene41PostingsFormat.BLOCK_SIZE, value);
        return;
    }

    // Bulk-read the packed bytes for this bit width, then decode them.
    int byteCount = encodedSizes[numBits];
    @in.ReadBytes(encoded, 0, byteCount);

    PackedInt32s.IDecoder blockDecoder = decoders[numBits];
    int iterCount = iterations[numBits];
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(iterCount * blockDecoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
    }
    blockDecoder.Decode(encoded, 0, decoded, 0, iterCount);
}
/// <summary>
/// Loads this segment's RAM-only state id from its id file and returns the cached
/// in-memory <see cref="FieldsProducer"/> registered under that id.
/// </summary>
public override FieldsProducer FieldsProducer(SegmentReadState readState)
{
    // Load our ID:
    string idFileName = IndexFileNames.SegmentFileName(readState.SegmentInfo.Name, readState.SegmentSuffix, ID_EXTENSION);
    IndexInput @in = readState.Directory.OpenInput(idFileName, readState.Context);

    int id = -1;
    bool ok = false;
    try
    {
        CodecUtil.CheckHeader(@in, RAM_ONLY_NAME, VERSION_START, VERSION_LATEST);
        id = @in.ReadVInt32();
        ok = true;
    }
    finally
    {
        // Dispose either way; suppress secondary exceptions while unwinding a failure.
        if (ok)
        {
            IOUtils.Dispose(@in);
        }
        else
        {
            IOUtils.DisposeWhileHandlingException(@in);
        }
    }

    lock (state)
    {
        return state[id];
    }
}
/// <summary>
/// Scans forward through the postings until a document that is live (per
/// <c>m_liveDocs</c>) is found, decoding doc deltas (and frequencies unless the
/// field omits term frequencies) as it goes. Updates
/// <c>m_accum</c>/<c>m_freq</c>/<c>m_ord</c> and returns the matching doc, or
/// <c>NO_MORE_DOCS</c> when the list is exhausted.
/// </summary>
protected internal override int NextUnreadDoc()
{
    int doc = m_accum;
    int currentFreq = 1;
    IndexInput stream = this.freqIn;
    bool docsOnly = m_indexOmitsTF;
    int end = m_limit;
    IBits live = this.m_liveDocs;

    for (int ord = m_ord; ord < end; ord++)
    {
        int code = stream.ReadVInt32();
        if (docsOnly)
        {
            doc += code;
        }
        else
        {
            doc += (int)((uint)code >> 1); // shift off low bit
            currentFreq = ReadFreq(stream, code);
        }

        if (live.Get(doc))
        {
            m_freq = currentFreq;
            m_ord = ord + 1;
            return m_accum = doc;
        }
    }

    m_ord = m_limit;
    m_freq = currentFreq;
    m_accum = doc;
    return NO_MORE_DOCS;
}
/// <summary>
/// Reads the pulsing header from the terms dictionary input, initializes the wrapped
/// postings reader, and — for non-nested readers at VERSION_META_ARRAY or later —
/// loads the per-field longs-size map from the summary file.
/// </summary>
public override void Init(IndexInput termsIn)
{
    _version = CodecUtil.CheckHeader(termsIn, PulsingPostingsWriter.CODEC, PulsingPostingsWriter.VERSION_START, PulsingPostingsWriter.VERSION_CURRENT);

    _maxPositions = termsIn.ReadVInt32();
    _wrappedPostingsReader.Init(termsIn);

    // Nested pulsing readers and pre-VERSION_META_ARRAY segments have no summary file.
    if (_wrappedPostingsReader is PulsingPostingsReader || _version < PulsingPostingsWriter.VERSION_META_ARRAY)
    {
        _fields = null;
    }
    else
    {
        _fields = new SortedDictionary <int, int>();

        var summaryFileName = IndexFileNames.SegmentFileName(_segmentState.SegmentInfo.Name, _segmentState.SegmentSuffix, PulsingPostingsWriter.SUMMARY_EXTENSION);
        IndexInput input = null;

        try
        {
            input = _segmentState.Directory.OpenInput(summaryFileName, _segmentState.Context);
            CodecUtil.CheckHeader(input, PulsingPostingsWriter.CODEC, _version, PulsingPostingsWriter.VERSION_CURRENT);

            // Summary file: field count, then (field number, longs size) pairs.
            var numField = input.ReadVInt32();
            for (var i = 0; i < numField; i++)
            {
                var fieldNum = input.ReadVInt32();
                var longsSize = input.ReadVInt32();
                _fields.Add(fieldNum, longsSize);
            }
        }
        finally
        {
            // NOTE(review): other readers in this file use IOUtils.DisposeWhileHandlingException;
            // confirm CloseWhileHandlingException is an intentional (possibly legacy) alias.
            IOUtils.CloseWhileHandlingException(input);
        }
    }
}
/// <summary>
/// Reads one field's terms data: the field number, the field statistics
/// (sum total term freq is only written for fields that index frequencies),
/// and the FST holding the terms.
/// </summary>
public TermsReader(FieldInfos fieldInfos, IndexInput @in, int termCount)
{
    this.termCount = termCount;
    field = fieldInfos.FieldInfo(@in.ReadVInt32());
    // -1 marks "not recorded" for DOCS_ONLY fields.
    sumTotalTermFreq = field.IndexOptions == IndexOptions.DOCS_ONLY ? -1 : @in.ReadVInt64();
    sumDocFreq = @in.ReadVInt64();
    docCount = @in.ReadVInt32();
    fst = new FST <BytesRef>(@in, outputs);
}
/// <summary>
/// Reads one skip entry at the given skip <paramref name="level"/>: the doc delta
/// (with an embedded payload-length-changed flag when the field stores payloads),
/// then the per-stream index entries for freq/doc/pos, and finally the payload
/// pointer delta when applicable.
/// </summary>
/// <returns>The decoded document delta for this skip entry.</returns>
/// <exception cref="IOException"/>
protected override int ReadSkipData(int level, IndexInput skipStream)
{
    int delta;
    if (Debugging.AssertsEnabled)
    {
        // Payloads imply positions are indexed.
        Debugging.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !currentFieldStoresPayloads);
    }

    if (currentFieldStoresPayloads)
    {
        // the current field stores payloads.
        // if the doc delta is odd then we have
        // to read the current payload length
        // because it differs from the length of the
        // previous payload
        delta = skipStream.ReadVInt32();
        if ((delta & 1) != 0)
        {
            payloadLength[level] = skipStream.ReadVInt32();
        }
        //delta >>>= 1;
        delta = (int)((uint)delta >> 1);
    }
    else
    {
        delta = skipStream.ReadVInt32();
    }

    // Freq index entries exist only when frequencies are indexed.
    if (indexOptions != IndexOptions.DOCS_ONLY)
    {
        freqIndex[level].Read(skipStream, false);
    }
    docIndex[level].Read(skipStream, false);

    if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
    {
        posIndex[level].Read(skipStream, false);
        if (currentFieldStoresPayloads)
        {
            // Payload pointer is delta-encoded relative to the previous entry.
            payloadPointer[level] += skipStream.ReadVInt32();
        }
    }

    return (delta);
}
/// <summary>
/// Decodes one NUMERIC docvalues metadata entry: the common header (format, missing
/// offset, packed-ints version, data offset, count, block size) plus format-specific
/// data (GCD base/divisor, or a lookup table of unique values).
/// </summary>
/// <exception cref="CorruptIndexException">
/// If the format byte is unrecognized, or a TABLE_COMPRESSED entry exceeds its limits.
/// </exception>
internal static NumericEntry ReadNumericEntry(IndexInput meta)
{
    NumericEntry entry = new NumericEntry();
    entry.format = meta.ReadVInt32();
    entry.missingOffset = meta.ReadInt64();
    entry.PackedInt32sVersion = meta.ReadVInt32();
    entry.Offset = meta.ReadInt64();
    entry.Count = meta.ReadVInt64();
    entry.BlockSize = meta.ReadVInt32();
    switch (entry.format)
    {
        case Lucene45DocValuesConsumer.GCD_COMPRESSED:
            entry.minValue = meta.ReadInt64();
            entry.gcd = meta.ReadInt64();
            break;

        case Lucene45DocValuesConsumer.TABLE_COMPRESSED:
            // LUCENENET: these were "throw new Exception"; report as index corruption
            // like the other readers in this file.
            if (entry.Count > int.MaxValue)
            {
                throw new CorruptIndexException("Cannot use TABLE_COMPRESSED with more than MAX_VALUE values, input=" + meta);
            }
            int uniqueValues = meta.ReadVInt32();
            if (uniqueValues > 256)
            {
                throw new CorruptIndexException("TABLE_COMPRESSED cannot have more than 256 distinct values, input=" + meta);
            }
            entry.table = new long[uniqueValues];
            for (int i = 0; i < uniqueValues; ++i)
            {
                entry.table[i] = meta.ReadInt64();
            }
            break;

        case Lucene45DocValuesConsumer.DELTA_COMPRESSED:
            // Plain delta compression needs no extra metadata.
            break;

        default:
            throw new CorruptIndexException("Unknown format: " + entry.format + ", input=" + meta);
    }
    return entry;
}
/// <summary>
/// Decodes the term frequency from a doc/freq code: a set low bit encodes an
/// implicit frequency of one; otherwise the frequency follows as a separate vint.
/// </summary>
internal static int ReadFreq(IndexInput freqIn, int code) // LUCENENET: CA1822: Mark members as static
{
    return (code & 1) != 0 ? 1 : freqIn.ReadVInt32();
}
/// <summary>
/// Decodes the term frequency from a doc/freq code: a set low bit encodes an
/// implicit frequency of one; otherwise the frequency follows as a separate vint.
/// </summary>
internal int ReadFreq(IndexInput freqIn, int code)
{
    if ((code & 1) == 0)
    {
        return freqIn.ReadVInt32(); // freq is stored explicitly
    }
    return 1; // low bit set: freq is one
}
/// <summary>
/// Reads one skip entry at the given skip <paramref name="level"/>: the doc delta
/// (with an embedded lengths-changed flag when the field stores payloads and/or
/// offsets) followed by delta-encoded freq and prox file-pointer increments.
/// </summary>
/// <returns>The decoded document delta for this skip entry.</returns>
protected override int ReadSkipData(int level, IndexInput skipStream)
{
    int delta = skipStream.ReadVInt32();
    if (currentFieldStoresPayloads || currentFieldStoresOffsets)
    {
        // The low bit of the delta flags that the payload/offset lengths changed
        // and are written right after the delta.
        if ((delta & 1) != 0)
        {
            if (currentFieldStoresPayloads)
            {
                payloadLength[level] = skipStream.ReadVInt32();
            }
            if (currentFieldStoresOffsets)
            {
                offsetLength[level] = skipStream.ReadVInt32();
            }
        }
        delta = (int)((uint)delta >> 1); // drop the flag bit
    }

    freqPointer[level] += skipStream.ReadVInt32();
    proxPointer[level] += skipStream.ReadVInt32();

    return delta;
}
/// <summary>
/// Reads the composite metadata for a SORTED_SET field stored with addresses:
/// a BINARY entry (term bytes), a NUMERIC entry (ordinals), and a second NUMERIC
/// entry (the per-document ordinal index). Each sub-entry is preceded by the field
/// number and a type byte, which are validated against expectations.
/// </summary>
/// <exception cref="CorruptIndexException">If any sub-entry header does not match.</exception>
private void ReadSortedSetFieldWithAddresses(int fieldNumber, IndexInput meta /*, FieldInfos infos // LUCENENET: Never read */)
{
    // sortedset = binary + numeric (addresses) + ordIndex
    // LUCENENET: these were "throw new Exception"; use CorruptIndexException to match
    // the corruption reporting used by the other readers in this file.
    if (meta.ReadVInt32() != fieldNumber)
    {
        throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
    }
    if (meta.ReadByte() != Lucene45DocValuesFormat.BINARY)
    {
        throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
    }
    BinaryEntry b = ReadBinaryEntry(meta);
    binaries[fieldNumber] = b;

    if (meta.ReadVInt32() != fieldNumber)
    {
        throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
    }
    if (meta.ReadByte() != Lucene45DocValuesFormat.NUMERIC)
    {
        throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
    }
    NumericEntry n1 = ReadNumericEntry(meta);
    ords[fieldNumber] = n1;

    if (meta.ReadVInt32() != fieldNumber)
    {
        throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
    }
    if (meta.ReadByte() != Lucene45DocValuesFormat.NUMERIC)
    {
        throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
    }
    NumericEntry n2 = ReadNumericEntry(meta);
    ordIndexes[fieldNumber] = n2;
}
/// <summary>
/// Advances to the next live document, decoding doc-delta/freq pairs from the freq
/// stream and skipping deleted docs (per <c>liveDocs</c>). Accumulates pending
/// position counts so <c>NextPosition()</c> can skip unconsumed positions later.
/// Returns the doc id, or <c>NO_MORE_DOCS</c> when exhausted.
/// </summary>
public override int NextDoc()
{
    // if (DEBUG) System.out.println("SPR.nextDoc seg=" + segment + " freqIn.fp=" + freqIn.getFilePointer());
    while (true)
    {
        if (ord == limit)
        {
            // if (DEBUG) System.out.println("  return END");
            return (doc = NO_MORE_DOCS);
        }

        ord++;

        // Decode next doc/freq pair
        int code = freqIn.ReadVInt32();

        accum += (int)((uint)code >> 1); // shift off low bit
        if ((code & 1) != 0) // if low bit is set
        {
            freq = 1; // freq is one
        }
        else
        {
            freq = freqIn.ReadVInt32(); // else read freq
        }
        // Track positions owed for this doc so NextPosition() can catch up.
        posPendingCount += freq;

        // Accept the doc unless deletions are present and it is deleted.
        if (liveDocs == null || liveDocs.Get(accum))
        {
            break;
        }
    }

    position = 0;

    // if (DEBUG) System.out.println("  return doc=" + doc);
    return (doc = accum);
}