/// <summary>
/// (Re)positions this enum on the postings of a single term, reading the
/// per-term metadata out of <paramref name="termState"/> and resetting all
/// iteration state. Returns <c>this</c> so callers can chain.
/// </summary>
/// <param name="fieldInfo"> field the term belongs to; its index options decide
/// whether freqs/offsets/payloads are present in the postings </param>
/// <param name="termState"> decoded term dictionary entry holding the freq/skip
/// file offsets and the doc freq for this term </param>
internal virtual DocsEnum Reset(FieldInfo fieldInfo, StandardTermState termState)
{
    // DOCS_ONLY fields store no term freqs; every freq is implicitly 1.
    IndexOmitsTF = fieldInfo.FieldIndexOptions == FieldInfo.IndexOptions.DOCS_ONLY;
    StorePayloads = fieldInfo.HasPayloads();
    StoreOffsets = fieldInfo.FieldIndexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
    FreqOffset = termState.FreqOffset;
    SkipOffset = termState.SkipOffset;

    // TODO: for full enum case (eg segment merging) this
    // seek is unnecessary; maybe we can avoid in such
    // cases
    FreqIn.Seek(termState.FreqOffset);
    Limit = termState.DocFreq;
    Debug.Assert(Limit > 0);
    // Reset iteration state: no doc consumed yet.
    Ord = 0;
    Doc = -1;
    Accum = 0;
    // if (DEBUG) System.out.println(" sde limit=" + limit + " freqFP=" + freqOffset);
    Skipped = false;

    // Reset the doc/freq read-ahead buffer.
    Start = -1;
    Count = 0;
    Freq_Renamed = 1;
    if (IndexOmitsTF)
    {
        // Freqs are never read from disk for this field; pre-fill the buffer with 1s.
        CollectionsHelper.Fill(Freqs, 1);
    }
    MaxBufferedDocId = -1;
    return (this);
}
/// <summary>
/// Re-initializes the match state so that searching restarts at offset
/// <paramref name="base"/>, clearing both lookup tables.
/// </summary>
internal void Reset(int @base)
{
    this.@base = @base;
    NextToUpdate = @base;
    // -1 marks an empty hash slot; 0 marks an empty chain entry.
    for (int slot = 0; slot < HashTable.Length; slot++)
    {
        HashTable[slot] = -1;
    }
    for (int slot = 0; slot < ChainTable.Length; slot++)
    {
        ChainTable[slot] = 0;
    }
}
/// <summary>
/// Builds a string of <paramref name="nTokens"/> tokens by repeatedly calling
/// <c>AddUTF8Token</c>, which appends into the shared <c>buffer</c> and returns
/// the new fill position.
/// </summary>
/// <param name="nTokens"> number of tokens to append </param>
/// <returns> the assembled string (only the filled prefix of the buffer) </returns>
public virtual System.String GetUTF8String(int nTokens)
{
    // Zero the scratch buffer before appending.
    for (int i = 0; i < buffer.Length; i++)
    {
        buffer[i] = (char)0;
    }

    int length = 0;
    for (int token = 0; token < nTokens; token++)
    {
        length = AddUTF8Token(length);
    }
    return new System.String(buffer, 0, length);
}
/// <summary>
/// Per-term initialization of the skip-list writer: records the base file
/// pointers for this term and seeds every skip level with them.
/// </summary>
/// <param name="skipPointer"> file pointer where the skip data starts </param>
/// <param name="freqBasePointer"> base pointer into the freq file for this term </param>
/// <param name="proxBasePointer"> base pointer into the prox file for this term </param>
/// <param name="df"> document frequency of the term </param>
/// <param name="storesPayloads"> whether the current field stores payloads </param>
public void Init(long skipPointer, long freqBasePointer, long proxBasePointer, int df, bool storesPayloads)
{
    base.Init(skipPointer, df);
    CurrentFieldStoresPayloads = storesPayloads;
    LastFreqPointer = freqBasePointer;
    LastProxPointer = proxBasePointer;

    // Every skip level starts at the term's base freq/prox positions,
    // with no payload length recorded yet on any level.
    for (int level = 0; level < FreqPointer_Renamed.Length; level++)
    {
        FreqPointer_Renamed[level] = freqBasePointer;
    }
    for (int level = 0; level < ProxPointer_Renamed.Length; level++)
    {
        ProxPointer_Renamed[level] = proxBasePointer;
    }
    for (int level = 0; level < PayloadLength_Renamed.Length; level++)
    {
        PayloadLength_Renamed[level] = 0;
    }
}
/// <summary>
/// Retrieve the length (in bytes) of the tvd and tvf
/// entries for the next numDocs starting with
/// startDocID. this is used for bulk copying when
/// merging segments, if the field numbers are
/// congruent. Once this returns, the tvf &amp; tvd streams
/// are seeked to the startDocID.
/// </summary>
/// <param name="tvdLengths"> receives the byte length of each doc's tvd entry </param>
/// <param name="tvfLengths"> receives the byte length of each doc's tvf entry </param>
/// <param name="startDocID"> first doc ID to measure </param>
/// <param name="numDocs"> number of consecutive docs to measure </param>
internal void RawDocs(int[] tvdLengths, int[] tvfLengths, int startDocID, int numDocs)
{
    if (Tvx == null)
    {
        // No term vectors in this segment: report zero-length entries.
        CollectionsHelper.Fill(tvdLengths, 0);
        CollectionsHelper.Fill(tvfLengths, 0);
        return;
    }

    // Position tvx at startDocID, then seek tvd/tvf to that doc's start pointers.
    SeekTvx(startDocID);

    long tvdPosition = Tvx.ReadLong();
    Tvd.Seek(tvdPosition);

    long tvfPosition = Tvx.ReadLong();
    Tvf.Seek(tvfPosition);

    long lastTvdPosition = tvdPosition;
    long lastTvfPosition = tvfPosition;

    int count = 0;
    while (count < numDocs)
    {
        // Each entry's length is the distance to the NEXT doc's start pointer.
        int docID = startDocID + count + 1;
        Debug.Assert(docID <= NumTotalDocs);
        if (docID < NumTotalDocs)
        {
            tvdPosition = Tvx.ReadLong();
            tvfPosition = Tvx.ReadLong();
        }
        else
        {
            // Last doc in the segment: its entries run to the end of the files.
            tvdPosition = Tvd.Length();
            tvfPosition = Tvf.Length();
            Debug.Assert(count == numDocs - 1);
        }
        tvdLengths[count] = (int)(tvdPosition - lastTvdPosition);
        tvfLengths[count] = (int)(tvfPosition - lastTvfPosition);
        count++;
        lastTvdPosition = tvdPosition;
        lastTvfPosition = tvfPosition;
    }
}
/// <summary>
/// Resets every skip level back to a clean state for a new term: last skipped
/// doc is 0 and each level's file pointers are re-seeded from the current
/// positions of the doc/pos/pay outputs.
/// </summary>
public override void ResetSkip()
{
    base.ResetSkip();

    // Capture each file pointer once, then seed every level with it.
    long docFilePointer = DocOut.FilePointer;
    for (int level = 0; level < LastSkipDoc.Length; level++)
    {
        LastSkipDoc[level] = 0;
    }
    for (int level = 0; level < LastSkipDocPointer.Length; level++)
    {
        LastSkipDocPointer[level] = docFilePointer;
    }

    if (FieldHasPositions)
    {
        long posFilePointer = PosOut.FilePointer;
        for (int level = 0; level < LastSkipPosPointer.Length; level++)
        {
            LastSkipPosPointer[level] = posFilePointer;
        }
        if (FieldHasPayloads)
        {
            for (int level = 0; level < LastPayloadByteUpto.Length; level++)
            {
                LastPayloadByteUpto[level] = 0;
            }
        }
        // The pay file exists when either offsets or payloads are stored.
        if (FieldHasOffsets || FieldHasPayloads)
        {
            long payFilePointer = PayOut.FilePointer;
            for (int level = 0; level < LastSkipPayPointer.Length; level++)
            {
                LastSkipPayPointer[level] = payFilePointer;
            }
        }
    }
}
/// <summary>
/// Per-term initialization of the skip-list writer: records the doc/pos/pay
/// base file pointers for this term and seeds every skip level with them.
/// </summary>
/// <param name="skipPointer"> file pointer where the skip data starts </param>
/// <param name="docBasePointer"> base pointer into the doc file for this term </param>
/// <param name="posBasePointer"> base pointer into the pos file (0 when the field has no positions) </param>
/// <param name="payBasePointer"> base pointer into the pay file </param>
/// <param name="df"> document frequency of the term </param>
public void Init(long skipPointer, long docBasePointer, long posBasePointer, long payBasePointer, int df)
{
    base.Init(skipPointer, Trim(df));
    LastDocPointer = docBasePointer;
    LastPosPointer = posBasePointer;
    LastPayPointer = payBasePointer;

    for (int level = 0; level < DocPointer_Renamed.Length; level++)
    {
        DocPointer_Renamed[level] = docBasePointer;
    }
    if (PosPointer_Renamed == null)
    {
        // Field has no positions; a non-zero pos base would indicate a bug upstream.
        Debug.Assert(posBasePointer == 0);
    }
    else
    {
        for (int level = 0; level < PosPointer_Renamed.Length; level++)
        {
            PosPointer_Renamed[level] = posBasePointer;
        }
        // The pay pointers only exist when the field stores offsets/payloads.
        if (PayPointer_Renamed != null)
        {
            for (int level = 0; level < PayPointer_Renamed.Length; level++)
            {
                PayPointer_Renamed[level] = payBasePointer;
            }
        }
    }
}
/// <summary>
/// Read the next block of data (<code>For</code> format).
/// </summary>
/// <param name="in"> the input to use to read data </param>
/// <param name="encoded"> a buffer that can be used to store encoded data </param>
/// <param name="decoded"> where to write decoded data </param>
/// <exception cref="IOException"> If there is a low-level I/O error </exception>
public void ReadBlock(IndexInput @in, byte[] encoded, int[] decoded)
{
    // First byte is the bit width used to pack this block.
    int bitsPerValue = @in.ReadByte();
    Debug.Assert(bitsPerValue <= 32, bitsPerValue.ToString());

    if (bitsPerValue == ALL_VALUES_EQUAL)
    {
        // Degenerate block: one vInt value repeated for the whole block.
        int value = @in.ReadVInt();
        for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE; i++)
        {
            decoded[i] = value;
        }
        return;
    }

    // Read the packed payload for this bit width and bulk-decode it.
    int numBytes = EncodedSizes[bitsPerValue];
    @in.ReadBytes(encoded, 0, numBytes);

    PackedInts.Decoder decoder = Decoders[bitsPerValue];
    int iterations = Iterations[bitsPerValue];
    Debug.Assert(iterations * decoder.ByteValueCount() >= Lucene41PostingsFormat.BLOCK_SIZE);
    decoder.Decode(encoded, 0, decoded, 0, iterations);
}
/// <summary>
/// Go to the chunk containing the provided doc ID.
/// </summary>
/// <param name="doc"> target doc ID; must lie past the currently-decoded chunk </param>
/// <exception cref="CorruptIndexException"> if the chunk header read from the
/// stream is inconsistent with the current position or the segment size </exception>
internal void Next(int doc)
{
    // Caller must be moving forward past the currently-decoded chunk.
    Debug.Assert(doc >= DocBase + ChunkDocs, doc + " " + DocBase + " " + ChunkDocs);
    FieldsStream.Seek(OuterInstance.IndexReader.GetStartPointer(doc));

    // Chunk header: base doc ID followed by the number of docs in the chunk.
    int docBase = FieldsStream.ReadVInt();
    int chunkDocs = FieldsStream.ReadVInt();
    if (docBase < this.DocBase + this.ChunkDocs || docBase + chunkDocs > OuterInstance.NumDocs)
    {
        throw new CorruptIndexException("Corrupted: current docBase=" + this.DocBase + ", current numDocs=" + this.ChunkDocs + ", new docBase=" + docBase + ", new numDocs=" + chunkDocs + " (resource=" + FieldsStream + ")");
    }
    this.DocBase = docBase;
    this.ChunkDocs = chunkDocs;

    if (chunkDocs > NumStoredFields.Length)
    {
        // Grow both per-doc arrays together; Oversize leaves headroom for later chunks.
        int newLength = ArrayUtil.Oversize(chunkDocs, 4);
        NumStoredFields = new int[newLength];
        Lengths = new int[newLength];
    }

    if (chunkDocs == 1)
    {
        // Single-doc chunk stores its two values directly as vInts.
        NumStoredFields[0] = FieldsStream.ReadVInt();
        Lengths[0] = FieldsStream.ReadVInt();
    }
    else
    {
        // Per-doc stored-field counts: a bit width of 0 means one shared vInt
        // value for all docs, otherwise a headerless packed stream follows.
        int bitsPerStoredFields = FieldsStream.ReadVInt();
        if (bitsPerStoredFields == 0)
        {
            CollectionsHelper.Fill(NumStoredFields, 0, chunkDocs, FieldsStream.ReadVInt());
        }
        else if (bitsPerStoredFields > 31)
        {
            throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields + " (resource=" + FieldsStream + ")");
        }
        else
        {
            PackedInts.ReaderIterator it = PackedInts.GetReaderIteratorNoHeader(FieldsStream, PackedInts.Format.PACKED, OuterInstance.PackedIntsVersion, chunkDocs, bitsPerStoredFields, 1);
            for (int i = 0; i < chunkDocs; ++i)
            {
                NumStoredFields[i] = (int)it.Next();
            }
        }

        // Per-doc serialized lengths follow, encoded the same way.
        int bitsPerLength = FieldsStream.ReadVInt();
        if (bitsPerLength == 0)
        {
            CollectionsHelper.Fill(Lengths, 0, chunkDocs, FieldsStream.ReadVInt());
        }
        else if (bitsPerLength > 31)
        {
            throw new CorruptIndexException("bitsPerLength=" + bitsPerLength);
        }
        else
        {
            PackedInts.ReaderIterator it = PackedInts.GetReaderIteratorNoHeader(FieldsStream, PackedInts.Format.PACKED, OuterInstance.PackedIntsVersion, chunkDocs, bitsPerLength, 1);
            for (int i = 0; i < chunkDocs; ++i)
            {
                Lengths[i] = (int)it.Next();
            }
        }
    }
}