/// <summary>
/// Reads this term state's file pointer from the index input.
/// An absolute entry carries the full file pointer; a relative entry
/// carries only a delta from the previously read position.
/// </summary>
public override void Read(DataInput indexIn, bool absolute)
{
    // ReadVLong() is consumed exactly once on either path, so hoist it.
    long encoded = indexIn.ReadVLong();
    if (absolute)
    {
        fp = encoded;
    }
    else
    {
        fp += encoded;
    }
}
/// <summary>
/// Restores this lookup's state from a previously stored stream: reads the
/// entry count, then deserializes the shared FST and rebuilds both the
/// higher-weights-first and normal completion views over it.
/// Always returns <c>true</c>.
/// </summary>
public override bool Load(DataInput input)
{
    // NOTE(review): locks on 'this' — kept as-is for compatibility; a private
    // gate object would be preferable but requires a field outside this method.
    lock (this)
    {
        count = input.ReadVLong();

        // Both completion views share a single FST instance.
        var weighted = new FSTCompletion(new FST<object>(input, NoOutputs.Singleton));
        this.higherWeightsCompletion = weighted;
        this.normalCompletion = new FSTCompletion(weighted.FST, false, exactMatchFirst);

        return true;
    }
}
/// <summary>
/// Reads this term state's position (<c>upto</c>) and file pointer from the
/// index input. Absolute entries store both values directly. Relative entries
/// store a single vInt whose low bit selects the encoding: when set, the
/// remaining bits are a delta to <c>upto</c> within the same block; when
/// clear, they are the new absolute <c>upto</c> and an fp delta follows.
/// </summary>
public override void Read(DataInput indexIn, bool absolute)
{
    if (absolute)
    {
        upto = indexIn.ReadVInt();
        fp = indexIn.ReadVLong();
        return;
    }

    int code = indexIn.ReadVInt();
    if ((code & 1) != 0)
    {
        // Same block: payload (code >>> 1) is an upto delta.
        upto += (int)((uint)code >> 1);
    }
    else
    {
        // New block: payload is the absolute upto, then an fp delta follows.
        upto = (int)((uint)code >> 1);
        fp += indexIn.ReadVLong();
    }

    // TODO (upstream): cannot assert upto < maxBlockSize here because
    // non-causal int encoders can have upto beyond the buffer size.
    //assert upto < maxBlockSize: "upto=" + upto + " max=" + maxBlockSize;
}
/// <summary>
/// Restores this lookup from a previously stored stream: reads the entry
/// count, then rebuilds the ternary tree recursively starting from a fresh
/// root node. Always returns <c>true</c>.
/// </summary>
public override bool Load(DataInput input)
{
    // NOTE(review): locks on 'this' — kept as-is for compatibility; a private
    // gate object would be preferable but requires a field outside this method.
    lock (this)
    {
        count = input.ReadVLong();

        // Root is published before deserialization, matching the original
        // ordering (a failed read leaves an empty, not stale, tree).
        root = new TernaryTreeNode();
        ReadRecursively(input, root);

        return true;
    }
}
/// <summary>
/// Restores this lookup from a previously stored stream: reads the entry
/// count, deserializes the ternary search trie into a fresh root node, and
/// only then publishes that root on the trie. Always returns <c>true</c>.
/// </summary>
public override bool Load(DataInput input)
{
    // NOTE(review): unlike the sibling Load implementations, this one is not
    // synchronized — presumably intentional; confirm against callers.
    count = input.ReadVLong();

    // Build into a detached root first; trie.Root is swapped in atomically
    // at the end, matching the original ordering.
    var newRoot = new JaspellTernarySearchTrie.TSTNode(trie, '\0', null);
    ReadRecursively(input, newRoot);
    trie.Root = newRoot;

    return true;
}
/// <summary>
/// Decodes a term's metadata for the Pulsing postings format. Small terms
/// (postings count &lt;= <c>maxPositions</c>) have their postings inlined in
/// the terms dictionary as a raw byte blob, which is read here but decoded
/// lazily; larger terms delegate to the wrapped postings reader.
/// </summary>
/// <param name="empty">Metadata longs array; the Pulsing format stores none, so this must be empty.</param>
/// <param name="input">Stream positioned at this term's metadata.</param>
/// <param name="fieldInfo">Field being decoded (index options decide TF vs. docFreq counting).</param>
/// <param name="_termState">Target state; must be a <see cref="PulsingTermState"/>.</param>
/// <param name="absolute">Whether the encoded values are absolute or deltas.</param>
public override void DecodeTerm(long[] empty, DataInput input, FieldInfo fieldInfo, BlockTermState _termState, bool absolute)
{
    PulsingTermState termState = (PulsingTermState)_termState;

    // BUGFIX: was "Debug.Debug.Assert((empty.Length == 0);" — a duplicated
    // "Debug." qualifier plus an unbalanced parenthesis; it did not compile.
    Debug.Assert(empty.Length == 0);

    termState.Absolute = termState.Absolute || absolute;

    // If we have positions, its total TF, otherwise its computed based on docFreq.
    // TODO Double check this is right..
    long count = FieldInfo.IndexOptions_e.DOCS_AND_FREQS_AND_POSITIONS.CompareTo(fieldInfo.IndexOptions) <= 0 ? termState.TotalTermFreq : termState.DocFreq;
    //System.out.println("  count=" + count + " threshold=" + maxPositions);

    if (count <= maxPositions)
    {
        // Inlined into terms dict -- just read the byte[] blob in,
        // but don't decode it now (we only decode when a DocsEnum
        // or D&PEnum is pulled):
        termState.PostingsSize = input.ReadVInt();
        if (termState.Postings == null || termState.Postings.Length < termState.PostingsSize)
        {
            termState.Postings = new byte[ArrayUtil.Oversize(termState.PostingsSize, 1)];
        }
        // TODO: sort of silly to copy from one big byte[]
        // (the blob holding all inlined terms' blobs for
        // current term block) into another byte[] (just the
        // blob for this term)...
        input.ReadBytes(termState.Postings, 0, termState.PostingsSize);
        //System.out.println("  inlined bytes=" + termState.postingsSize);
        termState.Absolute = termState.Absolute || absolute;
    }
    else
    {
        // Not inlined: read the metadata longs and delegate to the wrapped reader.
        int longsSize = fields == null ? 0 : fields[fieldInfo.Number];
        if (termState.Longs == null)
        {
            termState.Longs = new long[longsSize];
        }
        for (int i = 0; i < longsSize; i++)
        {
            termState.Longs[i] = input.ReadVLong();
        }
        termState.PostingsSize = -1;
        termState.WrappedTermState.DocFreq = termState.DocFreq;
        termState.WrappedTermState.TotalTermFreq = termState.TotalTermFreq;
        _wrappedPostingsReader.DecodeTerm(termState.Longs, input, fieldInfo, termState.WrappedTermState, termState.Absolute);
        termState.Absolute = false;
    }
}
/// <summary>
/// Reads this term state's position (<c>upto</c>) and file pointer from the
/// index input. Absolute entries store both values directly. Relative entries
/// store a single vInt whose low bit selects the encoding: when set, the
/// remaining bits are a delta to <c>upto</c> within the same block; when
/// clear, they are the new absolute <c>upto</c> and an fp delta follows.
/// </summary>
public override void Read(DataInput indexIn, bool absolute)
{
    if (absolute)
    {
        upto = indexIn.ReadVInt();
        fp = indexIn.ReadVLong();
        return;
    }

    int code = indexIn.ReadVInt();
    if ((code & 1) != 0)
    {
        // Same block: payload (code >>> 1) is an upto delta.
        upto += (int)((uint)code >> 1);
    }
    else
    {
        // New block: payload is the absolute upto, then an fp delta follows.
        upto = (int)((uint)code >> 1);
        fp += indexIn.ReadVLong();
    }

    Debug.Assert(upto < outerInstance.blockSize);
}
/// <summary>
/// Backward-compatible (pre-metadata-array) term decoding for the Lucene 4.1
/// postings format: all file-pointer deltas are read inline from the stream
/// rather than arriving via the metadata longs array.
/// NOTE: the sequence of stream reads below is the on-disk format — do not
/// reorder any ReadVInt/ReadVLong call.
/// </summary>
private void _decodeTerm(DataInput @in, FieldInfo fieldInfo, Lucene41PostingsWriter.IntBlockTermState termState)
{
    bool hasPositions = fieldInfo.FieldIndexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
    bool hasOffsets = fieldInfo.FieldIndexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
    bool hasPayloads = fieldInfo.HasPayloads();

    // A single-doc term stores the doc id directly instead of a doc-file pointer.
    if (termState.DocFreq == 1)
    {
        termState.SingletonDocID = @in.ReadVInt();
    }
    else
    {
        termState.SingletonDocID = -1;
        termState.DocStartFP += @in.ReadVLong();
    }

    if (hasPositions)
    {
        termState.PosStartFP += @in.ReadVLong();

        // Only terms whose positions spill past one block record a last-block offset.
        termState.LastPosBlockOffset = termState.TotalTermFreq > Lucene41PostingsFormat.BLOCK_SIZE
            ? @in.ReadVLong()
            : -1;

        if ((hasPayloads || hasOffsets) && termState.TotalTermFreq >= Lucene41PostingsFormat.BLOCK_SIZE)
        {
            termState.PayStartFP += @in.ReadVLong();
        }
    }

    // Skip data exists only when the doc list spans more than one block.
    termState.SkipOffset = termState.DocFreq > Lucene41PostingsFormat.BLOCK_SIZE
        ? @in.ReadVLong()
        : -1;
}
/// <summary>
/// Decodes a term's metadata for the Lucene 4.1 postings format. File-pointer
/// deltas arrive in <paramref name="longs"/> (metadata array); remaining
/// values are read inline from the stream. Older segments (before
/// VERSION_META_ARRAY) fall back to <c>_decodeTerm</c>.
/// NOTE: the sequence of stream reads is the on-disk format — do not reorder.
/// </summary>
public override void DecodeTerm(long[] longs, DataInput @in, FieldInfo fieldInfo, BlockTermState _termState, bool absolute)
{
    var termState = (Lucene41PostingsWriter.IntBlockTermState)_termState;

    bool hasPositions = fieldInfo.FieldIndexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
    bool hasOffsets = fieldInfo.FieldIndexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
    bool hasPayloads = fieldInfo.HasPayloads();

    // Absolute entries restart the delta chain from zero.
    if (absolute)
    {
        termState.DocStartFP = 0;
        termState.PosStartFP = 0;
        termState.PayStartFP = 0;
    }

    // Backward compatibility: old segments encode everything inline.
    if (Version < Lucene41PostingsWriter.VERSION_META_ARRAY)
    {
        _decodeTerm(@in, fieldInfo, termState);
        return;
    }

    // File-pointer deltas come from the metadata longs array.
    termState.DocStartFP += longs[0];
    if (hasPositions)
    {
        termState.PosStartFP += longs[1];
        if (hasOffsets || hasPayloads)
        {
            termState.PayStartFP += longs[2];
        }
    }

    // A single-doc term stores the doc id directly instead of a doc-file pointer.
    termState.SingletonDocID = termState.DocFreq == 1 ? @in.ReadVInt() : -1;

    if (hasPositions)
    {
        // Only terms whose positions spill past one block record a last-block offset.
        termState.LastPosBlockOffset = termState.TotalTermFreq > Lucene41PostingsFormat.BLOCK_SIZE
            ? @in.ReadVLong()
            : -1;
    }

    // Skip data exists only when the doc list spans more than one block.
    termState.SkipOffset = termState.DocFreq > Lucene41PostingsFormat.BLOCK_SIZE
        ? @in.ReadVLong()
        : -1;
}