internal DirectDocValuesConsumer(SegmentWriteState state, string dataCodec, string dataExtension, string metaCodec, string metaExtension)
{
    maxDoc = state.SegmentInfo.DocCount;

    var succeeded = false;
    try
    {
        // Open the data file and stamp it with the codec header.
        var dataFileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, dataExtension);
        data = state.Directory.CreateOutput(dataFileName, state.Context);
        CodecUtil.WriteHeader(data, dataCodec, DirectDocValuesProducer.VERSION_CURRENT);

        // Open the metadata file and stamp it likewise.
        var metaFileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, metaExtension);
        meta = state.Directory.CreateOutput(metaFileName, state.Context);
        CodecUtil.WriteHeader(meta, metaCodec, DirectDocValuesProducer.VERSION_CURRENT);

        succeeded = true;
    }
    finally
    {
        // On failure, release whatever outputs were opened without
        // masking the original exception.
        if (!succeeded)
        {
            IOUtils.CloseWhileHandlingException(this);
        }
    }
}
protected internal FixedIntBlockIndexOutput(IndexOutput output, int fixedBlockSize)
{
    _blockSize = fixedBlockSize;
    OUTPUT = output;
    // Persist the fixed block size up front so readers know how to decode.
    output.WriteVInt(_blockSize);
    BUFFER = new int[_blockSize];
}
public FSTTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter)
{
    var fileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, TERMS_EXTENSION);

    _postingsWriter = postingsWriter;
    _fieldInfos = state.FieldInfos;
    _output = state.Directory.CreateOutput(fileName, state.Context);

    var initialized = false;
    try
    {
        // Stamp the terms file header, then let the postings writer add its own.
        WriteHeader(_output);
        _postingsWriter.Init(_output);
        initialized = true;
    }
    finally
    {
        // Close the freshly created output on failure without masking the exception.
        if (!initialized)
        {
            IOUtils.CloseWhileHandlingException(_output);
        }
    }
}
protected override void Dispose(bool disposing)
{
    if (!disposing)
    {
        return;
    }

    var wroteFooters = false;
    try
    {
        if (meta != null)
        {
            meta.WriteVInt(-1);          // write EOF marker
            CodecUtil.WriteFooter(meta); // write checksum
        }
        if (data != null)
        {
            CodecUtil.WriteFooter(data);
        }
        wroteFooters = true;
    }
    finally
    {
        // Close cleanly on success; otherwise close without masking the exception.
        if (wroteFooters)
        {
            IOUtils.Close(data, meta);
        }
        else
        {
            IOUtils.CloseWhileHandlingException(data, meta);
        }
        data = meta = null;
    }
}
protected override void Dispose(bool disposing)
{
    // BUG FIX: the guard was `data == null || disposing`, which returned
    // whenever Dispose(true) was called — so the END marker and checksum
    // were never written and the file was left open. It must bail out only
    // when there is nothing to write or when NOT disposing, matching the
    // sibling overload of this writer elsewhere in the codebase.
    if (data == null || !disposing)
    {
        return;
    }
    var success = false;
    try
    {
        Debug.Assert(_fieldsSeen.Count > 0); // java : sheisty to do this here?
        SimpleTextUtil.Write(data, END);
        SimpleTextUtil.WriteNewline(data);
        SimpleTextUtil.WriteChecksum(data, scratch);
        success = true;
    }
    finally
    {
        // Close cleanly on success; on failure, close without masking the exception.
        if (success)
        {
            IOUtils.Close(data);
        }
        else
        {
            IOUtils.CloseWhileHandlingException(data);
        }
        data = null;
    }
}
public static void WriteChecksum(IndexOutput output, BytesRef scratch)
{
    // Pad with zeros so different checksum values use the
    // same number of bytes
    // (BaseIndexFileFormatTestCase.testMergeStability cares):
    // BUG FIX: "{0:D}" performs no padding at all; "{0:D20}" zero-pads the
    // decimal value to the 20 digits needed for any non-negative 64-bit
    // checksum, mirroring Java's String.format("%020d", checksum).
    var checksum = string.Format("{0:D20}", output.Checksum);
    Write(output, CHECKSUM);
    Write(output, checksum, scratch);
    WriteNewline(output);
}
protected override void Dispose(bool disposing)
{
    if (!disposing)
    {
        return;
    }

    try
    {
        IOUtils.Close(_output);
    }
    finally
    {
        // Null the field even if Close throws so a second Dispose is a no-op.
        _output = null;
    }
}
public SimpleTextStoredFieldsWriter(Directory directory, string segment, IOContext context)
{
    _directory = directory;
    _segment = segment;

    var opened = false;
    try
    {
        var fileName = IndexFileNames.SegmentFileName(segment, "", FIELDS_EXTENSION);
        _output = directory.CreateOutput(fileName, context);
        opened = true;
    }
    finally
    {
        // Abort() tears down partial state if the output could not be created.
        if (!opened)
        {
            Abort();
        }
    }
}
public SimpleTextTermVectorsWriter(Directory directory, string segment, IOContext context)
{
    _directory = directory;
    _segment = segment;

    var opened = false;
    try
    {
        var fileName = IndexFileNames.SegmentFileName(segment, "", VECTORS_EXTENSION);
        _output = directory.CreateOutput(fileName, context);
        opened = true;
    }
    finally
    {
        // Abort() tears down partial state if the output could not be created.
        if (!opened)
        {
            Abort();
        }
    }
}
public override void WriteLiveDocs(MutableBits bits, Directory dir, SegmentCommitInfo info, int newDelCount, IOContext context)
{
    var liveBits = ((SimpleTextBits)bits).BITS;
    var size = bits.Length();
    var scratch = new BytesRef();
    var fileName = IndexFileNames.FileNameFromGeneration(info.Info.Name, LIVEDOCS_EXTENSION, info.NextDelGen);

    IndexOutput output = null;
    var completed = false;
    try
    {
        output = dir.CreateOutput(fileName, context);

        // Header line: total number of bits.
        SimpleTextUtil.Write(output, SIZE);
        SimpleTextUtil.Write(output, Convert.ToString(size), scratch);
        SimpleTextUtil.WriteNewline(output);

        // One DOC line per live (set) bit.
        for (var doc = liveBits.NextSetBit(0); doc >= 0; doc = liveBits.NextSetBit(doc + 1))
        {
            SimpleTextUtil.Write(output, DOC);
            SimpleTextUtil.Write(output, Convert.ToString(doc), scratch);
            SimpleTextUtil.WriteNewline(output);
        }

        // Trailer: END marker plus checksum line.
        SimpleTextUtil.Write(output, END);
        SimpleTextUtil.WriteNewline(output);
        SimpleTextUtil.WriteChecksum(output, scratch);
        completed = true;
    }
    finally
    {
        if (completed)
        {
            IOUtils.Close(output);
        }
        else
        {
            IOUtils.CloseWhileHandlingException(output);
        }
    }
}
public override void Dispose()
{
    if (_output == null)
    {
        return; // already disposed
    }

    IOException priorException = null;
    try
    {
        // Write the field summary: count, then per-field stats and the
        // serialized dictionary, followed by the trailer and footer.
        var dirStart = _output.FilePointer;
        _output.WriteVInt(_fields.Count);
        foreach (var field in _fields)
        {
            _output.WriteVInt(field.FieldInfo.Number);
            _output.WriteVLong(field.NumTerms);
            if (field.FieldInfo.FieldIndexOptions != IndexOptions.DOCS_ONLY)
            {
                _output.WriteVLong(field.SumTotalTermFreq);
            }
            _output.WriteVLong(field.SumDocFreq);
            _output.WriteVInt(field.DocCount);
            _output.WriteVInt(field.LongsSize);
            field.Dict.Save(_output);
        }
        WriteTrailer(_output, dirStart);
        CodecUtil.WriteFooter(_output);
    }
    catch (IOException e)
    {
        priorException = e;
    }
    finally
    {
        // Close everything, re-throwing the captured exception if any.
        IOUtils.CloseWhileHandlingException(priorException, _output, _postingsWriter);
        _output = null;
    }
}
protected override void Dispose(bool disposing)
{
    if (!disposing)
    {
        return;
    }

    // TODO: add a finish() at least to PushBase? DV too...?
    var footersWritten = false;
    try
    {
        if (DocOut != null)
        {
            CodecUtil.WriteFooter(DocOut);
        }
        if (PosOut != null)
        {
            CodecUtil.WriteFooter(PosOut);
        }
        if (PayOut != null)
        {
            CodecUtil.WriteFooter(PayOut);
        }
        footersWritten = true;
    }
    finally
    {
        if (footersWritten)
        {
            IOUtils.Close(DocOut, PosOut, PayOut);
        }
        else
        {
            IOUtils.CloseWhileHandlingException(DocOut, PosOut, PayOut);
        }
        DocOut = PosOut = PayOut = null;
    }
}
private static void WriteTrailer(IndexOutput output, long dirStart)
{
    // The trailer records where the field-summary directory begins.
    output.WriteLong(dirStart);
}
private void WriteHeader(IndexOutput output)
{
    // Stamp the output with this terms codec's name and current version.
    CodecUtil.WriteHeader(output, TERMS_CODEC_NAME, TERMS_VERSION_CURRENT);
}
protected override void Dispose(bool disposing)
{
    // Nothing to do if already closed or if called from a finalizer.
    if (data == null || !disposing)
    {
        return;
    }

    var wroteTrailer = false;
    try
    {
        Debug.Assert(_fieldsSeen.Count > 0); // java : sheisty to do this here?
        SimpleTextUtil.Write(data, END);
        SimpleTextUtil.WriteNewline(data);
        SimpleTextUtil.WriteChecksum(data, scratch);
        wroteTrailer = true;
    }
    finally
    {
        if (wroteTrailer)
        {
            IOUtils.Close(data);
        }
        else
        {
            IOUtils.CloseWhileHandlingException(data);
        }
        data = null;
    }
}
private readonly HashSet<string> _fieldsSeen = new HashSet<string>(); // for asserting

public SimpleTextDocValuesWriter(SegmentWriteState state, string ext)
{
    // Open the single data output for this segment/suffix/extension.
    var fileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, ext);
    data = state.Directory.CreateOutput(fileName, state.Context);
    numDocs = state.SegmentInfo.DocCount;
}
public FSTOrdTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter)
{
    var indexFileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, TERMS_INDEX_EXTENSION);
    var blockFileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, TERMS_BLOCK_EXTENSION);

    this.postingsWriter = postingsWriter;
    fieldInfos = state.FieldInfos;

    var initialized = false;
    try
    {
        // Create both outputs, stamp headers, then hand the block output
        // to the postings writer for its own header.
        indexOut = state.Directory.CreateOutput(indexFileName, state.Context);
        blockOut = state.Directory.CreateOutput(blockFileName, state.Context);
        WriteHeader(indexOut);
        WriteHeader(blockOut);
        this.postingsWriter.Init(blockOut);
        initialized = true;
    }
    finally
    {
        // On failure, close whatever was opened without masking the exception.
        if (!initialized)
        {
            IOUtils.CloseWhileHandlingException(indexOut, blockOut);
        }
    }
}
protected override void Dispose(bool disposing)
{
    if (!disposing)
    {
        return;
    }
    try
    {
        IOUtils.Close(_output);
    }
    finally
    {
        _output = null; // makes subsequent Dispose calls no-ops
    }
}
public override void Init(IndexOutput termsOut)
{
    // Stamp the terms dictionary with this codec's header, then record
    // the block size so readers can decode postings blocks.
    CodecUtil.WriteHeader(termsOut, TERMS_CODEC, VERSION_CURRENT);
    termsOut.WriteVInt(Lucene41PostingsFormat.BLOCK_SIZE);
}
// TODO what Var-Var codecs exist in practice... and what are there blocksizes like?
// if its less than 128 we should set that as max and use byte?

/// <summary>
/// NOTE: maxBlockSize must be the maximum block size
/// plus the max non-causal lookahead of your codec. EG Simple9
/// requires lookahead=1 because on seeing the Nth value
/// it knows it must now encode the N-1 values before it.
/// </summary>
protected internal VariableIntBlockIndexOutput(IndexOutput output, int maxBlockSize)
{
    OUTPUT = output;
    // Persist the maximum block size so readers can allocate decode buffers.
    output.WriteInt(maxBlockSize);
}
public override void Dispose()
{
    if (_output == null)
    {
        return;
    }

    IOException caught = null;
    try
    {
        var dirStart = _output.FilePointer; // remembered for the trailer below

        // Field summary.
        _output.WriteVInt(_fields.Count);
        foreach (var field in _fields)
        {
            _output.WriteVInt(field.FieldInfo.Number);
            _output.WriteVLong(field.NumTerms);
            // SumTotalTermFreq is only written when frequencies are indexed.
            if (field.FieldInfo.FieldIndexOptions != IndexOptions.DOCS_ONLY)
            {
                _output.WriteVLong(field.SumTotalTermFreq);
            }
            _output.WriteVLong(field.SumDocFreq);
            _output.WriteVInt(field.DocCount);
            _output.WriteVInt(field.LongsSize);
            field.Dict.Save(_output);
        }
        WriteTrailer(_output, dirStart);
        CodecUtil.WriteFooter(_output);
    }
    catch (IOException ex)
    {
        caught = ex;
    }
    finally
    {
        IOUtils.CloseWhileHandlingException(caught, _output, _postingsWriter);
        _output = null;
    }
}
public override void Dispose()
{
    if (blockOut == null)
    {
        return;
    }

    IOException priorException = null;
    try
    {
        var blockDirStart = blockOut.FilePointer;

        // Field summary: per-field stats, the file pointers of each metadata
        // stream, the streams themselves, and the dictionary saved to the
        // index output.
        blockOut.WriteVInt(_fields.Count);
        foreach (var field in _fields)
        {
            blockOut.WriteVInt(field.FieldInfo.Number);
            blockOut.WriteVLong(field.NumTerms);
            if (field.FieldInfo.FieldIndexOptions != IndexOptions.DOCS_ONLY)
            {
                blockOut.WriteVLong(field.SumTotalTermFreq);
            }
            blockOut.WriteVLong(field.SumDocFreq);
            blockOut.WriteVInt(field.DocCount);
            blockOut.WriteVInt(field.LongsSize);
            blockOut.WriteVLong(field.StatsOut.FilePointer);
            blockOut.WriteVLong(field.MetaLongsOut.FilePointer);
            blockOut.WriteVLong(field.MetaBytesOut.FilePointer);
            field.SkipOut.WriteTo(blockOut);
            field.StatsOut.WriteTo(blockOut);
            field.MetaLongsOut.WriteTo(blockOut);
            field.MetaBytesOut.WriteTo(blockOut);
            field.Dict.Save(indexOut);
        }
        WriteTrailer(blockOut, blockDirStart);
        CodecUtil.WriteFooter(indexOut);
        CodecUtil.WriteFooter(blockOut);
    }
    catch (IOException ex)
    {
        priorException = ex;
    }
    finally
    {
        IOUtils.CloseWhileHandlingException(priorException, blockOut, indexOut, postingsWriter);
        blockOut = null;
    }
}
private readonly HashSet<string> _fieldsSeen = new HashSet<string>(); // for asserting

public SimpleTextDocValuesWriter(SegmentWriteState state, string ext)
{
    var fileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, ext);
    // Single data output for this segment; doc count cached for later checks.
    data = state.Directory.CreateOutput(fileName, state.Context);
    numDocs = state.SegmentInfo.DocCount;
}
/// <summary>
/// Creates a postings writer with the specified PackedInts overhead ratio </summary>
// TODO: does this ctor even make sense?
public Lucene41PostingsWriter(SegmentWriteState state, float acceptableOverheadRatio) : base()
{
    // The .doc output is always created; .pos/.pay only when the segment needs them.
    DocOut = state.Directory.CreateOutput(IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, Lucene41PostingsFormat.DOC_EXTENSION), state.Context);
    IndexOutput posOut = null;
    IndexOutput payOut = null;
    bool success = false;
    try
    {
        CodecUtil.WriteHeader(DocOut, DOC_CODEC, VERSION_CURRENT);
        ForUtil = new ForUtil(acceptableOverheadRatio, DocOut);
        if (state.FieldInfos.HasProx())
        {
            // Positions are indexed: allocate the delta buffer and open the .pos file.
            PosDeltaBuffer = new int[ForUtil.MAX_DATA_SIZE];
            posOut = state.Directory.CreateOutput(IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, Lucene41PostingsFormat.POS_EXTENSION), state.Context);
            CodecUtil.WriteHeader(posOut, POS_CODEC, VERSION_CURRENT);
            if (state.FieldInfos.HasPayloads())
            {
                PayloadBytes = new byte[128];
                PayloadLengthBuffer = new int[ForUtil.MAX_DATA_SIZE];
            }
            else
            {
                PayloadBytes = null;
                PayloadLengthBuffer = null;
            }
            if (state.FieldInfos.HasOffsets())
            {
                OffsetStartDeltaBuffer = new int[ForUtil.MAX_DATA_SIZE];
                OffsetLengthBuffer = new int[ForUtil.MAX_DATA_SIZE];
            }
            else
            {
                OffsetStartDeltaBuffer = null;
                OffsetLengthBuffer = null;
            }
            // The .pay file holds payloads and/or offsets; open it only if either exists.
            if (state.FieldInfos.HasPayloads() || state.FieldInfos.HasOffsets())
            {
                payOut = state.Directory.CreateOutput(IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, Lucene41PostingsFormat.PAY_EXTENSION), state.Context);
                CodecUtil.WriteHeader(payOut, PAY_CODEC, VERSION_CURRENT);
            }
        }
        else
        {
            // Docs-only segment: no position/payload/offset buffers needed.
            PosDeltaBuffer = null;
            PayloadLengthBuffer = null;
            OffsetStartDeltaBuffer = null;
            OffsetLengthBuffer = null;
            PayloadBytes = null;
        }
        this.PayOut = payOut;
        this.PosOut = posOut;
        success = true;
    }
    finally
    {
        // On any failure above, close whatever outputs were opened
        // without masking the original exception.
        if (!success)
        {
            IOUtils.CloseWhileHandlingException(DocOut, posOut, payOut);
        }
    }
    DocDeltaBuffer = new int[ForUtil.MAX_DATA_SIZE];
    FreqBuffer = new int[ForUtil.MAX_DATA_SIZE];
    // TODO: should we try skipping every 2/4 blocks...?
    SkipWriter = new Lucene41SkipWriter(MaxSkipLevels, Lucene41PostingsFormat.BLOCK_SIZE, state.SegmentInfo.DocCount, DocOut, posOut, payOut);
    Encoded = new byte[ForUtil.MAX_ENCODED_SIZE];
}
/// <summary>
/// Creates a postings writer with the specified PackedInts overhead ratio </summary>
// TODO: does this ctor even make sense?
public Lucene41PostingsWriter(SegmentWriteState state, float acceptableOverheadRatio) : base()
{
    // The .doc output is always created; .pos/.pay only when the segment needs them.
    DocOut = state.Directory.CreateOutput(IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, Lucene41PostingsFormat.DOC_EXTENSION), state.Context);
    IndexOutput posOut = null;
    IndexOutput payOut = null;
    bool success = false;
    try
    {
        CodecUtil.WriteHeader(DocOut, DOC_CODEC, VERSION_CURRENT);
        ForUtil = new ForUtil(acceptableOverheadRatio, DocOut);
        if (state.FieldInfos.HasProx())
        {
            // Positions are indexed: allocate the delta buffer and open the .pos file.
            PosDeltaBuffer = new int[ForUtil.MAX_DATA_SIZE];
            posOut = state.Directory.CreateOutput(IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, Lucene41PostingsFormat.POS_EXTENSION), state.Context);
            CodecUtil.WriteHeader(posOut, POS_CODEC, VERSION_CURRENT);
            if (state.FieldInfos.HasPayloads())
            {
                // CONSISTENCY FIX: was `new sbyte[128]`, disagreeing with the
                // duplicate of this constructor and with `Encoded`, which use
                // byte[]. Payload bytes are raw octets; byte[] is correct.
                PayloadBytes = new byte[128];
                PayloadLengthBuffer = new int[ForUtil.MAX_DATA_SIZE];
            }
            else
            {
                PayloadBytes = null;
                PayloadLengthBuffer = null;
            }
            if (state.FieldInfos.HasOffsets())
            {
                OffsetStartDeltaBuffer = new int[ForUtil.MAX_DATA_SIZE];
                OffsetLengthBuffer = new int[ForUtil.MAX_DATA_SIZE];
            }
            else
            {
                OffsetStartDeltaBuffer = null;
                OffsetLengthBuffer = null;
            }
            // The .pay file holds payloads and/or offsets; open it only if either exists.
            if (state.FieldInfos.HasPayloads() || state.FieldInfos.HasOffsets())
            {
                payOut = state.Directory.CreateOutput(IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, Lucene41PostingsFormat.PAY_EXTENSION), state.Context);
                CodecUtil.WriteHeader(payOut, PAY_CODEC, VERSION_CURRENT);
            }
        }
        else
        {
            // Docs-only segment: no position/payload/offset buffers needed.
            PosDeltaBuffer = null;
            PayloadLengthBuffer = null;
            OffsetStartDeltaBuffer = null;
            OffsetLengthBuffer = null;
            PayloadBytes = null;
        }
        this.PayOut = payOut;
        this.PosOut = posOut;
        success = true;
    }
    finally
    {
        // On any failure above, close whatever outputs were opened
        // without masking the original exception.
        if (!success)
        {
            IOUtils.CloseWhileHandlingException(DocOut, posOut, payOut);
        }
    }
    DocDeltaBuffer = new int[ForUtil.MAX_DATA_SIZE];
    FreqBuffer = new int[ForUtil.MAX_DATA_SIZE];
    // TODO: should we try skipping every 2/4 blocks...?
    SkipWriter = new Lucene41SkipWriter(MaxSkipLevels, Lucene41PostingsFormat.BLOCK_SIZE, state.SegmentInfo.DocCount, DocOut, posOut, payOut);
    Encoded = new byte[ForUtil.MAX_ENCODED_SIZE];
}