/// <summary>
/// Creates the data/metadata outputs for this segment and stamps each with its
/// codec header. If anything fails before both headers are written, this consumer
/// is closed (suppressing secondary exceptions) so the partially created outputs
/// are released.
/// </summary>
internal MemoryDocValuesConsumer(SegmentWriteState state, string dataCodec, string dataExtension, string metaCodec, string metaExtension, float acceptableOverheadRatio)
{
    this.acceptableOverheadRatio = acceptableOverheadRatio;
    maxDoc = state.SegmentInfo.DocCount;
    bool initialized = false;
    try
    {
        string dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, dataExtension);
        data = state.Directory.CreateOutput(dataName, state.Context);
        CodecUtil.WriteHeader(data, dataCodec, MemoryDocValuesProducer.VERSION_CURRENT);

        string metaName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, metaExtension);
        meta = state.Directory.CreateOutput(metaName, state.Context);
        CodecUtil.WriteHeader(meta, metaCodec, MemoryDocValuesProducer.VERSION_CURRENT);

        initialized = true;
    }
    finally
    {
        if (!initialized)
        {
            // Close whatever was opened without masking the original exception.
            IOUtils.CloseWhileHandlingException(this);
        }
    }
}
// TODO: maybe this int[] should instead be the output to the FST...
/// <summary>
/// Writes the target map (source id -> word ids) to <paramref name="filename"/>.
/// The main array is delta-encoded: each entry stores <c>val - prev</c> shifted
/// left one bit, with the low bit set when the entry begins a new source id's
/// list (i.e. its offset matches <c>targetMapOffsets[sourceId]</c>). The reader
/// reconstructs both the values and the per-source offsets from that flag bit.
/// </summary>
protected virtual void WriteTargetMap(string filename)
{
    //new File(filename).getParentFile().mkdirs();
    System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
    using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write))
    {
        DataOutput @out = new OutputStreamDataOutput(os);
        CodecUtil.WriteHeader(@out, BinaryDictionary.TARGETMAP_HEADER, BinaryDictionary.VERSION);
        int numSourceIds = lastSourceId + 1;
        @out.WriteVInt32(targetMapEndOffset); // <-- size of main array
        @out.WriteVInt32(numSourceIds + 1); // <-- size of offset array (+ 1 more entry)
        int prev = 0, sourceId = 0;
        for (int ofs = 0; ofs < targetMapEndOffset; ofs++)
        {
            int val = targetMap[ofs], delta = val - prev;
            // Deltas must be non-negative for the shifted encoding below to round-trip.
            Debug.Assert(delta >= 0);
            if (ofs == targetMapOffsets[sourceId])
            {
                // Low bit 1 marks the start of the next source id's target list.
                @out.WriteVInt32((delta << 1) | 0x01);
                sourceId++;
            }
            else
            {
                @out.WriteVInt32((delta << 1));
            }
            prev += delta;
        }
        // Every source id must have been consumed exactly once by the flag bits above.
        Debug.Assert(sourceId == numSourceIds, "sourceId:" + sourceId + " != numSourceIds:" + numSourceIds);
    }
}
/// <summary>
/// Writes the connection-cost matrix to a file under <paramref name="baseDir"/>:
/// codec header, forward/backward sizes, then every cost as a zigzag-encoded
/// VInt delta from the previously written cost.
/// </summary>
public void Write(string baseDir)
{
    // LUCENENET specific: we don't need to do a "classpath" output directory, since we
    // are changing the implementation to read files dynamically instead of making the
    // user recompile with the new files.
    // BUGFIX: use ConnectionCosts.FILENAME_SUFFIX (as the commented-out original Java
    // line did), not CharacterDefinition.FILENAME_SUFFIX — the ConnectionCosts reader
    // resolves this file with its own suffix constant.
    string filename = System.IO.Path.Combine(baseDir, typeof(ConnectionCosts).Name + ConnectionCosts.FILENAME_SUFFIX);
    //new File(filename).getParentFile().mkdirs();
    System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
    using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write))
    {
        DataOutput @out = new OutputStreamDataOutput(os);
        CodecUtil.WriteHeader(@out, ConnectionCosts.HEADER, ConnectionCosts.VERSION);
        @out.WriteVInt32(forwardSize);
        @out.WriteVInt32(backwardSize);
        int last = 0;
        Debug.Assert(costs.Length == backwardSize);
        foreach (short[] a in costs)
        {
            Debug.Assert(a.Length == forwardSize);
            for (int i = 0; i < a.Length; i++)
            {
                int delta = (int)a[i] - last;
                // Zigzag encoding: small positive and negative deltas both stay
                // in the low-value VInt range.
                @out.WriteVInt32((delta >> 31) ^ (delta << 1));
                last = a[i];
            }
        }
    }
}
/// <summary>
/// Writes the buffered dictionary bytes to <paramref name="filename"/>:
/// codec header, byte count, then the raw buffer contents.
/// </summary>
protected virtual void WriteDictionary(string filename)
{
    // Make sure the destination directory exists before opening the file
    // (new File(filename).getParentFile().mkdirs() in the original Java).
    System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
    using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write))
    {
        DataOutput @out = new OutputStreamDataOutput(os);
        CodecUtil.WriteHeader(@out, BinaryDictionary.DICT_HEADER, BinaryDictionary.VERSION);
        @out.WriteVInt32(m_buffer.Position);
        // Flip: position -> 0, limit -> previous position, so we drain exactly
        // the bytes written so far.
        m_buffer.Flip();
        while (m_buffer.HasRemaining)
        {
            @out.WriteByte(m_buffer.Get());
        }
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(m_buffer.Remaining == 0L);
        }
    }
}
/// <summary>
/// Sole constructor. Opens the .fdt (fields data) and .fdx (fields index)
/// outputs for <paramref name="segment"/> and writes their codec headers.
/// On failure, <c>Abort()</c> cleans up any partially created files.
/// </summary>
public Lucene40StoredFieldsWriter(Directory directory, string segment, IOContext context)
{
    Debug.Assert(directory != null);
    this.directory = directory;
    this.segment = segment;
    bool success = false;
    try
    {
        fieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", FIELDS_EXTENSION), context);
        indexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION), context);
        CodecUtil.WriteHeader(fieldsStream, CODEC_NAME_DAT, VERSION_CURRENT);
        CodecUtil.WriteHeader(indexStream, CODEC_NAME_IDX, VERSION_CURRENT);
        // Readers rely on these exact header lengths when seeking past the headers.
        Debug.Assert(HEADER_LENGTH_DAT == fieldsStream.GetFilePointer());
        Debug.Assert(HEADER_LENGTH_IDX == indexStream.GetFilePointer());
        success = true;
    }
    finally
    {
        if (!success)
        {
            Abort();
        }
    }
}
/// <summary>
/// Writes a fixed-length sorted bytes field in the legacy 4.0 BYTES_FIXED_SORTED
/// layout: the data file holds the fixed value length followed by all distinct
/// values; the index file holds the value count followed by a packed-ints array
/// mapping each document to its ordinal.
/// </summary>
private void AddFixedSortedBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd, int length)
{
    field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_FIXED_SORTED.Name);

    CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT);
    CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT);

    /* values */
    data.WriteInt(length);
    int valueCount = 0;
    foreach (BytesRef v in values)
    {
        data.WriteBytes(v.Bytes, v.Offset, v.Length);
        valueCount++;
    }

    /* ordinals */
    index.WriteInt(valueCount);
    int maxDoc = State.SegmentInfo.DocCount;
    Debug.Assert(valueCount > 0);
    // Ordinals fit in BitsRequired(valueCount - 1) bits since they range 0..valueCount-1.
    PackedInts.Writer w = PackedInts.GetWriter(index, maxDoc, PackedInts.BitsRequired(valueCount - 1), PackedInts.DEFAULT);
    foreach (long n in docToOrd)
    {
        w.Add((long)n);
    }
    w.Finish();
}
/// <summary>
/// Writes a numeric field in the legacy 4.0 VAR_INTS layout. If the value range
/// overflows a signed 64-bit delta (maxValue - minValue &lt; 0), values are stored
/// as raw fixed 64-bit longs; otherwise they are stored as packed ints of
/// (value - minValue), prefixed with minValue and the representation of zero.
/// Null values are written as 0.
/// </summary>
private void AddVarIntsField(FieldInfo field, IndexOutput output, IEnumerable<long?> values, long minValue, long maxValue)
{
    field.PutAttribute(LegacyKey, LegacyDocValuesType.VAR_INTS.Name);

    CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.VAR_INTS_CODEC_NAME, Lucene40DocValuesFormat.VAR_INTS_VERSION_CURRENT);

    long delta = maxValue - minValue;
    if (delta < 0)
    {
        // writes longs — the range overflowed, so packed encoding is impossible
        output.WriteByte((byte)Lucene40DocValuesFormat.VAR_INTS_FIXED_64);
        foreach (long? n in values)
        {
            output.WriteLong(n == null ? 0 : n.Value);
        }
    }
    else
    {
        // writes packed ints
        output.WriteByte((byte)Lucene40DocValuesFormat.VAR_INTS_PACKED);
        output.WriteLong(minValue);
        output.WriteLong(0 - minValue); // default value (representation of 0)
        PackedInts.Writer writer = PackedInts.GetWriter(output, State.SegmentInfo.DocCount, PackedInts.BitsRequired(delta), PackedInts.DEFAULT);
        foreach (long? n in values)
        {
            long v = n == null ? 0 : (long)n;
            writer.Add(v - minValue);
        }
        writer.Finish();
    }
}
/// <summary>
/// Sole constructor. Opens the three term-vector outputs (.tvx index,
/// .tvd documents, .tvf fields) and writes each codec header. On failure,
/// <c>Abort()</c> cleans up any partially created files.
/// </summary>
public Lucene40TermVectorsWriter(Directory directory, string segment, IOContext context)
{
    this.directory = directory;
    this.segment = segment;
    bool success = false;
    try
    {
        // Open files for TermVector storage
        tvx = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_INDEX_EXTENSION), context);
        CodecUtil.WriteHeader(tvx, Lucene40TermVectorsReader.CODEC_NAME_INDEX, Lucene40TermVectorsReader.VERSION_CURRENT);
        tvd = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context);
        CodecUtil.WriteHeader(tvd, Lucene40TermVectorsReader.CODEC_NAME_DOCS, Lucene40TermVectorsReader.VERSION_CURRENT);
        tvf = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
        CodecUtil.WriteHeader(tvf, Lucene40TermVectorsReader.CODEC_NAME_FIELDS, Lucene40TermVectorsReader.VERSION_CURRENT);
        // The reader seeks past fixed-size headers; verify we wrote exactly that many bytes.
        Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_INDEX == tvx.GetFilePointer());
        Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_DOCS == tvd.GetFilePointer());
        Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_FIELDS == tvf.GetFilePointer());
        success = true;
    }
    finally
    {
        if (!success)
        {
            Abort();
        }
    }
}
/// <summary>
/// Writes the character definition tables to a file under <paramref name="baseDir"/>:
/// codec header, the raw character-category map, then one flag byte per character
/// class (bit 0 = invoke, bit 1 = group).
/// </summary>
public void Write(string baseDir)
{
    // LUCENENET specific: we don't need to do a "classpath" output directory, since we
    // are changing the implementation to read files dynamically instead of making the
    // user recompile with the new files.
    string filename = System.IO.Path.Combine(baseDir, typeof(CharacterDefinition).Name + CharacterDefinition.FILENAME_SUFFIX);
    // BUGFIX: create the directory that will contain the output file. The original code
    // passed baseDir here, which computes the PARENT of baseDir and therefore fails to
    // create the actual output directory (compare WriteTargetMap/WritePosDict/Write
    // in the sibling writers, which all use GetDirectoryName(filename)).
    System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
    using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write))
    {
        DataOutput @out = new OutputStreamDataOutput(os);
        CodecUtil.WriteHeader(@out, CharacterDefinition.HEADER, CharacterDefinition.VERSION);
        @out.WriteBytes(characterCategoryMap, 0, characterCategoryMap.Length);
        for (int i = 0; i < CharacterDefinition.CLASS_COUNT; i++)
        {
            // Pack the two per-class booleans into a single flag byte.
            byte b = (byte)(
                (invokeMap[i] ? 0x01 : 0x00) |
                (groupMap[i] ? 0x02 : 0x00));
            @out.WriteByte(b);
        }
    }
}
/// <summary>
/// Creates the segment output file for this format, writes its codec header,
/// and returns a <c>FieldsConsumer</c> wrapping the open output. If writing the
/// header fails, the output is closed while suppressing secondary exceptions.
/// </summary>
// NOTE(review): converter artifact — member names below keep Java casing
// (fieldsConsumer, segmentFileName, createOutput, etc.); renaming the override
// or the referenced members would break callers, so they are left as-is.
public override FieldsConsumer fieldsConsumer(SegmentWriteState state)
{
    string fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION);
    IndexOutput @out = state.directory.createOutput(fileName, state.context);
    bool success = false;
    try
    {
        CodecUtil.WriteHeader(@out, CODEC_NAME, VERSION_CURRENT);
        success = true;
    }
    finally
    {
        if (!success)
        {
            IOUtils.CloseWhileHandlingException(@out);
        }
    }
    // The consumer takes ownership of @out and is responsible for closing it.
    return(new FieldsConsumerAnonymousInnerClassHelper(this, @out));
}
/// <summary>
/// Returns the shared data output, lazily creating it (and writing its codec
/// header) on first use. Creation is guarded by a monitor on <c>this</c>; if
/// header writing fails, the partially created output is disposed while
/// suppressing secondary exceptions.
/// </summary>
private IndexOutput GetOutput()
{
    UninterruptableMonitor.Enter(this);
    try
    {
        if (dataOut is null)
        {
            bool opened = false;
            try
            {
                dataOut = directory.CreateOutput(dataFileName, IOContext.DEFAULT);
                CodecUtil.WriteHeader(dataOut, DATA_CODEC, VERSION_CURRENT);
                opened = true;
            }
            finally
            {
                if (!opened)
                {
                    IOUtils.DisposeWhileHandlingException(dataOut);
                }
            }
        }
        return dataOut;
    }
    finally
    {
        UninterruptableMonitor.Exit(this);
    }
}
/// <summary>
/// Writes the part-of-speech dictionary to <paramref name="filename"/>: codec
/// header, entry count, then each entry as three strings (POS, inflection type,
/// inflection form). A null entry is encoded as three zero bytes (three empty
/// strings).
/// </summary>
protected virtual void WritePosDict(string filename)
{
    // Make sure the destination directory exists before opening the file.
    System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename));
    using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write))
    {
        DataOutput @out = new OutputStreamDataOutput(os);
        CodecUtil.WriteHeader(@out, BinaryDictionary.POSDICT_HEADER, BinaryDictionary.VERSION);
        @out.WriteVInt32(posDict.Count);
        foreach (string entry in posDict)
        {
            if (entry is null)
            {
                // Encoded as three empty strings: a single 0 length byte each.
                @out.WriteByte((byte)0);
                @out.WriteByte((byte)0);
                @out.WriteByte((byte)0);
                continue;
            }
            string[] fields = CSVUtil.Parse(entry);
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(fields.Length == 3, () => "malformed pos/inflection: " + entry);
            }
            @out.WriteString(fields[0]);
            @out.WriteString(fields[1]);
            @out.WriteString(fields[2]);
        }
    }
}
#pragma warning restore CA2213 // Disposable fields should be disposed

/// <summary>
/// Sole constructor. Opens the fields-data and fields-index outputs for
/// <paramref name="segment"/> and writes their codec headers. On failure,
/// <c>Abort()</c> cleans up any partially created files.
/// </summary>
public Lucene40StoredFieldsWriter(Directory directory, string segment, IOContext context)
{
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(directory != null);
    }
    this.directory = directory;
    this.segment = segment;
    bool success = false;
    try
    {
        fieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", FIELDS_EXTENSION), context);
        indexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION), context);
        CodecUtil.WriteHeader(fieldsStream, CODEC_NAME_DAT, VERSION_CURRENT);
        CodecUtil.WriteHeader(indexStream, CODEC_NAME_IDX, VERSION_CURRENT);
        if (Debugging.AssertsEnabled)
        {
            // The reader seeks past fixed-size headers; verify we wrote exactly that many bytes.
            Debugging.Assert(HEADER_LENGTH_DAT == fieldsStream.Position); // LUCENENET specific: Renamed from getFilePointer() to match FileStream
            Debugging.Assert(HEADER_LENGTH_IDX == indexStream.Position); // LUCENENET specific: Renamed from getFilePointer() to match FileStream
        }
        success = true;
    }
    finally
    {
        if (!success)
        {
            Abort();
        }
    }
}
#pragma warning restore CA2213 // Disposable fields should be disposed

/// <summary>
/// Sole constructor. Opens the three term-vector outputs (index, documents,
/// fields) and writes each codec header. On failure, <c>Abort()</c> cleans up
/// any partially created files.
/// </summary>
public Lucene40TermVectorsWriter(Directory directory, string segment, IOContext context)
{
    this.directory = directory;
    this.segment = segment;
    bool success = false;
    try
    {
        // Open files for TermVector storage
        tvx = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_INDEX_EXTENSION), context);
        CodecUtil.WriteHeader(tvx, Lucene40TermVectorsReader.CODEC_NAME_INDEX, Lucene40TermVectorsReader.VERSION_CURRENT);
        tvd = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context);
        CodecUtil.WriteHeader(tvd, Lucene40TermVectorsReader.CODEC_NAME_DOCS, Lucene40TermVectorsReader.VERSION_CURRENT);
        tvf = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
        CodecUtil.WriteHeader(tvf, Lucene40TermVectorsReader.CODEC_NAME_FIELDS, Lucene40TermVectorsReader.VERSION_CURRENT);
        if (Debugging.AssertsEnabled)
        {
            // The reader seeks past fixed-size headers; verify we wrote exactly that many bytes.
            Debugging.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_INDEX == tvx.Position); // LUCENENET specific: Renamed from getFilePointer() to match FileStream
            Debugging.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_DOCS == tvd.Position); // LUCENENET specific: Renamed from getFilePointer() to match FileStream
            Debugging.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_FIELDS == tvf.Position); // LUCENENET specific: Renamed from getFilePointer() to match FileStream
        }
        success = true;
    }
    finally
    {
        if (!success)
        {
            Abort();
        }
    }
}
/// <summary>
/// Writes the terms-dictionary header plus the three skip-list parameters
/// that the corresponding reader consumes before any term metadata.
/// </summary>
public override void Init(IndexOutput termsOut)
{
    CodecUtil.WriteHeader(termsOut, Lucene40PostingsReader.TERMS_CODEC, Lucene40PostingsReader.VERSION_CURRENT);
    termsOut.WriteInt(SkipInterval); // write skipInterval
    termsOut.WriteInt(MaxSkipLevels); // write maxSkipLevels
    termsOut.WriteInt(SkipMinimum); // write skipMinimum
}
/// <summary>
/// Creates the data/metadata outputs for this segment, stamping each header
/// with the 4.2 GCD-compression version (this writer emits the 4.2 format).
/// If anything fails before both headers are written, this consumer is closed
/// so the partially created outputs are released.
/// </summary>
internal Lucene42DocValuesConsumer(SegmentWriteState state, string dataCodec, string dataExtension, string metaCodec, string metaExtension, float acceptableOverheadRatio)
{
    this.AcceptableOverheadRatio = acceptableOverheadRatio;
    MaxDoc = state.SegmentInfo.DocCount;
    bool initialized = false;
    try
    {
        var dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, dataExtension);
        Data = state.Directory.CreateOutput(dataName, state.Context);
        // this writer writes the format 4.2 did!
        CodecUtil.WriteHeader(Data, dataCodec, Lucene42DocValuesProducer.VERSION_GCD_COMPRESSION);

        var metaName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, metaExtension);
        Meta = state.Directory.CreateOutput(metaName, state.Context);
        CodecUtil.WriteHeader(Meta, metaCodec, Lucene42DocValuesProducer.VERSION_GCD_COMPRESSION);

        initialized = true;
    }
    finally
    {
        if (!initialized)
        {
            // Close whatever was opened without masking the original exception.
            IOUtils.CloseWhileHandlingException(this);
        }
    }
}
/// <summary>
/// Persists the current snapshot ref-counts to a new generation file
/// (SNAPSHOTS_PREFIX + nextWriteGen). On failure the partial file is deleted
/// (errors during cleanup are suppressed so the original exception propagates).
/// On success the output is closed, the directory is synced, the previous
/// generation file is deleted (best effort), and the generation counter advances.
/// </summary>
internal void Persist()
{
    lock (this)
    {
        string fileName = SNAPSHOTS_PREFIX + nextWriteGen;
        IndexOutput @out = dir.CreateOutput(fileName, IOContext.DEFAULT);
        bool success = false;
        try
        {
            CodecUtil.WriteHeader(@out, CODEC_NAME, VERSION_CURRENT);
            @out.WriteVInt32(m_refCounts.Count);
            // Each entry: snapshot generation (key) and its reference count (value).
            foreach (KeyValuePair<long, int> ent in m_refCounts)
            {
                @out.WriteVInt64(ent.Key);
                @out.WriteVInt32(ent.Value);
            }
            success = true;
        }
        finally
        {
            if (!success)
            {
                IOUtils.CloseWhileHandlingException(@out);
                try
                {
                    dir.DeleteFile(fileName);
                }
#pragma warning disable 168
                catch (Exception e)
#pragma warning restore 168
                {
                    // Suppress so we keep throwing original exception
                }
            }
            else
            {
                IOUtils.Close(@out);
            }
        }

        dir.Sync(/*Collections.singletonList(*/ new[] { fileName } /*)*/);

        if (nextWriteGen > 0)
        {
            // Remove the previous generation's snapshot file; it is now superseded.
            string lastSaveFile = SNAPSHOTS_PREFIX + (nextWriteGen - 1);
            try
            {
                dir.DeleteFile(lastSaveFile);
            }
#pragma warning disable 168
            catch (IOException ioe)
#pragma warning restore 168
            {
                // OK: likely it didn't exist
            }
        }

        nextWriteGen++;
    }
}
/// <summary>
/// Writes this vector to the file <paramref name="name"/> in Directory
/// <paramref name="d"/>, in a format that can be read by the constructor
/// <see cref="BitVector(Directory, string, IOContext)"/>.
/// </summary>
public void Write(Directory d, string name, IOContext context)
{
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(!(d is CompoundFileDirectory));
    }
    IndexOutput output = d.CreateOutput(name, context);
    try
    {
        // -2 sentinel precedes the codec header — presumably so readers can
        // distinguish this versioned format from legacy files; confirm against
        // the reading constructor.
        output.WriteInt32(-2);
        CodecUtil.WriteHeader(output, CODEC, VERSION_CURRENT);
        if (IsSparse)
        {
            // sparse bit-set more efficiently saved as d-gaps.
            WriteClearedDgaps(output);
        }
        else
        {
            WriteBits(output);
        }
        CodecUtil.WriteFooter(output);
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(VerifyCount());
        }
    }
    finally
    {
        IOUtils.Dispose(output);
    }
}
/// <summary>
/// Creates the data/metadata outputs for this segment and writes their codec
/// headers. If anything fails before both headers are written, this consumer
/// is disposed (suppressing secondary exceptions) so the partially created
/// outputs are released.
/// </summary>
internal DirectDocValuesConsumer(SegmentWriteState state, string dataCodec, string dataExtension, string metaCodec, string metaExtension)
{
    maxDoc = state.SegmentInfo.DocCount;
    bool initialized = false;
    try
    {
        var dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, dataExtension);
        data = state.Directory.CreateOutput(dataName, state.Context);
        CodecUtil.WriteHeader(data, dataCodec, DirectDocValuesProducer.VERSION_CURRENT);

        var metaName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, metaExtension);
        meta = state.Directory.CreateOutput(metaName, state.Context);
        CodecUtil.WriteHeader(meta, metaCodec, DirectDocValuesProducer.VERSION_CURRENT);

        initialized = true;
    }
    finally
    {
        if (!initialized)
        {
            IOUtils.DisposeWhileHandlingException(this);
        }
    }
}
/// <summary>
/// Writes this writer's codec header and the pending-buffer length (the
/// maximum number of positions that can be inlined), then delegates to the
/// wrapped postings writer so it can write its own terms-dictionary header.
/// </summary>
public override void Init(IndexOutput termsOut)
{
    _termsOut = termsOut;
    CodecUtil.WriteHeader(termsOut, CODEC, VERSION_CURRENT);
    termsOut.WriteVInt32(_pending.Length); // encode maxPositions in header
    _wrappedPostingsWriter.Init(termsOut);
}
/// <summary>
/// Disposes the wrapped postings writer and, unless the wrapped writer is
/// itself a <c>PulsingPostingsWriter</c> or the format predates
/// VERSION_META_ARRAY, writes a summary file listing each field's number and
/// its Int64s metadata size.
/// </summary>
// NOTE(review): the 'disposing' argument is ignored, and 'output' is disposed
// explicitly on the success path and again via CloseWhileHandlingException in
// the finally block — looks like a double-dispose carried over from the port;
// confirm IndexOutput tolerates being closed twice.
protected override void Dispose(bool disposing)
{
    _wrappedPostingsWriter.Dispose();

    // Skip the summary file in these two cases.
    if (_wrappedPostingsWriter is PulsingPostingsWriter || VERSION_CURRENT < VERSION_META_ARRAY)
    {
        return;
    }

    var summaryFileName = IndexFileNames.SegmentFileName(_segmentState.SegmentInfo.Name, _segmentState.SegmentSuffix, SUMMARY_EXTENSION);
    IndexOutput output = null;
    try
    {
        output = _segmentState.Directory.CreateOutput(summaryFileName, _segmentState.Context);
        CodecUtil.WriteHeader(output, CODEC, VERSION_CURRENT);
        output.WriteVInt32(_fields.Count);
        foreach (var field in _fields)
        {
            output.WriteVInt32(field.FieldNumber);
            output.WriteVInt32(field.Int64sSize);
        }
        output.Dispose();
    }
    finally
    {
        IOUtils.CloseWhileHandlingException(output);
    }
}
/// <summary>
/// Sole constructor. Opens the vectors index and data outputs, writes both
/// codec headers (format name + suffix), hands the index stream to a
/// <c>CompressingStoredFieldsIndexWriter</c> (which then owns it), and writes
/// the packed-ints version and chunk size to the vectors stream. On failure,
/// the index stream (if not yet handed off) is disposed and <c>Abort()</c>
/// cleans up.
/// </summary>
public CompressingTermVectorsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize)
{
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(directory != null);
    }
    this.directory = directory;
    this.segment = si.Name;
    this.segmentSuffix = segmentSuffix;
    this.compressionMode = compressionMode;
    this.compressor = compressionMode.NewCompressor();
    this.chunkSize = chunkSize;

    numDocs = 0;
    pendingDocs = new LinkedList<DocData>();
    termSuffixes = new GrowableByteArrayDataOutput(ArrayUtil.Oversize(chunkSize, 1));
    payloadBytes = new GrowableByteArrayDataOutput(ArrayUtil.Oversize(1, 1));
    lastTerm = new BytesRef(ArrayUtil.Oversize(30, 1));

    bool success = false;
    IndexOutput indexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, segmentSuffix, VECTORS_INDEX_EXTENSION), context);
    try
    {
        vectorsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, segmentSuffix, VECTORS_EXTENSION), context);

        string codecNameIdx = formatName + CODEC_SFX_IDX;
        string codecNameDat = formatName + CODEC_SFX_DAT;
        CodecUtil.WriteHeader(indexStream, codecNameIdx, VERSION_CURRENT);
        CodecUtil.WriteHeader(vectorsStream, codecNameDat, VERSION_CURRENT);
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(CodecUtil.HeaderLength(codecNameDat) == vectorsStream.Position); // LUCENENET specific: Renamed from getFilePointer() to match FileStream
            Debugging.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.Position); // LUCENENET specific: Renamed from getFilePointer() to match FileStream
        }

        indexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
        // Ownership transferred to indexWriter; null so the catch path below
        // does not double-dispose it.
        indexStream = null;

        vectorsStream.WriteVInt32(PackedInt32s.VERSION_CURRENT);
        vectorsStream.WriteVInt32(chunkSize);
        writer = new BlockPackedWriter(vectorsStream, BLOCK_SIZE);

        positionsBuf = new int[1024];
        startOffsetsBuf = new int[1024];
        lengthsBuf = new int[1024];
        payloadLengthsBuf = new int[1024];

        success = true;
    }
    finally
    {
        if (!success)
        {
            IOUtils.DisposeWhileHandlingException(indexStream);
            Abort();
        }
    }
}
/// <summary>
/// Writes this writer's codec header followed by the three skip-list
/// parameters the reader consumes before any term metadata.
/// </summary>
public override void Init(IndexOutput termsOut)
{
    CodecUtil.WriteHeader(termsOut, CODEC, VERSION_CURRENT);
    // TODO: -- just ask skipper to "start" here
    termsOut.WriteInt32(skipInterval); // write skipInterval
    termsOut.WriteInt32(maxSkipLevels); // write maxSkipLevels
    termsOut.WriteInt32(skipMinimum); // write skipMinimum
}
/// <summary>
/// Writes this writer's codec header followed by the three skip-list
/// parameters the reader consumes before any term metadata.
/// </summary>
public override void Init(IndexOutput termsOut)
{
    CodecUtil.WriteHeader(termsOut, CODEC, VERSION_CURRENT);
    // TODO: -- just ask skipper to "start" here
    termsOut.WriteInt(SKIP_INTERVAL); // write skipInterval
    termsOut.WriteInt(MAX_SKIP_LEVELS); // write maxSkipLevels
    termsOut.WriteInt(SKIP_MINIMUM); // write skipMinimum
}
/// <summary>
/// Writes a numeric field in the legacy 4.0 FIXED_INTS_32 layout: codec header,
/// the fixed value size (4 bytes), then each document's value as a 32-bit int.
/// Null values are written as 0.
/// </summary>
private void AddIntsField(FieldInfo field, IndexOutput output, IEnumerable<long?> values)
{
    field.PutAttribute(LegacyKey, LegacyDocValuesType.FIXED_INTS_32.Name);
    CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
    output.WriteInt(4); // size
    foreach (long? n in values)
    {
        output.WriteInt(n == null ? 0 : (int)n);
    }
}
/// <summary>
/// Serializes this suggester's model: codec header, then the scalar
/// parameters (count, separator, grams, totTokens), then the FST itself.
/// Always returns true.
/// </summary>
public override bool Store(DataOutput output)
{
    CodecUtil.WriteHeader(output, CODEC_NAME, VERSION_CURRENT);

    // Scalar model parameters, in the order the loader expects them.
    output.WriteVInt64(count);
    output.WriteByte(separator);
    output.WriteVInt32(grams);
    output.WriteVInt64(totTokens);

    // The n-gram FST is written last.
    fst.Save(output);
    return true;
}
/// <summary>
/// Writes a numeric field in the legacy 4.0 FIXED_INTS_32 layout: codec header,
/// the fixed value size (4 bytes), then each document's value as a 32-bit int.
/// Null values are written as 0 (via GetValueOrDefault).
/// </summary>
[ExceptionToNetNumericConvention] // LUCENENET: Private API, keeping as-is
private void AddIntsField(FieldInfo field, IndexOutput output, IEnumerable<long?> values)
{
    field.PutAttribute(legacyKey, LegacyDocValuesType.FIXED_INTS_32.ToString());
    CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
    output.WriteInt32(4); // size
    foreach (long? n in values)
    {
        output.WriteInt32((int)n.GetValueOrDefault());
    }
}
/// <summary>
/// Writes the compound-file entry table: codec header, entry count, then for
/// each entry its file name (with the segment-name prefix stripped), offset,
/// and length, followed by the codec footer.
/// </summary>
private void WriteEntryTable(ICollection<FileEntry> entries, IndexOutput entryOut)
{
    CodecUtil.WriteHeader(entryOut, ENTRY_CODEC, VERSION_CURRENT);
    entryOut.WriteVInt32(entries.Count);
    foreach (FileEntry entry in entries)
    {
        entryOut.WriteString(IndexFileNames.StripSegmentName(entry.File));
        entryOut.WriteInt64(entry.Offset);
        entryOut.WriteInt64(entry.Length);
    }
    CodecUtil.WriteFooter(entryOut);
}
/// <summary>
/// Writes a variable-length sorted bytes field in the legacy 4.0
/// BYTES_VAR_SORTED layout. The data file holds the concatenated values; the
/// index file holds the total byte length, a packed-ints address table with a
/// trailing sentinel (valueCount + 1 entries), and a packed-ints array mapping
/// each document to its ordinal.
/// </summary>
private void AddVarSortedBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
{
    field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_VAR_SORTED.Name);

    CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_VAR_SORTED_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_VAR_SORTED_VERSION_CURRENT);
    CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_VAR_SORTED_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_VAR_SORTED_VERSION_CURRENT);

    /* values */
    long startPos = data.FilePointer;
    int valueCount = 0;
    foreach (BytesRef v in values)
    {
        data.WriteBytes(v.Bytes, v.Offset, v.Length);
        valueCount++;
    }

    /* addresses */
    long maxAddress = data.FilePointer - startPos;
    index.WriteLong(maxAddress);
    Debug.Assert(valueCount != int.MaxValue); // unsupported by the 4.0 impl
    // valueCount + 1 slots: one start address per value plus the end sentinel.
    PackedInts.Writer w = PackedInts.GetWriter(index, valueCount + 1, PackedInts.BitsRequired(maxAddress), PackedInts.DEFAULT);
    long currentPosition = 0;
    foreach (BytesRef v in values)
    {
        w.Add(currentPosition);
        currentPosition += v.Length;
    }
    // write sentinel
    Debug.Assert(currentPosition == maxAddress);
    w.Add(currentPosition);
    w.Finish();

    /* ordinals */
    int maxDoc = State.SegmentInfo.DocCount;
    Debug.Assert(valueCount > 0);
    // Ordinals fit in BitsRequired(valueCount - 1) bits since they range 0..valueCount-1.
    PackedInts.Writer ords = PackedInts.GetWriter(index, maxDoc, PackedInts.BitsRequired(valueCount - 1), PackedInts.DEFAULT);
    foreach (long n in docToOrd)
    {
        ords.Add((long)n);
    }
    ords.Finish();
}
/// <summary>
/// Sole constructor. Opens the vectors index and data outputs, writes both
/// codec headers (format name + suffix), hands the index stream to a
/// <c>CompressingStoredFieldsIndexWriter</c> (which then owns it), and writes
/// the packed-ints version and chunk size to the vectors stream. On failure,
/// the index stream (if not yet handed off) is closed and <c>Abort()</c>
/// cleans up.
/// </summary>
public CompressingTermVectorsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize)
{
    Debug.Assert(directory != null);
    this.Directory = directory;
    this.Segment = si.Name;
    this.SegmentSuffix = segmentSuffix;
    this.CompressionMode = compressionMode;
    this.Compressor = compressionMode.NewCompressor();
    this.ChunkSize = chunkSize;

    NumDocs = 0;
    PendingDocs = new LinkedList<DocData>();
    TermSuffixes = new GrowableByteArrayDataOutput(ArrayUtil.Oversize(chunkSize, 1));
    PayloadBytes = new GrowableByteArrayDataOutput(ArrayUtil.Oversize(1, 1));
    LastTerm = new BytesRef(ArrayUtil.Oversize(30, 1));

    bool success = false;
    IndexOutput indexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(Segment, segmentSuffix, VECTORS_INDEX_EXTENSION), context);
    try
    {
        VectorsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(Segment, segmentSuffix, VECTORS_EXTENSION), context);

        string codecNameIdx = formatName + CODEC_SFX_IDX;
        string codecNameDat = formatName + CODEC_SFX_DAT;
        CodecUtil.WriteHeader(indexStream, codecNameIdx, VERSION_CURRENT);
        CodecUtil.WriteHeader(VectorsStream, codecNameDat, VERSION_CURRENT);
        Debug.Assert(CodecUtil.HeaderLength(codecNameDat) == VectorsStream.FilePointer);
        Debug.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.FilePointer);

        IndexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
        // Ownership transferred to IndexWriter; null so the failure path below
        // does not double-close it.
        indexStream = null;

        VectorsStream.WriteVInt(PackedInts.VERSION_CURRENT);
        VectorsStream.WriteVInt(chunkSize);
        Writer = new BlockPackedWriter(VectorsStream, BLOCK_SIZE);

        PositionsBuf = new int[1024];
        StartOffsetsBuf = new int[1024];
        LengthsBuf = new int[1024];
        PayloadLengthsBuf = new int[1024];

        success = true;
    }
    finally
    {
        if (!success)
        {
            IOUtils.CloseWhileHandlingException(indexStream);
            Abort();
        }
    }
}