public PreFlexRWStoredFieldsWriter(Directory directory, string segment, IOContext context)
{
    Debug.Assert(directory != null);
    this.Directory = directory;
    this.Segment = segment;

    bool success = false;
    try
    {
        FieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION), context);
        IndexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION), context);

        FieldsStream.WriteInt(Lucene3xStoredFieldsReader.FORMAT_CURRENT);
        IndexStream.WriteInt(Lucene3xStoredFieldsReader.FORMAT_CURRENT);

        success = true;
    }
    finally
    {
        if (!success)
        {
            Abort();
        }
    }
}
internal void InitTermVectorsWriter()
{
    lock (this)
    {
        if (tvx == null)
        {
            string docStoreSegment = docWriter.DocStoreSegment;
            if (docStoreSegment == null)
            {
                return;
            }
            System.Diagnostics.Debug.Assert(docStoreSegment != null);

            // If we hit an exception while init'ing the term
            // vector output files, we must abort this segment
            // because those files will be in an unknown
            // state:
            tvx = docWriter.directory.CreateOutput(docStoreSegment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
            tvd = docWriter.directory.CreateOutput(docStoreSegment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
            tvf = docWriter.directory.CreateOutput(docStoreSegment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);

            tvx.WriteInt(TermVectorsReader.FORMAT_CURRENT);
            tvd.WriteInt(TermVectorsReader.FORMAT_CURRENT);
            tvf.WriteInt(TermVectorsReader.FORMAT_CURRENT);

            docWriter.AddOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
            docWriter.AddOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
            docWriter.AddOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);

            lastDocID = 0;
        }
    }
}
/// <summary>Write as a d-gaps list </summary>
private void WriteDgaps(IndexOutput output)
{
    output.WriteInt(-1);       // mark using d-gaps
    output.WriteInt(Size());   // write size
    output.WriteInt(Count());  // write count
    int last = 0;
    int n = Count();
    int m = bits.Length;
    for (int i = 0; i < m && n > 0; i++)
    {
        if (bits[i] != 0)
        {
            output.WriteVInt(i - last);
            output.WriteByte(bits[i]);
            last = i;
            n -= BYTE_COUNTS[bits[i] & 0xFF];
        }
    }
}
/// <summary> Save this segment's info.</summary>
internal void Write(IndexOutput output)
{
    output.WriteString(name);
    output.WriteInt(docCount);
    output.WriteLong(delGen);
    output.WriteByte((byte)(hasSingleNormFile ? 1 : 0));
    if (normGen == null)
    {
        output.WriteInt(-1);
    }
    else
    {
        output.WriteInt(normGen.Length);
        for (int j = 0; j < normGen.Length; j++)
        {
            output.WriteLong(normGen[j]);
        }
    }
    output.WriteByte((byte)isCompoundFile);
}
private void Initialize(Directory directory, System.String segment, FieldInfos fis, int interval, bool isi)
{
    indexInterval = interval;
    fieldInfos = fis;
    isIndex = isi;
    output = directory.CreateOutput(segment + (isIndex ? ".tii" : ".tis"));
    output.WriteInt(FORMAT);         // write format
    output.WriteLong(0);             // leave space for size
    output.WriteInt(indexInterval);  // write indexInterval
    output.WriteInt(skipInterval);   // write skipInterval
}
public TermVectorsWriter(Directory directory, System.String segment, FieldInfos fieldInfos)
{
    // Open files for TermVector storage
    tvx = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
    tvx.WriteInt(TermVectorsReader.FORMAT_CURRENT);
    tvd = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
    tvd.WriteInt(TermVectorsReader.FORMAT_CURRENT);
    tvf = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
    tvf.WriteInt(TermVectorsReader.FORMAT_CURRENT);

    this.fieldInfos = fieldInfos;
}
public TermVectorsWriter(Directory directory, string segment, FieldInfos fieldInfos)
{
    // Open files for TermVector storage
    tvx = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
    tvx.WriteInt(TermVectorsReader.FORMAT_CURRENT);
    tvd = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
    tvd.WriteInt(TermVectorsReader.FORMAT_CURRENT);
    tvf = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
    tvf.WriteInt(TermVectorsReader.FORMAT_CURRENT);

    this.fieldInfos = fieldInfos;
}
public TermVectorsWriter(Directory directory, System.String segment, FieldInfos fieldInfos)
{
    // Open files for TermVector storage
    tvx = directory.CreateOutput(segment + TVX_EXTENSION);
    tvx.WriteInt(FORMAT_VERSION);
    tvd = directory.CreateOutput(segment + TVD_EXTENSION);
    tvd.WriteInt(FORMAT_VERSION);
    tvf = directory.CreateOutput(segment + TVF_EXTENSION);
    tvf.WriteInt(FORMAT_VERSION);

    this.fieldInfos = fieldInfos;
    fields = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(fieldInfos.Size()));
    terms = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
}
private void WriteDeleteableFiles(System.Collections.ArrayList files)
{
    IndexOutput output = directory.CreateOutput("deleteable.new");
    try
    {
        output.WriteInt(files.Count);
        for (int i = 0; i < files.Count; i++)
        {
            output.WriteString((System.String)files[i]);
        }
    }
    finally
    {
        output.Close();
    }
    directory.RenameFile("deleteable.new", IndexFileNames.DELETABLE);
}
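A minimal read-side sketch of the same file layout (an int count followed by that many strings); this is inferred from the writer above, not copied from the source, and the method name is hypothetical.

// Hypothetical counterpart to WriteDeleteableFiles (sketch only, assuming the layout written above).
private System.Collections.ArrayList ReadDeleteableFiles()
{
    System.Collections.ArrayList files = new System.Collections.ArrayList();
    if (!directory.FileExists(IndexFileNames.DELETABLE))
    {
        return files;
    }
    IndexInput input = directory.OpenInput(IndexFileNames.DELETABLE);
    try
    {
        for (int i = input.ReadInt(); i > 0; i--) // read count, then that many file names
        {
            files.Add(input.ReadString());
        }
    }
    finally
    {
        input.Close();
    }
    return files;
}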
public PreFlexRWTermVectorsWriter(Directory directory, string segment, IOContext context)
{
    this.Directory = directory;
    this.Segment = segment;
    bool success = false;
    try
    {
        // Open files for TermVector storage
        Tvx = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION), context);
        Tvx.WriteInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
        Tvd = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context);
        Tvd.WriteInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
        Tvf = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
        Tvf.WriteInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
        success = true;
    }
    finally
    {
        if (!success)
        {
            Abort();
        }
    }
}
/// <summary>
/// Save a single segment's info. </summary>
public override void Write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext)
{
    string fileName = IndexFileNames.SegmentFileName(si.Name, "", Lucene46SegmentInfoFormat.SI_EXTENSION);
    si.AddFile(fileName);

    IndexOutput output = dir.CreateOutput(fileName, ioContext);
    bool success = false;
    try
    {
        CodecUtil.WriteHeader(output, Lucene46SegmentInfoFormat.CODEC_NAME, Lucene46SegmentInfoFormat.VERSION_CURRENT);
        // Write the Lucene version that created this segment, since 3.1
        output.WriteString(si.Version);
        output.WriteInt(si.DocCount);

        output.WriteByte((byte)(sbyte)(si.UseCompoundFile ? SegmentInfo.YES : SegmentInfo.NO));
        output.WriteStringStringMap(si.Diagnostics);
        output.WriteStringSet(si.Files);

        CodecUtil.WriteFooter(output);
        success = true;
    }
    finally
    {
        if (!success)
        {
            IOUtils.CloseWhileHandlingException(output);
            si.Dir.DeleteFile(fileName);
        }
        else
        {
            output.Dispose();
        }
    }
}
/// <summary>
/// Write as a d-gaps list </summary>
private void WriteClearedDgaps(IndexOutput output)
{
    output.WriteInt(-1);       // mark using d-gaps
    output.WriteInt(Size());   // write size
    output.WriteInt(Count());  // write count
    int last = 0;
    int numCleared = Size() - Count();
    for (int i = 0; i < Bits.Length && numCleared > 0; i++)
    {
        if (Bits[i] != unchecked((byte)0xff))
        {
            output.WriteVInt(i - last);
            output.WriteByte(Bits[i]);
            last = i;
            numCleared -= (8 - BitUtil.BitCount(Bits[i]));
            Debug.Assert(numCleared >= 0 || (i == (Bits.Length - 1) && numCleared == -(8 - (Size_Renamed & 7))));
        }
    }
}
private void AddShortsField(FieldInfo field, IndexOutput output, IEnumerable<long?> values)
{
    field.PutAttribute(LegacyKey, LegacyDocValuesType.FIXED_INTS_16.Name);
    CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
    output.WriteInt(2); // size
    foreach (long? n in values)
    {
        output.WriteShort(n == null ? (short)0 : (short)n);
    }
}
public void Write(Directory directory)
{
    System.String segmentFileName = GetNextSegmentFileName();

    // Always advance the generation on write:
    if (generation == -1)
    {
        generation = 1;
    }
    else
    {
        generation++;
    }

    IndexOutput output = directory.CreateOutput(segmentFileName);

    bool success = false;
    try
    {
        output.WriteInt(CURRENT_FORMAT); // write FORMAT
        output.WriteLong(++version);     // every write changes the index
        output.WriteInt(counter);        // write counter
        output.WriteInt(Count);          // write infos
        for (int i = 0; i < Count; i++)
        {
            Info(i).Write(output);
        }
    }
    finally
    {
        try
        {
            output.Close();
            success = true;
        }
        finally
        {
            if (!success)
            {
                // Try not to leave a truncated segments_N file in
                // the index:
                directory.DeleteFile(segmentFileName);
            }
        }
    }

    try
    {
        output = directory.CreateOutput(IndexFileNames.SEGMENTS_GEN);
        try
        {
            output.WriteInt(FORMAT_LOCKLESS);
            output.WriteLong(generation);
            output.WriteLong(generation);
        }
        finally
        {
            output.Close();
        }
    }
    catch (System.IO.IOException e)
    {
        // It's OK if we fail to write this file since it's
        // used only as one of the retry fallbacks.
    }

    lastGeneration = generation;
}
/// <summary>Write as a d-gaps list </summary>
private void WriteDgaps(IndexOutput output)
{
    output.WriteInt(-1);       // mark using d-gaps
    output.WriteInt(Size());   // write size
    output.WriteInt(Count());  // write count
    int last = 0;
    int n = Count();
    int m = bits.Length;
    for (int i = 0; i < m && n > 0; i++)
    {
        if (bits[i] != 0)
        {
            output.WriteVInt(i - last);
            output.WriteByte(bits[i]);
            last = i;
            n -= BYTE_COUNTS[bits[i] & 0xFF];
        }
    }
}
/// <summary>Write as a bit set </summary>
private void WriteBits(IndexOutput output)
{
    output.WriteInt(Size());   // write size
    output.WriteInt(Count());  // write count
    output.WriteBytes(bits, bits.Length);
}
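Taken together, WriteDgaps and WriteBits produce two on-disk encodings: a plain bit set (size, count, raw bytes) and a d-gaps list flagged by a leading -1. The following decoding sketch is inferred from that write logic, not copied from the source; field names are assumed, and BYTE_COUNTS stands in for a per-byte popcount table as in the writer.

// Sketch of reading either encoding back (assumed from the write logic above).
private void ReadBitVector(IndexInput input)
{
    int firstInt = input.ReadInt();
    if (firstInt == -1)
    {
        // d-gaps list: size, count, then (vint gap, byte) pairs for each non-zero byte
        size = input.ReadInt();
        count = input.ReadInt();
        bits = new byte[(size >> 3) + 1];
        int last = 0;
        int n = count;
        while (n > 0)
        {
            last += input.ReadVInt();
            bits[last] = input.ReadByte();
            n -= BYTE_COUNTS[bits[last] & 0xFF]; // subtract the bits set in this byte
        }
    }
    else
    {
        // plain bit set: the first int was the size, then count and the raw bytes
        size = firstInt;
        count = input.ReadInt();
        bits = new byte[(size >> 3) + 1];
        input.ReadBytes(bits, 0, bits.Length);
    }
}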
internal void FinishCommit(Directory dir)
{
    if (pendingSegnOutput == null)
    {
        throw new System.SystemException("prepareCommit was not called");
    }
    bool success = false;
    try
    {
        pendingSegnOutput.FinishCommit();
        pendingSegnOutput.Close();
        pendingSegnOutput = null;
        success = true;
    }
    finally
    {
        if (!success)
        {
            RollbackCommit(dir);
        }
    }

    // NOTE: if we crash here, we have left a segments_N
    // file in the directory in a possibly corrupt state (if
    // some bytes made it to stable storage and others
    // didn't). But, the segments_N file includes checksum
    // at the end, which should catch this case. So when a
    // reader tries to read it, it will throw a
    // CorruptIndexException, which should cause the retry
    // logic in SegmentInfos to kick in and load the last
    // good (previous) segments_N-1 file.

    System.String fileName = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", generation);
    success = false;
    try
    {
        dir.Sync(fileName);
        success = true;
    }
    finally
    {
        if (!success)
        {
            try
            {
                dir.DeleteFile(fileName);
            }
            catch (System.Exception t)
            {
                // Suppress so we keep throwing the original exception
            }
        }
    }

    lastGeneration = generation;

    try
    {
        IndexOutput genOutput = dir.CreateOutput(IndexFileNames.SEGMENTS_GEN);
        try
        {
            genOutput.WriteInt(FORMAT_LOCKLESS);
            genOutput.WriteLong(generation);
            genOutput.WriteLong(generation);
        }
        finally
        {
            genOutput.Close();
        }
    }
    catch (System.Exception t)
    {
        // It's OK if we fail to write this file since it's
        // used only as one of the retry fallbacks.
    }
}
public override void Init(IndexOutput termsOut)
{
    CodecUtil.WriteHeader(termsOut, Lucene40PostingsReader.TERMS_CODEC, Lucene40PostingsReader.VERSION_CURRENT);
    termsOut.WriteInt(SkipInterval);   // write skipInterval
    termsOut.WriteInt(MaxSkipLevels);  // write maxSkipLevels
    termsOut.WriteInt(SkipMinimum);    // write skipMinimum
}
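A reader-side sketch for the header and three ints written above; it assumes the usual CodecUtil.CheckHeader pairing and a VERSION_START constant, and the field names are hypothetical rather than taken from the source.

// Sketch of the assumed reader-side counterpart (not copied from the source).
public void Init(IndexInput termsIn)
{
    CodecUtil.CheckHeader(termsIn, Lucene40PostingsReader.TERMS_CODEC, Lucene40PostingsReader.VERSION_START, Lucene40PostingsReader.VERSION_CURRENT);
    skipInterval = termsIn.ReadInt();   // read skipInterval
    maxSkipLevels = termsIn.ReadInt();  // read maxSkipLevels
    skipMinimum = termsIn.ReadInt();    // read skipMinimum
}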
private void AddFixedStraightBytesField(FieldInfo field, IndexOutput output, IEnumerable<BytesRef> values, int length)
{
    field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_FIXED_STRAIGHT.Name);
    CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.BYTES_FIXED_STRAIGHT_CODEC_NAME, Lucene40DocValuesFormat.BYTES_FIXED_STRAIGHT_VERSION_CURRENT);
    output.WriteInt(length);
    foreach (BytesRef v in values)
    {
        if (v != null)
        {
            output.WriteBytes(v.Bytes, v.Offset, v.Length);
        }
    }
}
private void Initialize(Directory directory, string segment, FieldInfos fis, int interval, bool isi)
{
    IndexInterval = interval;
    FieldInfos = fis;
    IsIndex = isi;
    Output = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", (IsIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION)), IOContext.DEFAULT);
    bool success = false;
    try
    {
        Output.WriteInt(FORMAT_CURRENT);  // write format
        Output.WriteLong(0);              // leave space for size
        Output.WriteInt(IndexInterval);   // write indexInterval
        Output.WriteInt(SkipInterval);    // write skipInterval
        Output.WriteInt(MaxSkipLevels);   // write maxSkipLevels
        Debug.Assert(InitUTF16Results());
        success = true;
    }
    finally
    {
        if (!success)
        {
            IOUtils.CloseWhileHandlingException(Output);
            try
            {
                directory.DeleteFile(IndexFileNames.SegmentFileName(segment, "", (IsIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION)));
            }
            catch (IOException ignored)
            {
            }
        }
    }
}
internal FieldsWriter(Directory d, System.String segment, FieldInfos fn)
{
    fieldInfos = fn;

    bool success = false;
    String fieldsName = segment + "." + IndexFileNames.FIELDS_EXTENSION;
    try
    {
        fieldsStream = d.CreateOutput(fieldsName);
        fieldsStream.WriteInt(FORMAT_CURRENT);
        success = true;
    }
    finally
    {
        if (!success)
        {
            try
            {
                Dispose();
            }
            catch (System.Exception)
            {
                // Suppress so we keep throwing the original exception
            }
            try
            {
                d.DeleteFile(fieldsName);
            }
            catch (System.Exception)
            {
                // Suppress so we keep throwing the original exception
            }
        }
    }

    success = false;
    String indexName = segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION;
    try
    {
        indexStream = d.CreateOutput(indexName);
        indexStream.WriteInt(FORMAT_CURRENT);
        success = true;
    }
    finally
    {
        if (!success)
        {
            try
            {
                Dispose();
            }
            catch (System.IO.IOException)
            {
            }
            try
            {
                d.DeleteFile(fieldsName);
            }
            catch (System.Exception)
            {
                // Suppress so we keep throwing the original exception
            }
            try
            {
                d.DeleteFile(indexName);
            }
            catch (System.Exception)
            {
                // Suppress so we keep throwing the original exception
            }
        }
    }

    doClose = true;
}
private void AddFixedDerefBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values, int length)
{
    field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_FIXED_DEREF.Name);

    CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_VERSION_CURRENT);
    CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_VERSION_CURRENT);

    // deduplicate
    SortedSet<BytesRef> dictionary = new SortedSet<BytesRef>();
    foreach (BytesRef v in values)
    {
        dictionary.Add(v == null ? new BytesRef() : BytesRef.DeepCopyOf(v));
    }

    /* values */
    data.WriteInt(length);
    foreach (BytesRef v in dictionary)
    {
        data.WriteBytes(v.Bytes, v.Offset, v.Length);
    }

    /* ordinals */
    int valueCount = dictionary.Count;
    Debug.Assert(valueCount > 0);
    index.WriteInt(valueCount);
    int maxDoc = State.SegmentInfo.DocCount;
    PackedInts.Writer w = PackedInts.GetWriter(index, maxDoc, PackedInts.BitsRequired(valueCount - 1), PackedInts.DEFAULT);

    BytesRef brefDummy;
    foreach (BytesRef v in values)
    {
        brefDummy = v;
        if (v == null)
        {
            brefDummy = new BytesRef();
        }
        //int ord = dictionary.HeadSet(brefDummy).Size();
        int ord = dictionary.Count(@ref => @ref.CompareTo(brefDummy) < 0);
        w.Add(ord);
    }
    w.Finish();
}
/// <summary> Save this segment's info.</summary>
internal void Write(IndexOutput output)
{
    output.WriteString(name);
    output.WriteInt(docCount);
    output.WriteLong(delGen);
    output.WriteByte((byte)(hasSingleNormFile ? 1 : 0));
    if (normGen == null)
    {
        output.WriteInt(-1);
    }
    else
    {
        output.WriteInt(normGen.Length);
        for (int j = 0; j < normGen.Length; j++)
        {
            output.WriteLong(normGen[j]);
        }
    }
    output.WriteByte((byte)isCompoundFile);
}
internal FieldsWriter(Directory d, System.String segment, FieldInfos fn)
{
    fieldInfos = fn;

    bool success = false;
    System.String fieldsName = segment + "." + IndexFileNames.FIELDS_EXTENSION;
    try
    {
        fieldsStream = d.CreateOutput(fieldsName);
        fieldsStream.WriteInt(FORMAT_CURRENT);
        success = true;
    }
    finally
    {
        if (!success)
        {
            try
            {
                Close();
            }
            catch (System.Exception t)
            {
                // Suppress so we keep throwing the original exception
            }
            try
            {
                d.DeleteFile(fieldsName);
            }
            catch (System.Exception t)
            {
                // Suppress so we keep throwing the original exception
            }
        }
    }

    success = false;
    System.String indexName = segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION;
    try
    {
        indexStream = d.CreateOutput(indexName);
        indexStream.WriteInt(FORMAT_CURRENT);
        success = true;
    }
    finally
    {
        if (!success)
        {
            try
            {
                Close();
            }
            catch (System.IO.IOException ioe)
            {
            }
            try
            {
                d.DeleteFile(fieldsName);
            }
            catch (System.Exception t)
            {
                // Suppress so we keep throwing the original exception
            }
            try
            {
                d.DeleteFile(indexName);
            }
            catch (System.Exception t)
            {
                // Suppress so we keep throwing the original exception
            }
        }
    }

    doClose = true;
}
internal void InitTermVectorsWriter()
{
    lock (this)
    {
        if (tvx == null)
        {
            System.String docStoreSegment = docWriter.GetDocStoreSegment();
            if (docStoreSegment == null)
            {
                return;
            }
            System.Diagnostics.Debug.Assert(docStoreSegment != null);

            // If we hit an exception while init'ing the term
            // vector output files, we must abort this segment
            // because those files will be in an unknown
            // state:
            tvx = docWriter.directory.CreateOutput(docStoreSegment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
            tvd = docWriter.directory.CreateOutput(docStoreSegment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
            tvf = docWriter.directory.CreateOutput(docStoreSegment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);

            tvx.WriteInt(TermVectorsReader.FORMAT_CURRENT);
            tvd.WriteInt(TermVectorsReader.FORMAT_CURRENT);
            tvf.WriteInt(TermVectorsReader.FORMAT_CURRENT);

            docWriter.AddOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
            docWriter.AddOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
            docWriter.AddOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);

            lastDocID = 0;
        }
    }
}
private void Initialize(Directory directory, System.String segment, FieldInfos fis, int interval, bool isi)
{
    indexInterval = interval;
    fieldInfos = fis;
    isIndex = isi;
    output = directory.CreateOutput(segment + (isIndex ? ".tii" : ".tis"));
    output.WriteInt(FORMAT_CURRENT);  // write format
    output.WriteLong(0);              // leave space for size
    output.WriteInt(indexInterval);   // write indexInterval
    output.WriteInt(skipInterval);    // write skipInterval
    output.WriteInt(maxSkipLevels);   // write maxSkipLevels
    System.Diagnostics.Debug.Assert(InitUTF16Results());
}
/// <summary> Save this segment's info.</summary>
internal void Write(IndexOutput output)
{
    output.WriteString(name);
    output.WriteInt(docCount);
    output.WriteLong(delGen);
    output.WriteInt(docStoreOffset);
    if (docStoreOffset != -1)
    {
        output.WriteString(docStoreSegment);
        output.WriteByte((byte)(docStoreIsCompoundFile ? 1 : 0));
    }
    output.WriteByte((byte)(hasSingleNormFile ? 1 : 0));
    if (normGen == null)
    {
        output.WriteInt(NO);
    }
    else
    {
        output.WriteInt(normGen.Length);
        for (int j = 0; j < normGen.Length; j++)
        {
            output.WriteLong(normGen[j]);
        }
    }
    output.WriteByte((byte)isCompoundFile);
    output.WriteInt(delCount);
    output.WriteByte((byte)(hasProx ? 1 : 0));
    output.WriteStringStringMap(diagnostics);
}
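A sketch (not from the source) of reading this per-segment record back in the order it is written above; field names simply mirror the writer, and the -1/NO sentinel handling is assumed from the conditions in Write.

// Hypothetical counterpart to Write, inferred from the field order above (sketch only).
internal void Read(IndexInput input)
{
    name = input.ReadString();
    docCount = input.ReadInt();
    delGen = input.ReadLong();
    docStoreOffset = input.ReadInt();
    if (docStoreOffset != -1)
    {
        docStoreSegment = input.ReadString();
        docStoreIsCompoundFile = input.ReadByte() == 1;
    }
    hasSingleNormFile = input.ReadByte() == 1;
    int numNormGen = input.ReadInt();
    if (numNormGen == NO)
    {
        normGen = null;
    }
    else
    {
        normGen = new long[numNormGen];
        for (int j = 0; j < numNormGen; j++)
        {
            normGen[j] = input.ReadLong();
        }
    }
    isCompoundFile = (sbyte)input.ReadByte();
    delCount = input.ReadInt();
    hasProx = input.ReadByte() == 1;
    diagnostics = input.ReadStringStringMap();
}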
private void AddFixedSortedBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd, int length)
{
    field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_FIXED_SORTED.Name);

    CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT);
    CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT);

    /* values */
    data.WriteInt(length);
    int valueCount = 0;
    foreach (BytesRef v in values)
    {
        data.WriteBytes(v.Bytes, v.Offset, v.Length);
        valueCount++;
    }

    /* ordinals */
    index.WriteInt(valueCount);
    int maxDoc = State.SegmentInfo.DocCount;
    Debug.Assert(valueCount > 0);
    PackedInts.Writer w = PackedInts.GetWriter(index, maxDoc, PackedInts.BitsRequired(valueCount - 1), PackedInts.DEFAULT);
    foreach (long n in docToOrd)
    {
        w.Add((long)n);
    }
    w.Finish();
}