internal void AddDocument(Document doc)
{
    // record where this document's stored fields begin in the fields stream
    indexStream.WriteLong(fieldsStream.GetFilePointer());

    // count and write the number of stored fields
    int storedCount = 0;
    foreach (Field field in doc.Fields())
    {
        if (field.IsStored())
            storedCount++;
    }
    fieldsStream.WriteVInt(storedCount);

    // write each stored field: field number, flag bits, then the value
    foreach (Field field in doc.Fields())
    {
        if (field.IsStored())
        {
            fieldsStream.WriteVInt(fieldInfos.FieldNumber(field.Name()));

            byte bits = 0;
            if (field.IsTokenized())
                bits |= 1;
            fieldsStream.WriteByte(bits);

            fieldsStream.WriteString(field.StringValue());
        }
    }
}
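// A minimal sketch of how one record written above could be read back for a given
// document number. It assumes the reading counterparts of the same store API
// (an InputStream with Seek/ReadLong/ReadVInt/ReadByte/ReadString); it is
// illustrative only, not the actual FieldsReader implementation.
internal void ReadDocument(InputStream indexStream, InputStream fieldsStream, int docNum)
{
    // the index stream holds one long per document: the fields-stream offset
    indexStream.Seek(docNum * 8L);
    fieldsStream.Seek(indexStream.ReadLong());

    int storedCount = fieldsStream.ReadVInt();       // number of stored fields written
    for (int i = 0; i < storedCount; i++)
    {
        int fieldNumber = fieldsStream.ReadVInt();   // matches FieldNumber(field.Name())
        byte bits = fieldsStream.ReadByte();         // bit 0 records IsTokenized()
        bool tokenized = (bits & 1) != 0;
        System.String value = fieldsStream.ReadString();
        // reconstructing the Field from the field number, value and flags is omitted here
    }
}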
public void Write(Directory directory)
{
    OutputStream output = directory.CreateFile("segments.new");
    try
    {
        output.WriteInt(FORMAT);       // write FORMAT
        output.WriteLong(++version);   // every write changes the index
        output.WriteInt(counter);      // write counter
        output.WriteInt(Count);        // write infos
        for (int i = 0; i < Count; i++)
        {
            SegmentInfo si = Info(i);
            output.WriteString(si.name);
            output.WriteInt(si.docCount);
        }
    }
    finally
    {
        output.Close();
    }

    // install new segment info
    directory.RenameFile("segments.new", "segments");
}
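// A minimal sketch of reading the "segments" file back, mirroring the layout
// written above. It assumes a reading counterpart to OutputStream
// (Directory.OpenFile returning an InputStream with ReadInt/ReadLong/ReadString);
// the actual SegmentInfos.Read may differ.
public void Read(Directory directory)
{
    InputStream input = directory.OpenFile("segments");
    try
    {
        int format = input.ReadInt();  // FORMAT written first
        version = input.ReadLong();    // index version counter
        counter = input.ReadInt();     // counter used to name new segments
        int count = input.ReadInt();   // number of SegmentInfo entries
        for (int i = 0; i < count; i++)
        {
            System.String name = input.ReadString();
            int docCount = input.ReadInt();
            // a SegmentInfo(name, docCount, directory) would be added to the list here
        }
    }
    finally
    {
        input.Close();
    }
}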
private void Initialize(Directory directory, System.String segment, FieldInfos fis, bool isi)
{
    fieldInfos = fis;
    isIndex = isi;
    output = directory.CreateFile(segment + (isIndex ? ".tii" : ".tis"));
    output.WriteInt(FORMAT);        // write format
    output.WriteLong(0);            // leave space for size
    output.WriteInt(indexInterval); // write indexInterval
    output.WriteInt(skipInterval);  // write skipInterval
}
/// <summary>Called to complete TermInfos creation. </summary>
public /*internal*/ void Close()
{
    output.Seek(4); // write size after format
    output.WriteLong(size);
    output.Close();

    if (!isIndex)
        other.Close();
}
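// Initialize() reserves a long at offset 4 and Close() seeks back to fill in the
// final size, so a reader can consume the header in one pass. A minimal sketch of
// reading that header, assuming an InputStream counterpart with ReadInt/ReadLong:
private void ReadHeader(InputStream input)
{
    int format = input.ReadInt();        // FORMAT
    long size = input.ReadLong();        // value patched in by Close()
    int indexInterval = input.ReadInt(); // interval between indexed terms
    int skipInterval = input.ReadInt();  // interval of skip data in the postings
}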
/// <summary>Merge files with the extensions added up to now.
/// All files with these extensions are combined sequentially into the
/// compound stream. After successful merge, the source files
/// are deleted.
/// </summary>
public void Close()
{
    if (merged)
        throw new System.SystemException("Merge already performed");

    if (entries.Count == 0)
        throw new System.SystemException("No entries to merge have been defined");

    merged = true;

    // open the compound stream
    OutputStream os = null;
    try
    {
        os = directory.CreateFile(fileName);

        // Write the number of entries
        os.WriteVInt(entries.Count);

        // Write the directory with all offsets at 0.
        // Remember the positions of directory entries so that we can
        // adjust the offsets later
        System.Collections.IEnumerator it = entries.GetEnumerator();
        while (it.MoveNext())
        {
            FileEntry fe = (FileEntry) it.Current;
            fe.directoryOffset = os.GetFilePointer();
            os.WriteLong(0); // for now
            os.WriteString(fe.file);
        }

        // Open the files and copy their data into the stream.
        // Remember the locations of each file's data section.
        byte[] buffer = new byte[1024];
        it = entries.GetEnumerator();
        while (it.MoveNext())
        {
            FileEntry fe = (FileEntry) it.Current;
            fe.dataOffset = os.GetFilePointer();
            CopyFile(fe, os, buffer);
        }

        // Write the data offsets into the directory of the compound stream
        it = entries.GetEnumerator();
        while (it.MoveNext())
        {
            FileEntry fe = (FileEntry) it.Current;
            os.Seek(fe.directoryOffset);
            os.WriteLong(fe.dataOffset);
        }

        // Close the output stream. Set the os to null before trying to
        // close so that if an exception occurs during the close, the
        // finally clause below will not attempt to close the stream
        // a second time.
        OutputStream tmp = os;
        os = null;
        tmp.Close();
    }
    finally
    {
        if (os != null)
        {
            try
            {
                os.Close();
            }
            catch (System.IO.IOException)
            {
            }
        }
    }
}
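// A minimal sketch of how the compound file written above could be opened: read
// the VInt entry count, then the directory of (dataOffset, fileName) pairs; each
// entry's length is the gap to the next offset, or to the end of the file for the
// last entry. Assumes an InputStream counterpart with ReadVInt/ReadLong/ReadString
// and Length(); the actual compound-file reader may differ.
private void ReadDirectory(InputStream stream)
{
    int count = stream.ReadVInt();
    long previousOffset = -1;
    System.String previousName = null;
    for (int i = 0; i < count; i++)
    {
        long offset = stream.ReadLong();         // dataOffset patched in by Close()
        System.String name = stream.ReadString();
        if (previousName != null)
        {
            long length = offset - previousOffset; // length of the previous entry's data
            // record (previousName, previousOffset, length) in an entry table here
        }
        previousOffset = offset;
        previousName = name;
    }
    // the last entry's data runs to the end of the compound stream:
    // stream.Length() - previousOffset
}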