/// <summary>Appends the stored fields of <c>doc</c> to the fields stream,
/// recording the document's start offset in the index stream first.
/// Only fields flagged as stored are written; each entry is the field
/// number, a flag byte (bit 0 = tokenized), and the string value.</summary>
internal void AddDocument(Document doc)
{
    // Remember where this document's stored fields begin.
    indexStream.WriteLong(fieldsStream.GetFilePointer());

    // First pass: the stored-field count must be written before the data.
    int stored = 0;
    foreach (Field f in doc.Fields())
    {
        if (f.IsStored())
            stored++;
    }
    fieldsStream.WriteVInt(stored);

    // Second pass: emit each stored field.
    foreach (Field f in doc.Fields())
    {
        if (!f.IsStored())
            continue;

        fieldsStream.WriteVInt(fieldInfos.FieldNumber(f.Name()));

        byte flags = 0;
        if (f.IsTokenized())
            flags |= 1;
        fieldsStream.WriteByte(flags);

        fieldsStream.WriteString(f.StringValue());
    }
}
/// <summary>Persists the segment table: writes format, bumped version,
/// counter, and per-segment (name, docCount) pairs to "segments.new",
/// then installs the result by renaming it to "segments".</summary>
public void Write(Directory directory)
{
    OutputStream output = directory.CreateFile("segments.new");
    try
    {
        output.WriteInt(FORMAT);      // file format id
        output.WriteLong(++version);  // every write changes the index
        output.WriteInt(counter);
        output.WriteInt(Count);
        for (int i = 0; i < Count; i++)
        {
            SegmentInfo si = Info(i);
            output.WriteString(si.name);
            output.WriteInt(si.docCount);
        }
    }
    finally
    {
        output.Close();
    }

    // Install the new segment info.
    directory.RenameFile("segments.new", "segments");
}
/// <summary>Writes the list of deletable file names to disk: the count
/// followed by each name, written to a temporary file and then renamed
/// into place.</summary>
private void WriteDeleteableFiles(System.Collections.ArrayList files)
{
    // NOTE(review): the temp name is spelled "deleteable.new" while the
    // final name is "deletable" — looks historical/intentional, but confirm
    // no other code expects matching spellings before touching either literal.
    OutputStream output = directory.CreateFile("deleteable.new");
    try
    {
        output.WriteInt(files.Count);
        for (int i = 0; i < files.Count; i++)
        {
            output.WriteString((System.String)files[i]);
        }
    }
    finally
    {
        output.Close();
    }
    directory.RenameFile("deleteable.new", "deletable");
}
/// <summary>Serializes the field infos: a vInt count followed by each
/// field's name and a flag byte (bit 0 = indexed, bit 1 = stores term
/// vector).</summary>
public void Write(OutputStream output)
{
    output.WriteVInt(Size());
    for (int i = 0; i < Size(); i++)
    {
        FieldInfo fi = FieldInfo(i);
        // Pack per-field flags into a single byte.
        byte flags = (byte)((fi.isIndexed ? 0x1 : 0x0) | (fi.storeTermVector ? 0x2 : 0x0));
        output.WriteString(fi.name);
        output.WriteByte(flags);
    }
}
/// <summary>Merge files with the extensions added up to now.
/// All files with these extensions are combined sequentially into the
/// compound stream. After successful merge, the source files
/// are deleted.
/// </summary>
public void Close()
{
    // Guard: a compound file may only be assembled once.
    if (merged)
    {
        throw new System.SystemException("Merge already performed");
    }
    if ((entries.Count == 0))
    {
        throw new System.SystemException("No entries to merge have been defined");
    }
    merged = true;

    // open the compound stream
    OutputStream os = null;
    try
    {
        os = directory.CreateFile(fileName);

        // Write the number of entries
        os.WriteVInt(entries.Count);

        // Write the directory with all offsets at 0.
        // Remember the positions of directory entries so that we can
        // adjust the offsets later (the real data offsets are not known
        // until the files have actually been copied below).
        System.Collections.IEnumerator it = entries.GetEnumerator();
        while (it.MoveNext())
        {
            FileEntry fe = (FileEntry)it.Current;
            fe.directoryOffset = os.GetFilePointer();
            os.WriteLong(0); // for now — placeholder, patched via Seek below
            os.WriteString(fe.file);
        }

        // Open the files and copy their data into the stream.
        // Remeber the locations of each file's data section.
        byte[] buffer = new byte[1024];
        it = entries.GetEnumerator();
        while (it.MoveNext())
        {
            FileEntry fe = (FileEntry)it.Current;
            fe.dataOffset = os.GetFilePointer();
            CopyFile(fe, os, buffer);
        }

        // Write the data offsets into the directory of the compound stream:
        // seek back to each placeholder recorded above and overwrite it
        // with the now-known data offset.
        it = entries.GetEnumerator();
        while (it.MoveNext())
        {
            FileEntry fe = (FileEntry)it.Current;
            os.Seek(fe.directoryOffset);
            os.WriteLong(fe.dataOffset);
        }

        // Close the output stream. Set the os to null before trying to
        // close so that if an exception occurs during the close, the
        // finally clause below will not attempt to close the stream
        // the second time.
        OutputStream tmp = os;
        os = null;
        tmp.Close();
    }
    finally
    {
        // Best-effort cleanup: only runs if the normal close above failed
        // (os was nulled on the success path). Close errors are swallowed
        // deliberately so the original exception propagates.
        if (os != null)
        {
            try
            {
                os.Close();
            }
            catch (System.IO.IOException)
            {
            }
        }
    }
}
/// <summary>Serializes the field infos: a vInt count followed by each
/// field's name and a flag byte (bit 0 = indexed, bit 1 = stores term
/// vector).</summary>
public void Write(OutputStream output)
{
    output.WriteVInt(Size());
    for (int i = 0; i < Size(); i++)
    {
        FieldInfo fi = FieldInfo(i);
        // Pack per-field flags into a single byte.
        byte flags = (byte)((fi.isIndexed ? 0x1 : 0x0) | (fi.storeTermVector ? 0x2 : 0x0));
        output.WriteString(fi.name);
        output.WriteByte(flags);
    }
}