/// <summary>
/// Repositions the wrapped input. The given <paramref name="pos"/> is relative
/// to the start of this slice and is translated by the slice's base <c>offset</c>
/// before being forwarded to the delegate.
/// </summary>
/// <param name="pos">Position relative to the start of this slice.</param>
public override void Seek(long pos)
{
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(!closed);
    }

    long absolutePos = offset + pos;
    @delegate.Seek(absolutePos);
}
/// <summary>
/// Starts but does not complete the commit of this file (= writing of the
/// final checksum at the end). After this is called you must call
/// <see cref="FinishCommit"/> and then <see cref="Dispose"/> to complete
/// the commit.
/// </summary>
public virtual void PrepareCommit()
{
    // Intentionally write a mismatched checksum (checksum - 1). This lets us
    // 1) verify, as best we can, that we are able to write a long to the
    // file, while 2) not actually "committing" the file yet. This (prepare
    // commit) is phase 1 of a two-phase commit.
    long mismatchedChecksum = Checksum - 1;
    long restorePoint = main.FilePointer;

    main.WriteLong(mismatchedChecksum);
    main.Flush();

    // Rewind so FinishCommit can overwrite the placeholder with the real value.
    main.Seek(restorePoint);
}
/// <summary>Called to complete TermInfos creation.</summary>
public void Dispose()
{
    // Move to protected method if class becomes unsealed
    if (isDisposed)
    {
        return;
    }

    // The companion writer is only disposed for the primary (non-index)
    // instance; using(null) is a legal no-op for the index case.
    System.IDisposable companion = isIndex ? null : other;
    using (companion)
    {
        using (output)
        {
            output.Seek(4); // write size after format
            output.WriteLong(size);
        }
    }

    isDisposed = true;
}
/// <summary>Forwards the seek unchanged to the wrapped input.</summary>
/// <param name="pos">Absolute position to seek to.</param>
public override void Seek(long pos) => @delegate.Seek(pos);
/// <summary>
/// Seeks the underlying output, flushing any buffered bytes first so they
/// land at the old position rather than the new one.
/// </summary>
/// <param name="pos">Absolute position to seek to.</param>
public override void Seek(long pos)
{
    // Push pending buffered bytes out before the file pointer moves.
    Flush();
    @delegate.Seek(pos);
}
/// <summary>
/// Repositions the wrapped input; <paramref name="pos"/> is relative to the
/// start of this slice and is translated by the slice's base <c>offset</c>.
/// </summary>
/// <param name="pos">Position relative to the start of this slice.</param>
public override void Seek(long pos)
{
    Debug.Assert(!closed);

    long absolutePos = offset + pos;
    @delegate.Seek(absolutePos);
}
/// <summary>Merge files with the extensions added up to now.
/// All files with these extensions are combined sequentially into the
/// compound stream. After successful merge, the source files
/// are deleted.
/// </summary>
/// <throws> IllegalStateException if close() had been called before or </throws>
/// <summary> if no file has been added to this object
/// </summary>
public void Dispose()
{
    // Extract into protected method if class ever becomes unsealed
    // TODO: Dispose shouldn't throw exceptions!
    if (merged)
    {
        throw new SystemException("Merge already performed");
    }
    if ((entries.Count == 0))
    {
        throw new SystemException("No entries to merge have been defined");
    }

    // Mark merged before doing any I/O so a second Dispose call fails fast
    // rather than re-running the merge.
    merged = true;

    // open the compound stream
    IndexOutput os = null;
    try
    {
        var state = StateHolder.Current.Value;
        os = directory.CreateOutput(fileName, state);

        // Write the number of entries
        os.WriteVInt(entries.Count);

        // Write the directory with all offsets at 0.
        // Remember the positions of directory entries so that we can
        // adjust the offsets later
        long totalSize = 0;
        foreach (FileEntry fe in entries)
        {
            fe.directoryOffset = os.FilePointer;
            os.WriteLong(0); // placeholder offset; patched after the data is copied
            os.WriteString(fe.file);
            totalSize += directory.FileLength(fe.file, state);
        }

        // Pre-allocate size of file as optimization --
        // this can potentially help IO performance as
        // we write the file and also later during
        // searching. It also uncovers a disk-full
        // situation earlier and hopefully without
        // actually filling disk to 100%:
        long finalLength = totalSize + os.FilePointer;
        os.SetLength(finalLength);

        // Open the files and copy their data into the stream.
        // Remember the locations of each file's data section.
        var buffer = new byte[16384];
        foreach (FileEntry fe in entries)
        {
            fe.dataOffset = os.FilePointer;
            CopyFile(fe, os, buffer, state);
        }

        // Write the data offsets into the directory of the compound stream
        foreach (FileEntry fe in entries)
        {
            os.Seek(fe.directoryOffset);
            os.WriteLong(fe.dataOffset);
        }

        // Sanity check: the pre-allocated length must equal what we wrote.
        System.Diagnostics.Debug.Assert(finalLength == os.Length);

        // Close the output stream. Set the os to null before trying to
        // close so that if an exception occurs during the close, the
        // finally clause below will not attempt to close the stream
        // the second time.
        IndexOutput tmp = os;
        os = null;
        tmp.Close();
    }
    finally
    {
        if (os != null)
        {
            // Best-effort close on the failure path; swallow the IOException
            // so the original exception from the try block propagates.
            try
            {
                os.Close();
            }
            catch (System.IO.IOException)
            {
            }
        }
    }
}