public void when_writing_then_seeking_exact_to_alignment_and_writing_again() {
	// Write past one alignment boundary, seek back exactly onto the boundary,
	// write a short run, and confirm position/length accounting plus the data
	// that survived in front of the boundary.
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	var payload = GetBytes(8192);
	using (var fs = UnbufferedFileStream.Create(path, FileMode.CreateNew, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		fs.Write(payload, 0, 5012);
		Assert.AreEqual(5012, fs.Position);

		fs.Seek(4096, SeekOrigin.Begin);
		Assert.AreEqual(4096, fs.Position);

		payload = GetBytes(15);
		fs.Write(payload, 0, payload.Length);
		Assert.AreEqual(4111, fs.Position);

		fs.Flush();
		Assert.AreEqual(4111, fs.Position);
		Assert.AreEqual(8192, new FileInfo(path).Length);

		var onDisk = ReadAllBytesShared(path);
		for (var i = 0; i < 255; i++) {
			Assert.AreEqual(i % 256, onDisk[i]);
		}
	}
}
public void seek_current_unimplemented() {
	// SeekOrigin.Current is not supported by UnbufferedFileStream.
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	using (var fs = UnbufferedFileStream.Create(path, FileMode.CreateNew, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		Assert.Throws<NotImplementedException>(() => fs.Seek(0, SeekOrigin.Current));
	}
}
public void seek_origin_end_to_mid_of_file() {
	// A negative offset from SeekOrigin.End positions the stream Length + offset.
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	using (var fs = UnbufferedFileStream.Create(path, FileMode.CreateNew, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		fs.Seek(-30, SeekOrigin.End);
		Assert.AreEqual(fs.Length - 30, fs.Position);
	}
}
public void when_resizing_a_file() {
	// SetLength should be reflected in the on-disk file size once the stream is closed.
	var filename = GetFilePathFor(Guid.NewGuid().ToString());
	const int newLength = 4096 * 1024;
	// Fix: the stream was created outside a using block, so it leaked if
	// SetLength threw. `using` guarantees disposal (Dispose == Close for streams).
	using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		stream.SetLength(newLength);
	}
	Assert.AreEqual(newLength, new FileInfo(filename).Length);
}
public void when_writing_less_than_buffer() {
	// A write smaller than the internal buffer stays buffered: nothing hits
	// disk before a flush, so the on-disk length remains zero.
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	var payload = GetBytes(255);
	using (var fs = UnbufferedFileStream.Create(path, FileMode.CreateNew, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		fs.Write(payload, 0, payload.Length);
		Assert.AreEqual(payload.Length, fs.Position);
		Assert.AreEqual(0, new FileInfo(path).Length);
	}
}
public void when_seeking_greater_than_2gb() {
	// Offsets beyond int.MaxValue must not overflow; always delete the 4GB file.
	const long Gigabyte = 1024L * 1024L * 1024L;
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	try {
		using (var fs = UnbufferedFileStream.Create(path, FileMode.CreateNew, FileAccess.ReadWrite,
			FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
			fs.SetLength(4L * Gigabyte);
			fs.Seek(3L * Gigabyte, SeekOrigin.Begin);
		}
	} finally {
		File.Delete(path);
	}
}
public void when_reading_on_aligned_buffer() {
	// Reading a whole block from a block-aligned position returns the file's pattern.
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	MakeFile(path, 20000);
	using (var fs = UnbufferedFileStream.Create(path, FileMode.Open, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		var page = new byte[4096];
		fs.Read(page, 0, page.Length);
		for (var i = 0; i < page.Length; i++) {
			Assert.AreEqual(i % 256, page[i]);
		}
	}
}
public void when_reading_on_unaligned_buffer() {
	// A read starting at a non-block-aligned offset must still return correct bytes.
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	MakeFile(path, 20000);
	using (var fs = UnbufferedFileStream.Create(path, FileMode.Open, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		const int offset = 15;
		fs.Seek(offset, SeekOrigin.Begin);
		var data = new byte[999];
		fs.Read(data, 0, data.Length);
		for (var i = 0; i < data.Length; i++) {
			Assert.AreEqual((i + offset) % 256, data[i]);
		}
	}
}
public void when_writing_more_than_buffer() {
	// Writing more than one buffer's worth spills complete 4KB blocks to disk.
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	var payload = GetBytes(9000);
	using (var fs = UnbufferedFileStream.Create(path, FileMode.CreateNew, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		fs.Write(payload, 0, payload.Length);
		Assert.AreEqual(4096 * 2, new FileInfo(path).Length);
		var onDisk = ReadAllBytesShared(path);
		for (var i = 0; i < 4096 * 2; i++) {
			Assert.AreEqual(i % 256, onDisk[i]);
		}
	}
}
public void seek_write_seek_read_in_buffer() {
	// Write at an unaligned offset, seek back, and read the same bytes out of
	// the internal buffer without an intervening flush.
	var filename = GetFilePathFor(Guid.NewGuid().ToString());
	using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		var buffer = GetBytes(255);
		stream.Seek(4096 + 15, SeekOrigin.Begin);
		stream.Write(buffer, 0, buffer.Length);
		stream.Seek(4096 + 15, SeekOrigin.Begin);
		var read = new byte[255];
		stream.Read(read, 0, read.Length);
		for (var i = 0; i < read.Length; i++) {
			// Fix: was `i % 255`, which only matched the GetBytes fill pattern
			// (i % 256, as every other test here asserts) by coincidence because
			// read.Length is exactly 255. Use the correct modulus so the
			// assertion stays valid if the buffer size ever changes.
			Assert.AreEqual(i % 256, read[i]);
		}
	}
}
public void when_reading_multiple_times_no_seek() {
	// Two consecutive reads without a seek continue from the current position.
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	MakeFile(path, 20000);
	using (var fs = UnbufferedFileStream.Create(path, FileMode.Open, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		var data = new byte[1000];
		fs.Read(data, 0, 500);
		Assert.AreEqual(500, fs.Position);
		fs.Read(data, 500, 500);
		Assert.AreEqual(1000, fs.Position);
		for (var i = 0; i < data.Length; i++) {
			Assert.AreEqual(i % 256, data[i]);
		}
	}
}
public void when_writing_less_than_buffer_and_seeking() {
	// Seeking flushes the buffered write, so the file grows to a whole 4KB block
	// and the written bytes are visible on disk.
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	var payload = GetBytes(255);
	using (var fs = UnbufferedFileStream.Create(path, FileMode.CreateNew, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		fs.Write(payload, 0, payload.Length);
		fs.Seek(0, SeekOrigin.Begin);
		Assert.AreEqual(0, fs.Position);
		Assert.AreEqual(4096, new FileInfo(path).Length);
		var onDisk = ReadAllBytesShared(path);
		for (var i = 0; i < 255; i++) {
			Assert.AreEqual(i % 256, onDisk[i]);
		}
	}
}
public void when_expanding_an_aligned_file_by_one_byte() {
	var filename = GetFilePathFor(Guid.NewGuid().ToString());
	const int initialFileSize = 4096 * 1024; // initial size of 4MB, block-aligned
	// Fix: the stream was created outside a using block, so it leaked if any
	// call or assertion before Close() threw. `using` guarantees disposal
	// (Dispose == Close for streams).
	using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		stream.SetLength(initialFileSize);
		stream.Seek(0, SeekOrigin.End);
		Assert.AreEqual(initialFileSize, stream.Position); // verify position
		stream.SetLength(initialFileSize + 1); // expand file by 1 byte
		Assert.AreEqual(initialFileSize, stream.Position); // position should not change
	}
	// The 1-byte expansion is rounded up to a whole 4KB block on disk.
	Assert.AreEqual(initialFileSize + 4096, new FileInfo(filename).Length);
}
public void when_seeking_non_exact_to_zero_block_and_writing() {
	// Writing after a seek into the middle of the first block must preserve the
	// existing bytes in front of the write position.
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	MakeFile(path, 4096 * 64);
	var payload = GetBytes(512);
	using (var fs = UnbufferedFileStream.Create(path, FileMode.Open, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		fs.Seek(128, SeekOrigin.Begin);
		fs.Write(payload, 0, payload.Length);
		fs.Flush();
	}
	// Re-open with a plain FileStream to inspect what actually landed on disk.
	using (var fs = new FileStream(path, FileMode.Open)) {
		var head = new byte[128];
		fs.Read(head, 0, head.Length);
		for (var i = 0; i < head.Length; i++) {
			Assert.AreEqual(i, head[i]);
		}
	}
}
public void when_reading_multiple_times_exact_page_size() {
	// Read 100 full 4KB pages plus a 50-byte tail, checking position accounting
	// and the repeating (i % 256) content pattern after every read.
	var filename = GetFilePathFor(Guid.NewGuid().ToString());
	MakeFile(filename, 4096 * 100 + 50);
	// One page of the expected pattern; 4096 is a multiple of 256, so every
	// full page of the file carries the identical pattern.
	Span<byte> expected = GetBytes(4096);
	using (var stream = UnbufferedFileStream.Create(filename, FileMode.Open, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		var read = new byte[4096];
		for (var i = 0; i < 100; i++) {
			var total = stream.Read(read, 0, 4096);
			Assert.AreEqual(4096 * (i + 1), stream.Position);
			Assert.AreEqual(4096, total);
			// Fast path: bulk-compare; fall back to per-byte asserts only on
			// mismatch so the failure message pinpoints the first bad byte.
			if (!expected.SequenceEqual(read)) {
				for (var j = 0; j < read.Length; j++) {
					Assert.AreEqual(j % 256, read[j]);
				}
			}
		}
		// Final partial page: only 50 bytes remain.
		expected = expected.Slice(0, 50);
		var total2 = stream.Read(read, 0, 50);
		Assert.AreEqual(409600 + 50, stream.Position);
		Assert.AreEqual(50, total2);
		// Fix: compare against only the 50 bytes actually read. The original
		// compared the 50-byte span with the whole 4096-byte array, so
		// SequenceEqual always failed on the length mismatch and the fast path
		// never succeeded for the tail.
		if (!expected.SequenceEqual(read.AsSpan(0, 50))) {
			for (var j = 0; j < 50; j++) {
				Assert.AreEqual(j % 256, read[j]);
			}
		}
	}
}
public void when_writing_multiple_times() {
	// Repeated small writes with flushes keep advancing the logical position
	// while the physical file stays a single 4KB block.
	var path = GetFilePathFor(Guid.NewGuid().ToString());
	var payload = GetBytes(256);
	using (var fs = UnbufferedFileStream.Create(path, FileMode.CreateNew, FileAccess.ReadWrite,
		FileShare.ReadWrite, false, 4096, 4096, false, 4096)) {
		fs.Write(payload, 0, payload.Length);
		Assert.AreEqual(256, fs.Position);
		fs.Flush();
		Assert.AreEqual(256, fs.Position);

		fs.Write(payload, 0, payload.Length);
		Assert.AreEqual(512, fs.Position);
		fs.Flush();
		Assert.AreEqual(512, fs.Position);

		Assert.AreEqual(4096, new FileInfo(path).Length);
		var onDisk = ReadAllBytesShared(path);
		for (var i = 0; i < 512; i++) {
			Assert.AreEqual(i % 256, onDisk[i]);
		}
	}
}
// Builds the midpoint cache for this PTable and (unless skipIndexVerify is set)
// verifies the file's MD5 hash while scanning it.
// Returns null when the table is empty or depth == 0; otherwise an array of
// between 2 and 2^depth midpoints (capped at the entry count).
// Throws ArgumentOutOfRangeException when depth is outside [0, 30],
// PossibleToHandleOutOfMemoryException when the midpoint array cannot be
// allocated, and CorruptIndexException when midpoint ordering is violated.
internal Midpoint[] CacheMidpointsAndVerifyHash(int depth, bool skipIndexVerify) {
	var buffer = new byte[4096];
	if (depth < 0 || depth > 30) {
		throw new ArgumentOutOfRangeException("depth");
	}
	var count = Count;
	if (count == 0 || depth == 0) {
		return (null);
	}
	if (skipIndexVerify) {
		Log.Debug("Disabling Verification of PTable");
	}
	Stream stream = null;
	WorkItem workItem = null;
	// On Unix/Mac reuse a pooled work-item stream; elsewhere open a dedicated
	// unbuffered stream for the scan.
	if (Runtime.IsUnixOrMac) {
		workItem = GetWorkItem();
		stream = workItem.Stream;
	} else {
		stream = UnbufferedFileStream.Create(_filename, FileMode.Open, FileAccess.Read, FileShare.Read, false,
			4096, 4096, false, 4096);
	}
	try {
		int midpointsCount;
		Midpoint[] midpoints;
		using (MD5 md5 = MD5.Create()) {
			try {
				midpointsCount = (int)Math.Max(2L, Math.Min((long)1 << depth, count));
				midpoints = new Midpoint[midpointsCount];
			} catch (OutOfMemoryException exc) {
				throw new PossibleToHandleOutOfMemoryException("Failed to allocate memory for Midpoint cache.",
					exc);
			}
			// Fast path: when verification is skipped and the (V4+) file already
			// carries exactly the number of midpoints we need, load them directly
			// from the cached-midpoints region of the PTable file.
			if (skipIndexVerify && (_version >= PTableVersions.IndexV4)) {
				if (_midpointsCached == midpointsCount) {
					//index verification is disabled and cached midpoints with the same depth requested are available
					//so, we can load them directly from the PTable file
					Log.Debug("Loading {midpointsCached} cached midpoints from PTable", _midpointsCached);
					// Cached midpoints sit just before the footer and MD5 trailer.
					long startOffset = stream.Length - MD5Size - PTableFooter.GetSize(_version) -
						_midpointsCacheSize;
					stream.Seek(startOffset, SeekOrigin.Begin);
					for (uint k = 0; k < _midpointsCached; k++) {
						stream.Read(buffer, 0, _indexEntrySize);
						IndexEntryKey key;
						long index;
						if (_version == PTableVersions.IndexV4) {
							key = new IndexEntryKey(BitConverter.ToUInt64(buffer, 8),
								BitConverter.ToInt64(buffer, 0));
							index = BitConverter.ToInt64(buffer, 8 + 8);
						} else {
							throw new InvalidOperationException("Unknown PTable version: " + _version);
						}
						midpoints[k] = new Midpoint(key, index);
						// Sanity-check ordering against the previous midpoint:
						// keys must not increase, item indexes must not decrease.
						if (k > 0) {
							if (midpoints[k].Key.GreaterThan(midpoints[k - 1].Key)) {
								throw new CorruptIndexException(String.Format(
									"Index entry key for midpoint {0} (stream: {1}, version: {2}) < index entry key for midpoint {3} (stream: {4}, version: {5})",
									k - 1, midpoints[k - 1].Key.Stream, midpoints[k - 1].Key.Version, k,
									midpoints[k].Key.Stream, midpoints[k].Key.Version));
							} else if (midpoints[k - 1].ItemIndex > midpoints[k].ItemIndex) {
								throw new CorruptIndexException(String.Format(
									"Item index for midpoint {0} ({1}) > Item index for midpoint {2} ({3})",
									k - 1, midpoints[k - 1].ItemIndex, k, midpoints[k].ItemIndex));
							}
						}
					}
					return (midpoints);
				} else {
					Log.Debug(
						"Skipping loading of cached midpoints from PTable due to count mismatch, cached midpoints: {midpointsCached} / required midpoints: {midpointsCount}",
						_midpointsCached, midpointsCount);
				}
			}
			// Slow path: compute midpoints by reading entry keys out of the table
			// body, hashing everything we pass over when verification is enabled.
			if (!skipIndexVerify) {
				stream.Seek(0, SeekOrigin.Begin);
				stream.Read(buffer, 0, PTableHeader.Size);
				md5.TransformBlock(buffer, 0, PTableHeader.Size, null, 0);
			}
			long previousNextIndex = long.MinValue;
			var previousKey = new IndexEntryKey(long.MaxValue, long.MaxValue);
			for (long k = 0; k < midpointsCount; ++k) {
				long nextIndex = GetMidpointIndex(k, count, midpointsCount);
				if (previousNextIndex != nextIndex) {
					if (!skipIndexVerify) {
						// Hash everything up to the entry, then read (and hash) its key.
						ReadUntilWithMd5(PTableHeader.Size + _indexEntrySize * nextIndex, stream, md5);
						stream.Read(buffer, 0, _indexKeySize);
						md5.TransformBlock(buffer, 0, _indexKeySize, null, 0);
					} else {
						stream.Seek(PTableHeader.Size + _indexEntrySize * nextIndex, SeekOrigin.Begin);
						stream.Read(buffer, 0, _indexKeySize);
					}
					// Key layout differs per PTable version.
					IndexEntryKey key;
					if (_version == PTableVersions.IndexV1) {
						key = new IndexEntryKey(BitConverter.ToUInt32(buffer, 4),
							BitConverter.ToInt32(buffer, 0));
					} else if (_version == PTableVersions.IndexV2) {
						key = new IndexEntryKey(BitConverter.ToUInt64(buffer, 4),
							BitConverter.ToInt32(buffer, 0));
					} else {
						key = new IndexEntryKey(BitConverter.ToUInt64(buffer, 8),
							BitConverter.ToInt64(buffer, 0));
					}
					midpoints[k] = new Midpoint(key, nextIndex);
					previousNextIndex = nextIndex;
					previousKey = key;
				} else {
					// Same entry index as the previous midpoint: reuse without re-reading.
					midpoints[k] = new Midpoint(previousKey, previousNextIndex);
				}
				// Same ordering sanity check as the fast path above.
				if (k > 0) {
					if (midpoints[k].Key.GreaterThan(midpoints[k - 1].Key)) {
						throw new CorruptIndexException(String.Format(
							"Index entry key for midpoint {0} (stream: {1}, version: {2}) < index entry key for midpoint {3} (stream: {4}, version: {5})",
							k - 1, midpoints[k - 1].Key.Stream, midpoints[k - 1].Key.Version, k,
							midpoints[k].Key.Stream, midpoints[k].Key.Version));
					} else if (midpoints[k - 1].ItemIndex > midpoints[k].ItemIndex) {
						throw new CorruptIndexException(String.Format(
							"Item index for midpoint {0} ({1}) > Item index for midpoint {2} ({3})",
							k - 1, midpoints[k - 1].ItemIndex, k, midpoints[k].ItemIndex));
					}
				}
			}
			if (!skipIndexVerify) {
				// Hash the remainder of the file and compare with the stored MD5
				// (the hash occupies the final MD5Size bytes of the stream).
				ReadUntilWithMd5(stream.Length - MD5Size, stream, md5);
				//verify hash (should be at stream.length - MD5Size)
				md5.TransformFinalBlock(Empty.ByteArray, 0, 0);
				var fileHash = new byte[MD5Size];
				stream.Read(fileHash, 0, MD5Size);
				ValidateHash(md5.Hash, fileHash);
			}
			return (midpoints);
		}
	} catch {
		// Any failure invalidates this PTable instance; dispose before rethrowing.
		Dispose();
		throw;
	} finally {
		// Return the pooled work item on Unix/Mac; dispose our own stream elsewhere.
		if (Runtime.IsUnixOrMac) {
			if (workItem != null) {
				ReturnWorkItem(workItem);
			}
		} else {
			if (stream != null) {
				stream.Dispose();
			}
		}
	}
}
// Legacy variant (no skipIndexVerify flag): builds the midpoint cache while
// always verifying the PTable's MD5 hash during the scan.
// Returns null when the table is empty or depth == 0.
// Throws ArgumentOutOfRangeException when depth is outside [0, 30] and
// PossibleToHandleOutOfMemoryException when the midpoint array cannot be allocated.
internal Midpoint[] CacheMidpointsAndVerifyHash(int depth) {
	var buffer = new byte[4096];
	if (depth < 0 || depth > 30) {
		throw new ArgumentOutOfRangeException("depth");
	}
	var count = Count;
	if (count == 0 || depth == 0) {
		return (null);
	}
#if __MonoCS__
	// Mono: reuse a pooled work-item stream and return it in the finally below.
	var workItem = GetWorkItem();
	var stream = workItem.Stream;
	try {
#else
	// Otherwise open a dedicated unbuffered stream for the scan.
	using (var stream = UnbufferedFileStream.Create(_filename, FileMode.Open, FileAccess.Read, FileShare.Read,
		false, 4096, 4096, false, 4096)) {
#endif
		try {
			int midpointsCount;
			Midpoint[] midpoints;
			using (MD5 md5 = MD5.Create()) {
				try {
					midpointsCount = (int)Math.Max(2L, Math.Min((long)1 << depth, count));
					midpoints = new Midpoint[midpointsCount];
				} catch (OutOfMemoryException exc) {
					throw new PossibleToHandleOutOfMemoryException("Failed to allocate memory for Midpoint cache.",
						exc);
				}
				// Hash the header first.
				stream.Seek(0, SeekOrigin.Begin);
				stream.Read(buffer, 0, PTableHeader.Size);
				md5.TransformBlock(buffer, 0, PTableHeader.Size, null, 0);
				long previousNextIndex = long.MinValue;
				var previousKey = new IndexEntryKey(long.MaxValue, int.MaxValue);
				for (long k = 0; k < midpointsCount; ++k) {
					// Evenly spaced midpoint positions across [0, count - 1].
					var nextIndex = (long)k * (count - 1) / (midpointsCount - 1);
					if (previousNextIndex != nextIndex) {
						// Hash everything up to the entry, then read (and hash) its key.
						ReadUntilWithMd5(PTableHeader.Size + _indexEntrySize * nextIndex, stream, md5);
						stream.Read(buffer, 0, _indexKeySize);
						md5.TransformBlock(buffer, 0, _indexKeySize, null, 0);
						// Key layout differs per PTable version.
						IndexEntryKey key;
						if (_version == PTableVersions.Index32Bit) {
							key = new IndexEntryKey(BitConverter.ToUInt32(buffer, 4),
								BitConverter.ToInt32(buffer, 0));
						} else {
							key = new IndexEntryKey(BitConverter.ToUInt64(buffer, 4),
								BitConverter.ToInt32(buffer, 0));
						}
						midpoints[k] = new Midpoint(key, nextIndex);
						previousNextIndex = nextIndex;
						previousKey = key;
					} else {
						// Same entry index as the previous midpoint: reuse without re-reading.
						midpoints[k] = new Midpoint(previousKey, previousNextIndex);
					}
				}
				// Hash the remainder of the file and compare with the stored MD5.
				ReadUntilWithMd5(stream.Length - MD5Size, stream, md5);
				//verify hash (should be at stream.length - MD5Size)
				md5.TransformFinalBlock(Empty.ByteArray, 0, 0);
				var fileHash = new byte[MD5Size];
				stream.Read(fileHash, 0, MD5Size);
				ValidateHash(md5.Hash, fileHash);
				return (midpoints);
			}
		} catch {
			// Any failure invalidates this PTable instance; dispose before rethrowing.
			Dispose();
			throw;
		}
	}
#if __MonoCS__
	finally {
		ReturnWorkItem(workItem);
	}
#endif
}