/// <summary>
/// Verifies that three new subfiles can be created, written, read back, and
/// committed inside a single <see cref="TransactionalEdit"/>.
/// </summary>
private static void TestCreateNewFile(DiskIo stream, FileHeaderBlock fat)
{
    SubFileName nameA = SubFileName.CreateRandom();
    SubFileName nameB = SubFileName.CreateRandom();
    SubFileName nameC = SubFileName.CreateRandom();

    TransactionalEdit transaction = new TransactionalEdit(stream);

    // Create three new subfiles within the transaction.
    SubFileStream streamA = transaction.CreateFile(nameA);
    SubFileStream streamB = transaction.CreateFile(nameB);
    SubFileStream streamC = transaction.CreateFile(nameC);

    if (streamA.SubFile.FileName != nameA)
    {
        throw new Exception();
    }

    // Populate each file with a distinct data pattern.
    SubFileStreamTest.TestSingleByteWrite(streamA);
    SubFileStreamTest.TestCustomSizeWrite(streamB, 5);
    SubFileStreamTest.TestCustomSizeWrite(streamC, BlockDataLength + 20);

    // Read everything back (in reverse order) and verify the contents.
    SubFileStreamTest.TestCustomSizeRead(streamC, BlockDataLength + 20);
    SubFileStreamTest.TestCustomSizeRead(streamB, 5);
    SubFileStreamTest.TestSingleByteRead(streamA);

    streamA.Dispose();
    streamB.Dispose();
    streamC.Dispose();
    transaction.CommitAndDispose();
}
/// <summary>
/// Regenerates the seeded random chunk stream produced by the matching write
/// helper and verifies the file contents byte-for-byte.
/// </summary>
internal static void TestCustomSizeRead(SubFileStream ds, int seed)
{
    using (BinaryStream reader = new BinaryStream(ds))
    {
        Random rng = new Random(seed);
        byte[] expected = new byte[25];
        byte[] actual = new byte[25];
        reader.Position = 0;

        for (int pass = 0; pass < 1000; pass++)
        {
            // Rebuild the chunk the writer generated for this pass.
            for (int i = 0; i < expected.Length; i++)
            {
                expected[i] = (byte)rng.Next();
            }

            int count = rng.Next(25);
            reader.ReadAll(actual, 0, count);

            for (int i = 0; i < count; i++)
            {
                if (expected[i] != actual[i])
                {
                    throw new Exception();
                }
            }
        }
    }
}
/// <summary>
/// Creates and initializes an empty sorted-tree archive subfile inside the file structure.
/// </summary>
/// <typeparam name="TKey">The key type stored in the tree.</typeparam>
/// <typeparam name="TValue">The value type stored in the tree.</typeparam>
/// <param name="fileName">The name of the subfile to create.</param>
/// <param name="storageMethod">The encoding used to store tree records.</param>
/// <param name="maxSortedTreeBlockSize">Upper bound on the tree block size; must be at least 1024.</param>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="maxSortedTreeBlockSize"/> is less than 1024.</exception>
/// <exception cref="ArgumentNullException"><paramref name="storageMethod"/> is null.</exception>
private void CreateArchiveFile<TKey, TValue>(SubFileName fileName, EncodingDefinition storageMethod, int maxSortedTreeBlockSize)
    where TKey : SnapTypeBase<TKey>, new()
    where TValue : SnapTypeBase<TValue>, new()
{
    if (maxSortedTreeBlockSize < 1024)
    {
        // FIX: the guard rejects values below 1024 and accepts exactly 1024, so the
        // message must read "at least 1024" (old text said "greater than 1024").
        throw new ArgumentOutOfRangeException(nameof(maxSortedTreeBlockSize), "Must be at least 1024");
    }

    if ((object)storageMethod == null)
    {
        // FIX: use nameof for refactor safety, consistent with the guard above.
        throw new ArgumentNullException(nameof(storageMethod));
    }

    using (TransactionalEdit trans = m_fileStructure.BeginEdit())
    {
        using (SubFileStream fs = trans.CreateFile(fileName))
        using (BinaryStream bs = new BinaryStream(fs))
        {
            // Shrink the structure's block size until it fits under the caller's limit.
            // NOTE(review): ">>= 2" divides by 4 each pass; confirm that halving
            // (">>= 1") was not the intent — preserved as-is to avoid a behavior change.
            int blockSize = m_fileStructure.Snapshot.Header.DataBlockSize;
            while (blockSize > maxSortedTreeBlockSize)
            {
                blockSize >>= 2;
            }

            SortedTree<TKey, TValue> tree = SortedTree<TKey, TValue>.Create(bs, blockSize, storageMethod);
            tree.Flush();
        }

        trans.ArchiveType = FileType;
        trans.CommitAndDispose();
    }
}
/// <summary>
/// Opens the three previously committed subfiles, verifies their contents,
/// rewrites them with different patterns, and commits the changes.
/// </summary>
private static void TestOpenExistingFile(DiskIo stream, FileHeaderBlock fat)
{
    // FIX: removed an unused "Guid id = Guid.NewGuid();" local and the stale
    // "create 3 files" comment copied from TestCreateNewFile.
    TransactionalEdit trans = new TransactionalEdit(stream);

    // Open the three existing files.
    SubFileStream fs1 = trans.OpenFile(0);
    SubFileStream fs2 = trans.OpenFile(1);
    SubFileStream fs3 = trans.OpenFile(2);

    // Read from them and verify content.
    SubFileStreamTest.TestSingleByteRead(fs1);
    SubFileStreamTest.TestCustomSizeRead(fs2, 5);
    SubFileStreamTest.TestCustomSizeRead(fs3, BlockDataLength + 20);

    // Rewrite each file with a different pattern.
    SubFileStreamTest.TestSingleByteWrite(fs2);
    SubFileStreamTest.TestCustomSizeWrite(fs3, 5);
    SubFileStreamTest.TestCustomSizeWrite(fs1, BlockDataLength + 20);

    fs1.Dispose();
    fs2.Dispose();
    fs3.Dispose();
    trans.CommitAndDispose();
}
/// <summary>
/// Measures read/write/checksum amplification for a purely sequential write
/// workload against an in-memory file structure with 4096-byte blocks.
/// </summary>
public void TestSequentialWriteAmplification()
{
    MemoryPoolTest.TestMemoryLeak();

    double pageCount;
    Stats.ChecksumCount = 0;
    DiskIoSession.WriteCount = 0;
    DiskIoSession.ReadCount = 0;

    using (TransactionalFileStructure file = TransactionalFileStructure.CreateInMemory(4096))
    using (TransactionalEdit edit = file.BeginEdit())
    using (SubFileStream stream = edit.CreateFile(SubFileName.CreateRandom()))
    using (BinaryStream bs = new BinaryStream(stream))
    {
        // Reset the counters again so setup costs are excluded from the measurement.
        Stats.ChecksumCount = 0;
        DiskIoSession.WriteCount = 0;
        DiskIoSession.ReadCount = 0;

        // Write one million longs (8 million bytes) sequentially.
        for (long value = 0; value < 1000000; value++)
        {
            bs.Write(value);
        }

        pageCount = bs.Position / 4096.0;
    }

    System.Console.WriteLine("Read: " + (DiskIoSession.ReadCount / pageCount).ToString("0.0"));
    System.Console.WriteLine("Write: " + (DiskIoSession.WriteCount / pageCount).ToString("0.0"));
    System.Console.WriteLine("Checksums: " + (Stats.ChecksumCount / pageCount).ToString("0.0"));

    MemoryPoolTest.TestMemoryLeak();
}
/// <summary>
/// Commits the edits to the current archive file and disposes of this class.
/// </summary>
public override void Commit()
{
    if (m_disposed)
    {
        throw new ObjectDisposedException(GetType().FullName);
    }

    // Capture the final key range into the parent table before the tree is released.
    GetKeyRange(m_sortedTreeFile.m_firstKey, m_sortedTreeFile.m_lastKey);

    // Tear down in reverse order of construction — flush the tree first so all
    // pending writes reach the streams, then dispose wrapper before underlying stream.
    if (m_tree != null)
    {
        m_tree.Flush();
        m_tree = null;
    }
    if (m_binaryStream1 != null)
    {
        m_binaryStream1.Dispose();
        m_binaryStream1 = null;
    }
    if (m_subStream != null)
    {
        m_subStream.Dispose();
        m_subStream = null;
    }

    // Only after all buffered data is flushed can the transaction be committed.
    m_currentTransaction.CommitAndDispose();
    InternalDispose();
}
/// <summary>
/// Confirms a prior rollback took effect: exactly three files remain and each
/// still holds its pre-rollback contents.
/// </summary>
private static void TestVerifyRollback(DiskIo stream, FileHeaderBlock fat)
{
    // FIX: removed an unused "Guid id = Guid.NewGuid();" local.
    TransactionalEdit trans = new TransactionalEdit(stream);

    // The rolled-back transaction must not have left extra files behind.
    if (trans.Files.Count != 3)
    {
        throw new Exception();
    }

    // Open files.
    SubFileStream fs1 = trans.OpenFile(0);
    SubFileStream fs2 = trans.OpenFile(1);
    SubFileStream fs3 = trans.OpenFile(2);

    // Read from them and verify content.
    SubFileStreamTest.TestSingleByteRead(fs2);
    SubFileStreamTest.TestCustomSizeRead(fs3, 5);
    SubFileStreamTest.TestCustomSizeRead(fs1, BlockDataLength + 20);

    fs1.Dispose();
    fs2.Dispose();
    fs3.Dispose();
    trans.Dispose();
}
// NOTE(review): intentionally a no-op stub — the direct SubFileStream write
// implementation is retained below, commented out. Callers still invoke this
// method, so its read counterpart must remain a matching no-op; confirm whether
// the disabled body should be restored or the dead code deleted.
internal static void TestSingleByteWrite(SubFileStream ds)
{
    //ds.Position = 0;
    //for (int x = 0; x < 10000; x++)
    //{
    //    ds.WriteByte((byte)x);
    //}
    //ds.Flush();
}
// NOTE(review): intentionally a no-op stub — the direct single-byte verification
// is retained below, commented out, matching the disabled TestSingleByteWrite
// stub. Confirm whether it should be restored or the dead code deleted.
internal static void TestSingleByteRead(SubFileStream ds)
{
    //ds.Position = 0;
    //for (int x = 0; x < 10000; x++)
    //{
    //    if ((byte)x != ds.ReadByte())
    //        throw new Exception();
    //}
}
/// <summary>
/// Opens an edit transaction on the table's file structure and loads its sorted
/// tree with auto-flush disabled, so writes are buffered until an explicit
/// flush or commit.
/// </summary>
/// <param name="sortedTreeFile">The table whose subfile will be edited.</param>
internal Editor(SortedTreeTable<TKey, TValue> sortedTreeFile)
{
    m_sortedTreeFile = sortedTreeFile;
    m_currentTransaction = m_sortedTreeFile.m_fileStructure.BeginEdit();
    m_subStream = m_currentTransaction.OpenFile(sortedTreeFile.m_fileName);
    m_binaryStream1 = new BinaryStream(m_subStream);
    m_tree = SortedTree<TKey, TValue>.Open(m_binaryStream1);
    // NOTE(review): unlike SortedTreeTableReadSnapshot's constructor there is no
    // try/catch here — if Open throws, the transaction and streams opened above
    // are never disposed. Confirm whether a cleanup guard is needed.
    m_tree.AutoFlush = false;
}
/// <summary>
/// Writes 10,000 sequential bytes (0..255, wrapping) starting at position 0.
/// </summary>
internal static void TestSingleByteWrite(SubFileStream ds)
{
    using (BinaryStream writer = new BinaryStream(ds))
    {
        writer.Position = 0;
        for (int i = 0; i < 10000; i++)
        {
            writer.Write((byte)i);
        }
    }
}
/// <summary>
/// Creates three fresh subfiles on an editable header and runs the generic
/// BinaryStream test suite against the first one.
/// </summary>
private static void TestBinaryStream(DiskIo stream)
{
    FileHeaderBlock editableHeader = stream.LastCommittedHeader.CloneEditable();

    SubFileHeader firstFile = editableHeader.CreateNewFile(SubFileName.CreateRandom());
    editableHeader.CreateNewFile(SubFileName.CreateRandom());
    editableHeader.CreateNewFile(SubFileName.CreateRandom());

    SubFileStream target = new SubFileStream(stream, firstFile, editableHeader, false);
    BinaryStreamTest.Test(target);
}
/// <summary>
/// Opens the named subfile under the given read snapshot and attaches a sorted
/// tree to it.
/// </summary>
/// <param name="currentTransaction">The read-only snapshot to open the file from.</param>
/// <param name="fileName">The subfile containing the tree.</param>
internal SortedTreeTableReadSnapshot(ReadSnapshot currentTransaction, SubFileName fileName)
{
    try
    {
        m_subStream = currentTransaction.OpenFile(fileName);
        m_binaryStream = new BinaryStream(m_subStream);
        m_tree = SortedTree<TKey, TValue>.Open(m_binaryStream);
    }
    catch
    {
        // Construction failed partway: release whatever was opened, then rethrow.
        Dispose();
        throw;
    }
}
/// <summary>
/// Verifies the 10,000-byte wrapping sequence written by TestSingleByteWrite.
/// </summary>
internal static void TestSingleByteRead(SubFileStream ds)
{
    using (BinaryStream reader = new BinaryStream(ds))
    {
        reader.Position = 0;
        for (int i = 0; i < 10000; i++)
        {
            byte expected = (byte)i;
            if (expected != reader.ReadUInt8())
            {
                throw new Exception();
            }
        }
    }
}
/// <summary>
/// Exercises write-commit-read cycles on file 0, committing between each
/// pattern, and verifies that streams opened against older committed headers
/// still read their original (pre-commit) contents.
/// </summary>
private static void TestReadAndWritesWithCommit(DiskIo stream)
{
    FileHeaderBlock header;
    SubFileHeader node;
    // ds is the working stream; ds1/ds2 retain read-only views of earlier commits.
    SubFileStream ds, ds1, ds2;

    //Open The File For Editing
    header = stream.LastCommittedHeader.CloneEditable();
    node = header.Files[0];
    ds = new SubFileStream(stream, node, header, false);
    TestSingleByteWrite(ds);
    stream.CommitChanges(header);

    // Re-open read-only against the newly committed header; keep ds1 for the
    // version check at the end.
    header = stream.LastCommittedHeader;
    node = header.Files[0];
    ds1 = ds = new SubFileStream(stream, node, header, true);
    TestSingleByteRead(ds);

    //Open The File For Editing
    header = stream.LastCommittedHeader.CloneEditable();
    node = header.Files[0];
    ds = new SubFileStream(stream, node, header, false);
    TestCustomSizeWrite(ds, 5);
    stream.CommitChanges(header);

    // Keep ds2 (read-only view of this second commit) for the version check.
    header = stream.LastCommittedHeader;
    node = header.Files[0];
    ds2 = ds = new SubFileStream(stream, node, header, true);
    TestCustomSizeRead(ds, 5);

    //Open The File For Editing
    header = stream.LastCommittedHeader.CloneEditable();
    node = header.Files[0];
    ds = new SubFileStream(stream, node, header, false);
    TestCustomSizeWrite(ds, BlockDataLength + 20);
    stream.CommitChanges(header);

    header = stream.LastCommittedHeader;
    node = header.Files[0];
    ds = new SubFileStream(stream, node, header, true);
    TestCustomSizeRead(ds, BlockDataLength + 20);

    //check old versions of the file
    TestSingleByteRead(ds1);
    TestCustomSizeRead(ds2, 5);
}
// NOTE(review): intentionally a no-op stub — the original SubFileStream-based
// random-chunk write is retained below, commented out. Callers still invoke
// this method; confirm whether the disabled body should be restored or the
// dead code deleted.
internal static void TestCustomSizeWrite(SubFileStream ds, int length)
{
    //Random r = new Random(length);
    //ds.Position = 0;
    //byte[] buffer = new byte[25];
    //for (int x = 0; x < 1000; x++)
    //{
    //    for (int i = 0; i < buffer.Length; i++)
    //    {
    //        buffer[i] = (byte)r.Next();
    //    }
    //    ds.Write(buffer, 0, r.Next(25));
    //}
    //ds.Flush();
}
/// <summary>
/// Creates three subfiles, round-trips several write/read patterns through the
/// first one, and commits the header.
/// </summary>
private static void TestReadAndWrites(DiskIo stream)
{
    FileHeaderBlock editableHeader = stream.LastCommittedHeader.CloneEditable();

    SubFileHeader firstFile = editableHeader.CreateNewFile(SubFileName.CreateRandom());
    editableHeader.CreateNewFile(SubFileName.CreateRandom());
    editableHeader.CreateNewFile(SubFileName.CreateRandom());

    SubFileStream target = new SubFileStream(stream, firstFile, editableHeader, false);

    // Each write is immediately verified by its matching read.
    TestSingleByteWrite(target);
    TestSingleByteRead(target);
    TestCustomSizeWrite(target, 5);
    TestCustomSizeRead(target, 5);
    TestCustomSizeWrite(target, BlockDataLength + 20);
    TestCustomSizeRead(target, BlockDataLength + 20);

    stream.CommitChanges(editableHeader);
}
/// <summary>
/// Writes 1000 variable-length random chunks; seeding Random with "length"
/// makes the stream reproducible so TestCustomSizeRead can verify it.
/// </summary>
internal static void TestCustomSizeWrite(SubFileStream ds, int length)
{
    using (BinaryStream writer = new BinaryStream(ds))
    {
        Random rng = new Random(length);
        writer.Position = 0;
        byte[] chunk = new byte[25];

        for (int pass = 0; pass < 1000; pass++)
        {
            for (int i = 0; i < chunk.Length; i++)
            {
                chunk[i] = (byte)rng.Next();
            }

            // Only a random prefix (0..24 bytes) of the chunk is actually written.
            writer.Write(chunk, 0, rng.Next(25));
        }
    }
}
/// <summary>
/// Fills a subfile with 20 million longs through a BinaryStream and runs the
/// stream benchmark against it, checking for memory-pool leaks before and after.
/// </summary>
public void TestSubFileStream()
{
    const int BlockSize = 256;

    MemoryPoolTest.TestMemoryLeak();

    try
    {
        // In-memory variant; the disk-file variant of this test is disabled.
        using (TransactionalFileStructure service = TransactionalFileStructure.CreateInMemory(BlockSize))
        {
            using (TransactionalEdit edit = service.BeginEdit())
            {
                SubFileStream fs = edit.CreateFile(SubFileName.Empty);
                BinaryStream bs = new BinaryStream(fs);

                for (int i = 0; i < 20000000; i++)
                {
                    bs.Write(1L);
                }

                bs.Position = 0;
                BinaryStreamBenchmark.Run(bs, false);

                bs.Dispose();
                fs.Dispose();
                edit.CommitAndDispose();
            }
        }
    }
    finally
    {
        // Nothing to clean up for the in-memory file structure.
    }

    MemoryPoolTest.TestMemoryLeak();
}
/// <summary>
/// Rolls back all edits that are made to the archive file and disposes of this class.
/// </summary>
public override void Rollback()
{
    if (m_disposed)
    {
        throw new ObjectDisposedException(GetType().FullName);
    }

    // Drop the tree WITHOUT flushing so no pending edits reach the file
    // (contrast with Commit, which flushes first).
    m_tree = null;
    if (m_binaryStream1 != null)
    {
        m_binaryStream1.Dispose();
        m_binaryStream1 = null;
    }
    if (m_subStream != null)
    {
        m_subStream.Dispose();
        m_subStream = null;
    }

    m_currentTransaction.RollbackAndDispose();
    InternalDispose();
}
/// <summary>
/// Creates three additional files and rewrites the existing ones inside a
/// transaction, then rolls everything back so none of it persists.
/// </summary>
private static void TestRollback(DiskIo stream, FileHeaderBlock fat)
{
    SubFileName nameA = SubFileName.CreateRandom();
    SubFileName nameB = SubFileName.CreateRandom();
    SubFileName nameC = SubFileName.CreateRandom();

    TransactionalEdit transaction = new TransactionalEdit(stream);

    // Create three additional files inside the transaction.
    SubFileStream newFile1 = transaction.CreateFile(nameA);
    SubFileStream newFile2 = transaction.CreateFile(nameB);
    SubFileStream newFile3 = transaction.CreateFile(nameC);

    // Open the three pre-existing files.
    SubFileStream existing1 = transaction.OpenFile(0);
    SubFileStream existing2 = transaction.OpenFile(1);
    SubFileStream existing3 = transaction.OpenFile(2);

    // Read from them and verify content.
    SubFileStreamTest.TestSingleByteRead(existing2);
    SubFileStreamTest.TestCustomSizeRead(existing3, 5);
    SubFileStreamTest.TestCustomSizeRead(existing1, BlockDataLength + 20);

    // Overwrite with different patterns; the rollback below must discard these.
    SubFileStreamTest.TestSingleByteWrite(existing3);
    SubFileStreamTest.TestCustomSizeWrite(existing1, 5);
    SubFileStreamTest.TestCustomSizeWrite(existing2, BlockDataLength + 20);

    existing1.Dispose();
    existing2.Dispose();
    existing3.Dispose();
    newFile1.Dispose();
    newFile2.Dispose();
    newFile3.Dispose();
    transaction.RollbackAndDispose();
}
// NOTE(review): intentionally a no-op stub — the original SubFileStream-based
// verification is retained below, commented out, matching the disabled
// TestCustomSizeWrite stub. Confirm whether it should be restored or the dead
// code deleted.
internal static void TestCustomSizeRead(SubFileStream ds, int seed)
{
    //Random r = new Random(seed);
    //byte[] buffer = new byte[25];
    //byte[] buffer2 = new byte[25];
    //ds.Position = 0;
    //for (int x = 0; x < 1000; x++)
    //{
    //    for (int i = 0; i < buffer.Length; i++)
    //    {
    //        buffer[i] = (byte)r.Next();
    //    }
    //    int length = r.Next(25);
    //    ds.Read(buffer2, 0, length);
    //    for (int i = 0; i < length; i++)
    //    {
    //        if (buffer[i] != buffer2[i])
    //            throw new Exception();
    //    }
    //}
    //ds.Flush();
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
/// <filterpriority>2</filterpriority>
public void Dispose()
{
    // Idempotent: subsequent calls are no-ops.
    if (m_disposed)
    {
        return;
    }

    try
    {
        // Dispose the wrapper stream before the underlying subfile stream.
        m_binaryStream?.Dispose();
        m_subStream?.Dispose();
    }
    finally
    {
        // Clear all references even if a Dispose call threw.
        m_subStream = null;
        m_binaryStream = null;
        m_tree = null;
        m_disposed = true;
    }
}
/// <summary>
/// Writes a distinct pattern to each of three subfiles under one commit, then
/// reads each back through fresh read-only streams on the committed header.
/// </summary>
private static void TestReadAndWritesToDifferentFilesWithCommit(DiskIo stream)
{
    // Open the files for editing and write a different pattern to each.
    FileHeaderBlock editableHeader = stream.LastCommittedHeader.CloneEditable();

    TestSingleByteWrite(new SubFileStream(stream, editableHeader.Files[0], editableHeader, false));
    TestCustomSizeWrite(new SubFileStream(stream, editableHeader.Files[1], editableHeader, false), 5);
    TestCustomSizeWrite(new SubFileStream(stream, editableHeader.Files[2], editableHeader, false), BlockDataLength + 20);

    stream.CommitChanges(editableHeader);

    // Re-open each file read-only against the committed header and verify.
    FileHeaderBlock committedHeader = stream.LastCommittedHeader;

    TestSingleByteRead(new SubFileStream(stream, committedHeader.Files[0], committedHeader, true));
    TestCustomSizeRead(new SubFileStream(stream, committedHeader.Files[1], committedHeader, true), 5);
    TestCustomSizeRead(new SubFileStream(stream, committedHeader.Files[2], committedHeader, true), BlockDataLength + 20);
}
/// <summary>
/// End-to-end check of snapshot isolation: committed data is visible to new
/// snapshots, disposed/rolled-back edits are not, and a snapshot taken before
/// an edit keeps reading the pre-edit contents.
/// </summary>
public void Test()
{
    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);
    //string file = Path.GetTempFileName();
    //System.IO.File.Delete(file);
    try
    {
        //using (FileSystemSnapshotService service = FileSystemSnapshotService.CreateFile(file))
        using (TransactionalFileStructure service = TransactionalFileStructure.CreateInMemory(BlockSize))
        {
            // Commit a single byte (1) at position 0.
            using (TransactionalEdit edit = service.BeginEdit())
            {
                SubFileStream fs = edit.CreateFile(SubFileName.CreateRandom());
                BinaryStream bs = new BinaryStream(fs);
                bs.Write((byte)1);
                bs.Dispose();
                fs.Dispose();
                edit.CommitAndDispose();
            }
            {
                // A snapshot taken now must see the committed byte.
                ReadSnapshot read = service.Snapshot;
                SubFileStream f1 = read.OpenFile(0);
                BinaryStream bs1 = new BinaryStream(f1);
                if (bs1.ReadUInt8() != 1)
                {
                    throw new Exception();
                }

                // Start an edit, write a 3 after the 1, but dispose WITHOUT committing.
                using (TransactionalEdit edit = service.BeginEdit())
                {
                    SubFileStream f2 = edit.OpenFile(0);
                    BinaryStream bs2 = new BinaryStream(f2);
                    if (bs2.ReadUInt8() != 1)
                    {
                        throw new Exception();
                    }
                    bs2.Write((byte)3);
                    bs2.Dispose();
                }

                //rollback should be issued;
                // The pre-edit snapshot must still see an unwritten (0) second byte.
                if (bs1.ReadUInt8() != 0)
                {
                    throw new Exception();
                }
                bs1.Dispose();

                {
                    // A fresh snapshot must likewise show the edit was rolled back.
                    ReadSnapshot read2 = service.Snapshot;
                    SubFileStream f2 = read2.OpenFile(0);
                    BinaryStream bs2 = new BinaryStream(f2);
                    if (bs2.ReadUInt8() != 1)
                    {
                        throw new Exception();
                    }
                    if (bs2.ReadUInt8() != 0)
                    {
                        throw new Exception();
                    }
                    bs2.Dispose();
                }
            }

            // An explicit rollback must also discard these writes.
            using (TransactionalEdit edit = service.BeginEdit())
            {
                SubFileStream f2 = edit.OpenFile(0);
                BinaryStream bs2 = new BinaryStream(f2);
                bs2.Write((byte)13);
                bs2.Write((byte)23);
                bs2.Dispose();
                edit.RollbackAndDispose();
            }
            //rollback should be issued;
        }
    }
    finally
    {
        //System.IO.File.Delete(file);
    }

    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);
    Assert.IsTrue(true);
}
/// <summary>
/// Confirms the file holds the 10,000-byte wrapping sequence 0,1,...,255,0,...
/// </summary>
internal static void TestSingleByteRead(SubFileStream ds)
{
    using (BinaryStream reader = new BinaryStream(ds))
    {
        reader.Position = 0;
        for (int index = 0; index < 10000; index++)
        {
            if (reader.ReadUInt8() != (byte)index)
            {
                throw new Exception();
            }
        }
    }
}
/// <summary>
/// Replays the seeded random chunks produced by TestCustomSizeWrite and
/// compares them byte-for-byte against the file contents.
/// </summary>
internal static void TestCustomSizeRead(SubFileStream ds, int seed)
{
    using (BinaryStream reader = new BinaryStream(ds))
    {
        Random rng = new Random(seed);
        byte[] generated = new byte[25];
        byte[] fromFile = new byte[25];
        reader.Position = 0;

        for (int pass = 0; pass < 1000; pass++)
        {
            for (int i = 0; i < generated.Length; i++)
            {
                generated[i] = (byte)rng.Next();
            }

            int count = rng.Next(25);
            reader.ReadAll(fromFile, 0, count);

            for (int i = 0; i < count; i++)
            {
                if (generated[i] != fromFile[i])
                {
                    throw new Exception();
                }
            }
        }
    }
}