private static void TestCreateNewFile(DiskIo stream, FileHeaderBlock fat)
{
    SubFileName id1 = SubFileName.CreateRandom();
    SubFileName id2 = SubFileName.CreateRandom();
    SubFileName id3 = SubFileName.CreateRandom();

    TransactionalEdit trans = new TransactionalEdit(stream);

    //create 3 files
    SubFileStream fs1 = trans.CreateFile(id1);
    SubFileStream fs2 = trans.CreateFile(id2);
    SubFileStream fs3 = trans.CreateFile(id3);

    if (fs1.SubFile.FileName != id1)
    {
        throw new Exception();
    }

    //write to the three files
    SubFileStreamTest.TestSingleByteWrite(fs1);
    SubFileStreamTest.TestCustomSizeWrite(fs2, 5);
    SubFileStreamTest.TestCustomSizeWrite(fs3, BlockDataLength + 20);

    //read from them and verify content.
    SubFileStreamTest.TestCustomSizeRead(fs3, BlockDataLength + 20);
    SubFileStreamTest.TestCustomSizeRead(fs2, 5);
    SubFileStreamTest.TestSingleByteRead(fs1);

    fs1.Dispose();
    fs2.Dispose();
    fs3.Dispose();

    trans.CommitAndDispose();
}
/// <summary>
/// Helper method. Creates the <see cref="SubFileName"/> for the default table.
/// </summary>
/// <typeparam name="TKey"></typeparam>
/// <typeparam name="TValue"></typeparam>
/// <returns></returns>
private static SubFileName GetFileName<TKey, TValue>()
    where TKey : SnapTypeBase<TKey>, new()
    where TValue : SnapTypeBase<TValue>, new()
{
    Guid keyType = new TKey().GenericTypeGuid;
    Guid valueType = new TValue().GenericTypeGuid;
    return SubFileName.Create(SortedTreeFile.PrimaryArchiveType, keyType, valueType);
}
public void TestSequentialWriteAmplification()
{
    MemoryPoolTest.TestMemoryLeak();

    double size;

    Stats.ChecksumCount = 0;
    DiskIoSession.WriteCount = 0;
    DiskIoSession.ReadCount = 0;

    using (TransactionalFileStructure file = TransactionalFileStructure.CreateInMemory(4096))
    using (TransactionalEdit edit = file.BeginEdit())
    using (SubFileStream stream = edit.CreateFile(SubFileName.CreateRandom()))
    using (BinaryStream bs = new BinaryStream(stream))
    {
        Stats.ChecksumCount = 0;
        DiskIoSession.WriteCount = 0;
        DiskIoSession.ReadCount = 0;

        //Write 8 million bytes (1 million 8-byte longs)
        for (long s = 0; s < 1000000; s++)
        {
            bs.Write(s);
        }

        size = bs.Position / 4096.0;
    }

    System.Console.WriteLine("Read: " + (DiskIoSession.ReadCount / size).ToString("0.0"));
    System.Console.WriteLine("Write: " + (DiskIoSession.WriteCount / size).ToString("0.0"));
    System.Console.WriteLine("Checksums: " + (Stats.ChecksumCount / size).ToString("0.0"));

    MemoryPoolTest.TestMemoryLeak();
}
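// Worked numbers for the test above (informational note, not part of the original test):
// 1,000,000 writes of an 8-byte long produce 8,000,000 bytes, i.e. roughly
// 8,000,000 / 4096 ≈ 1953 pages, so each figure printed to the console is the number of
// disk read/write/checksum operations per 4096-byte page of data written.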
private void CreateArchiveFile<TKey, TValue>(SubFileName fileName, EncodingDefinition storageMethod, int maxSortedTreeBlockSize)
    where TKey : SnapTypeBase<TKey>, new()
    where TValue : SnapTypeBase<TValue>, new()
{
    if (maxSortedTreeBlockSize < 1024)
    {
        throw new ArgumentOutOfRangeException(nameof(maxSortedTreeBlockSize), "Must be at least 1024");
    }

    if ((object)storageMethod == null)
    {
        throw new ArgumentNullException(nameof(storageMethod));
    }

    using (TransactionalEdit trans = m_fileStructure.BeginEdit())
    {
        using (SubFileStream fs = trans.CreateFile(fileName))
        using (BinaryStream bs = new BinaryStream(fs))
        {
            int blockSize = m_fileStructure.Snapshot.Header.DataBlockSize;

            while (blockSize > maxSortedTreeBlockSize)
            {
                blockSize >>= 2;
            }

            SortedTree<TKey, TValue> tree = SortedTree<TKey, TValue>.Create(bs, blockSize, storageMethod);
            tree.Flush();
        }

        trans.ArchiveType = FileType;
        trans.CommitAndDispose();
    }
}
private SubFileName GetFileName<TKey, TValue>(string fileName)
    where TKey : SnapTypeBase<TKey>, new()
    where TValue : SnapTypeBase<TValue>, new()
{
    Guid keyType = new TKey().GenericTypeGuid;
    Guid valueType = new TValue().GenericTypeGuid;
    return SubFileName.Create(fileName, keyType, valueType);
}
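// Illustration only (not part of the original source): the two SubFileName.Create overloads
// used above derive a sub-file name either from the primary-archive marker or from a
// caller-supplied table name. The GUIDs below are stand-ins for new TKey().GenericTypeGuid /
// new TValue().GenericTypeGuid, and "MyTable" is a hypothetical table name.
private static void SubFileNameSketch()
{
    Guid keyType = Guid.NewGuid();   // stand-in for the key type's GenericTypeGuid
    Guid valueType = Guid.NewGuid(); // stand-in for the value type's GenericTypeGuid

    // Default table: named by the primary archive type plus the key/value type GUIDs.
    SubFileName defaultTable = SubFileName.Create(SortedTreeFile.PrimaryArchiveType, keyType, valueType);

    // User-named table: named by an arbitrary string plus the same key/value type GUIDs.
    SubFileName namedTable = SubFileName.Create("MyTable", keyType, valueType);

    // The same key/value pair should yield distinct names for the default and named tables.
    System.Console.WriteLine(defaultTable != namedTable);
}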
/// <summary>
/// Creates a SortedTreeTable
/// </summary>
/// <param name="fileStructure"></param>
/// <param name="fileName"></param>
/// <param name="baseFile"></param>
internal SortedTreeTable(TransactionalFileStructure fileStructure, SubFileName fileName, SortedTreeFile baseFile)
{
    BaseFile = baseFile;
    m_fileName = fileName;
    m_fileStructure = fileStructure;
    m_firstKey = new TKey();
    m_lastKey = new TKey();

    using (SortedTreeTableReadSnapshot<TKey, TValue> snapshot = BeginRead())
    {
        snapshot.GetKeyRange(m_firstKey, m_lastKey);
    }
}
public void Test()
{
    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);

    Random rand = new Random();
    ushort fileIdNumber = (ushort)rand.Next(int.MaxValue);
    SubFileName fileName = SubFileName.CreateRandom();
    int dataBlock1 = rand.Next(int.MaxValue);
    int singleRedirect = rand.Next(int.MaxValue);
    int doubleRedirect = rand.Next(int.MaxValue);
    int tripleRedirect = rand.Next(int.MaxValue);
    int quadrupleRedirect = rand.Next(int.MaxValue);

    SubFileHeader node = new SubFileHeader(fileIdNumber, fileName, isImmutable: false, isSimplified: false);
    node.DirectBlock = (uint)dataBlock1;
    node.SingleIndirectBlock = (uint)singleRedirect;
    node.DoubleIndirectBlock = (uint)doubleRedirect;
    node.TripleIndirectBlock = (uint)tripleRedirect;
    node.QuadrupleIndirectBlock = (uint)quadrupleRedirect;

    SubFileHeader node2 = SaveItem(node);

    if (node2.FileIdNumber != fileIdNumber)
    {
        throw new Exception();
    }
    if (node2.FileName != fileName)
    {
        throw new Exception();
    }
    if (node2.DirectBlock != dataBlock1)
    {
        throw new Exception();
    }
    if (node2.SingleIndirectBlock != singleRedirect)
    {
        throw new Exception();
    }
    if (node2.DoubleIndirectBlock != doubleRedirect)
    {
        throw new Exception();
    }
    if (node2.TripleIndirectBlock != tripleRedirect)
    {
        throw new Exception();
    }
    if (node2.QuadrupleIndirectBlock != quadrupleRedirect)
    {
        throw new Exception();
    }

    Assert.IsTrue(true);
    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);
}
/// <summary>
/// Opens the default table for this TKey and TValue. If it does not exist,
/// it will be created with the provided compression method.
/// </summary>
/// <typeparam name="TKey"></typeparam>
/// <typeparam name="TValue"></typeparam>
/// <param name="storageMethod">The method of compression to utilize in this table.</param>
/// <param name="maxSortedTreeBlockSize">The maximum desired block size for a SortedTree. Must be at least 1024.</param>
/// <returns></returns>
public SortedTreeTable<TKey, TValue> OpenOrCreateTable<TKey, TValue>(EncodingDefinition storageMethod, int maxSortedTreeBlockSize = 4096)
    where TKey : SnapTypeBase<TKey>, new()
    where TValue : SnapTypeBase<TValue>, new()
{
    if ((object)storageMethod == null)
    {
        throw new ArgumentNullException(nameof(storageMethod));
    }

    SubFileName fileName = GetFileName<TKey, TValue>();
    return OpenOrCreateTable<TKey, TValue>(storageMethod, fileName, maxSortedTreeBlockSize);
}
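// Minimal usage sketch, not the library's documented example. Assumptions: this method lives
// on SortedTreeFile (as the surrounding members suggest), "file" is an already-opened
// SortedTreeFile, and MyKey/MyValue are hypothetical SnapTypeBase<T> implementations
// supplied by the consuming project.
private static void OpenOrCreateTableSketch(SortedTreeFile file, EncodingDefinition storageMethod)
{
    // Resolves the default-table SubFileName from the key/value type GUIDs, creates the
    // sub-file on first use, and returns the cached SortedTreeTable on later calls.
    SortedTreeTable<MyKey, MyValue> table = file.OpenOrCreateTable<MyKey, MyValue>(storageMethod, maxSortedTreeBlockSize: 4096);

    // The table instance is cached by the file (m_openedFiles), so repeated calls with the
    // same key/value type pair return the same object.
}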
internal SortedTreeTableReadSnapshot(ReadSnapshot currentTransaction, SubFileName fileName)
{
    try
    {
        m_subStream = currentTransaction.OpenFile(fileName);
        m_binaryStream = new BinaryStream(m_subStream);
        m_tree = SortedTree<TKey, TValue>.Open(m_binaryStream);
    }
    catch
    {
        Dispose();
        throw;
    }
}
/// <summary>
/// Opens the table for the provided file name.
/// </summary>
/// <typeparam name="TKey"></typeparam>
/// <typeparam name="TValue"></typeparam>
/// <param name="fileName">the filename to open</param>
/// <returns>null if the table does not exist</returns>
private SortedTreeTable<TKey, TValue> OpenTable<TKey, TValue>(SubFileName fileName)
    where TKey : SnapTypeBase<TKey>, new()
    where TValue : SnapTypeBase<TValue>, new()
{
    if (!m_openedFiles.ContainsKey(fileName))
    {
        if (!m_fileStructure.Snapshot.Header.ContainsSubFile(fileName))
        {
            return null;
        }

        m_openedFiles.Add(fileName, new SortedTreeTable<TKey, TValue>(m_fileStructure, fileName, this));
    }

    return (SortedTreeTable<TKey, TValue>)m_openedFiles[fileName];
}
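// Hypothetical caller sketch: unlike OpenOrCreateTable, OpenTable returns null when the
// sub-file has never been created, so callers must handle the missing-table case.
// MyKey/MyValue are again hypothetical SnapTypeBase<T> implementations.
private void OpenTableSketch(SubFileName fileName)
{
    SortedTreeTable<MyKey, MyValue> table = OpenTable<MyKey, MyValue>(fileName);

    if (table == null)
    {
        // Table does not exist yet; either create it (e.g. via OpenOrCreateTable) or bail out.
    }
}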
private static void TestBinaryStream(DiskIo stream)
{
    FileHeaderBlock header = stream.LastCommittedHeader;
    header = header.CloneEditable();

    SubFileHeader node = header.CreateNewFile(SubFileName.CreateRandom());
    header.CreateNewFile(SubFileName.CreateRandom());
    header.CreateNewFile(SubFileName.CreateRandom());

    SubFileStream ds = new SubFileStream(stream, node, header, false);
    BinaryStreamTest.Test(ds);
}
public void Test()
{
    int blockSize = 4096;

    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);

    DiskIo stream = DiskIo.CreateMemoryFile(Globals.MemoryPool, blockSize);
    SubFileName name = SubFileName.CreateRandom();
    SubFileHeader node = new SubFileHeader(1, name, false, false);
    SubFileDiskIoSessionPool pool = new SubFileDiskIoSessionPool(stream, stream.LastCommittedHeader, node, true);

    IndexParser parse = new IndexParser(pool);
    parse.SetPositionAndLookup(14312);

    pool.Dispose();

    Assert.IsTrue(true);
    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);
}
public void Test()
{
    MemoryPoolTest.TestMemoryLeak();

    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);

    FileHeaderBlock header = FileHeaderBlock.CreateNew(4096);
    header = header.CloneEditable();
    header.CreateNewFile(SubFileName.CreateRandom());
    header.CreateNewFile(SubFileName.CreateRandom());
    header.CreateNewFile(SubFileName.CreateRandom());
    header.IsReadOnly = true;

    FileHeaderBlock header2 = FileHeaderBlock.Open(header.GetBytes());

    //verify they are the same
    CheckEqual(header2, header);

    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);
    MemoryPoolTest.TestMemoryLeak();
}
private static void TestReadAndWrites(DiskIo stream)
{
    FileHeaderBlock header = stream.LastCommittedHeader;
    header = header.CloneEditable();

    SubFileHeader node = header.CreateNewFile(SubFileName.CreateRandom());
    header.CreateNewFile(SubFileName.CreateRandom());
    header.CreateNewFile(SubFileName.CreateRandom());

    SubFileStream ds = new SubFileStream(stream, node, header, false);
    TestSingleByteWrite(ds);
    TestSingleByteRead(ds);

    TestCustomSizeWrite(ds, 5);
    TestCustomSizeRead(ds, 5);

    TestCustomSizeWrite(ds, BlockDataLength + 20);
    TestCustomSizeRead(ds, BlockDataLength + 20);

    stream.CommitChanges(header);
}
private static void TestRollback(DiskIo stream, FileHeaderBlock fat)
{
    SubFileName id1 = SubFileName.CreateRandom();
    SubFileName id2 = SubFileName.CreateRandom();
    SubFileName id3 = SubFileName.CreateRandom();

    TransactionalEdit trans = new TransactionalEdit(stream);

    //create 3 additional files
    SubFileStream fs21 = trans.CreateFile(id1);
    SubFileStream fs22 = trans.CreateFile(id2);
    SubFileStream fs23 = trans.CreateFile(id3);

    //open files
    SubFileStream fs1 = trans.OpenFile(0);
    SubFileStream fs2 = trans.OpenFile(1);
    SubFileStream fs3 = trans.OpenFile(2);

    //read from them and verify content.
    SubFileStreamTest.TestSingleByteRead(fs2);
    SubFileStreamTest.TestCustomSizeRead(fs3, 5);
    SubFileStreamTest.TestCustomSizeRead(fs1, BlockDataLength + 20);

    //rewrite bad data.
    SubFileStreamTest.TestSingleByteWrite(fs3);
    SubFileStreamTest.TestCustomSizeWrite(fs1, 5);
    SubFileStreamTest.TestCustomSizeWrite(fs2, BlockDataLength + 20);

    fs1.Dispose();
    fs2.Dispose();
    fs3.Dispose();

    fs21.Dispose();
    fs22.Dispose();
    fs23.Dispose();

    trans.RollbackAndDispose();
}
/// <summary>
/// Opens a SubFileStream that can be used to read/write to the file passed to this function.
/// </summary>
/// <returns></returns>
public SubFileStream OpenFile(SubFileName fileName)
{
    for (int x = 0; x < m_fileHeaderBlock.Files.Count; x++)
    {
        SubFileHeader file = m_fileHeaderBlock.Files[x];
        if (file.FileName == fileName)
        {
            return OpenFile(x);
        }
    }

    throw new Exception("File does not exist");
}
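// Minimal round-trip sketch using only APIs that already appear in this section; the
// 4096-byte block size is an assumption. It creates a sub-file under a TransactionalEdit,
// commits, and then locates the file by SubFileName rather than by index.
private static void OpenByNameSketch()
{
    SubFileName fileName = SubFileName.CreateRandom();

    using (TransactionalFileStructure service = TransactionalFileStructure.CreateInMemory(4096))
    {
        using (TransactionalEdit edit = service.BeginEdit())
        {
            using (SubFileStream fs = edit.CreateFile(fileName))
            using (BinaryStream bs = new BinaryStream(fs))
            {
                bs.Write((byte)1);
            }

            edit.CommitAndDispose();
        }

        // Look the sub-file up by name in the committed snapshot.
        using (SubFileStream fs = service.Snapshot.OpenFile(fileName))
        using (BinaryStream bs = new BinaryStream(fs))
        {
            if (bs.ReadUInt8() != 1)
                throw new Exception("Unexpected content");
        }
    }
}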
public void Test()
{
    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);

    //string file = Path.GetTempFileName();
    //System.IO.File.Delete(file);
    try
    {
        //using (FileSystemSnapshotService service = FileSystemSnapshotService.CreateFile(file))
        using (TransactionalFileStructure service = TransactionalFileStructure.CreateInMemory(BlockSize))
        {
            using (TransactionalEdit edit = service.BeginEdit())
            {
                SubFileStream fs = edit.CreateFile(SubFileName.CreateRandom());
                BinaryStream bs = new BinaryStream(fs);
                bs.Write((byte)1);
                bs.Dispose();
                fs.Dispose();
                edit.CommitAndDispose();
            }

            {
                ReadSnapshot read = service.Snapshot;
                SubFileStream f1 = read.OpenFile(0);
                BinaryStream bs1 = new BinaryStream(f1);

                if (bs1.ReadUInt8() != 1)
                {
                    throw new Exception();
                }

                using (TransactionalEdit edit = service.BeginEdit())
                {
                    SubFileStream f2 = edit.OpenFile(0);
                    BinaryStream bs2 = new BinaryStream(f2);

                    if (bs2.ReadUInt8() != 1)
                    {
                        throw new Exception();
                    }

                    bs2.Write((byte)3);
                    bs2.Dispose();
                }
                //rollback should be issued;

                if (bs1.ReadUInt8() != 0)
                {
                    throw new Exception();
                }

                bs1.Dispose();

                {
                    ReadSnapshot read2 = service.Snapshot;
                    SubFileStream f2 = read2.OpenFile(0);
                    BinaryStream bs2 = new BinaryStream(f2);

                    if (bs2.ReadUInt8() != 1)
                    {
                        throw new Exception();
                    }
                    if (bs2.ReadUInt8() != 0)
                    {
                        throw new Exception();
                    }

                    bs2.Dispose();
                }
            }

            using (TransactionalEdit edit = service.BeginEdit())
            {
                SubFileStream f2 = edit.OpenFile(0);
                BinaryStream bs2 = new BinaryStream(f2);
                bs2.Write((byte)13);
                bs2.Write((byte)23);
                bs2.Dispose();
                edit.RollbackAndDispose();
            }
            //rollback should be issued;
        }
    }
    finally
    {
        //System.IO.File.Delete(file);
    }

    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);
    Assert.IsTrue(true);
}
/// <summary>
/// Creates and opens a new file on the current file system.
/// </summary>
/// <returns></returns>
public ISupportsBinaryStream CreateFile(SubFileName fileName)
{
    if (m_disposed)
        throw new ObjectDisposedException(GetType().FullName);

    CloseCurrentFile();

    var subFile = m_fileHeaderBlock.CreateNewFile(fileName);
    subFile.DirectBlock = m_fileHeaderBlock.LastAllocatedBlock + 1;

    m_subFileStream = new SimplifiedSubFileStream(m_stream, subFile, m_fileHeaderBlock);
    return m_subFileStream;
}
private SortedTreeTable<TKey, TValue> OpenOrCreateTable<TKey, TValue>(EncodingDefinition storageMethod, SubFileName fileName, int maxSortedTreeBlockSize)
    where TKey : SnapTypeBase<TKey>, new()
    where TValue : SnapTypeBase<TValue>, new()
{
    if (!m_openedFiles.ContainsKey(fileName))
    {
        if (!m_fileStructure.Snapshot.Header.ContainsSubFile(fileName))
        {
            CreateArchiveFile<TKey, TValue>(fileName, storageMethod, maxSortedTreeBlockSize);
        }

        m_openedFiles.Add(fileName, new SortedTreeTable<TKey, TValue>(m_fileStructure, fileName, this));
    }

    return (SortedTreeTable<TKey, TValue>)m_openedFiles[fileName];
}
internal SortedTreeTableSnapshotInfo(TransactionalFileStructure fileStructure, SubFileName fileName)
{
    m_fileName = fileName;
    m_fileStructure = fileStructure;
    m_currentTransaction = m_fileStructure.Snapshot;
}