public void AppendAndGet_WithSimpleData_ShouldAppendAndGetCorrectly()
{
    //Arrange
    var data = Guid.NewGuid().ToByteArray();
    var capacity = data.Length;

    //Action
    IndexData indexData;
    using (var storage = new FileStorage(_storageFilePath, capacity))
    {
        indexData = storage.Append(new MemoryStream(data));
    }

    //Assert
    using (var storage = new FileStorage(_storageFilePath, capacity))
    {
        using (var resultStream = storage.Get(indexData))
        {
            byte[] hash1, hash2;
            Md5Helper.ComputeHashes(data, resultStream, out hash1, out hash2);

            Assert.IsTrue(hash1.SequenceEqual(hash2));
        }
    }
}
public void Rollback_WhenAnyErrorHappen_ShouldRollback()
{
    //Arrange
    var data = Guid.NewGuid().ToByteArray();
    var capacity = data.Length;

    //Action
    IndexData indexData;
    using (var storage = new FileStorage(_storageFilePath, capacity))
    {
        indexData = storage.Append(new MemoryStream(data));

        try
        {
            storage.Append(null);
        }
        catch (Exception)
        {
            storage.Rollback();
        }
    }

    //Assert
    using (var storage = new FileStorage(_storageFilePath, capacity))
    {
        using (var resultStream = storage.Get(indexData))
        {
            byte[] hash1, hash2;
            Md5Helper.ComputeHashes(data, resultStream, out hash1, out hash2);

            Assert.IsTrue(hash1.SequenceEqual(hash2));
        }
    }
}
public void AppendAndGet_WithBigData_ShouldAppendAndGetCorrectly()
{
    //Arrange
    const long step = Int32.MaxValue / 100;
    var buffer = new byte[step];
    for (var i = 0; i < buffer.Length; i++)
    {
        var value = (byte)(i % 2 == 0 ? 0 : 1);
        buffer[i] = value;
    }
    var capacity = buffer.Length;

    //Action
    IndexData indexData;
    using (var storage = new FileStorage(_storageFilePath, capacity))
    {
        indexData = storage.Append(new MemoryStream(buffer));
    }

    //Assert
    using (var storage = new FileStorage(_storageFilePath, capacity))
    {
        using (var resultStream = storage.Get(indexData))
        {
            byte[] hash1, hash2;
            Md5Helper.ComputeHashes(buffer, resultStream, out hash1, out hash2);

            Assert.IsTrue(hash1.SequenceEqual(hash2));
        }
    }
}
public void Get_AfterResized_ShouldGetAppendedData()
{
    //Arrange
    var datas = new[]
    {
        Guid.NewGuid().ToByteArray(),
        Guid.NewGuid().ToByteArray()
    };
    var capacity = datas.Sum(d => d.Length) - 16;

    //Action
    using (var storage = new FileStorage(_storageFilePath, capacity))
    {
        var indexData = storage.Append(new MemoryStream(datas[0]));
        using (var resultStream = storage.Get(indexData))
        {
            storage.Append(new MemoryStream(datas[1]));

            byte[] hash1, hash2;
            Md5Helper.ComputeHashes(datas[0], resultStream, out hash1, out hash2);

            //Assert
            Assert.IsTrue(hash1.SequenceEqual(hash2));
        }
    }
}
public void Append_WithOverCapacity_IncreaseMemoryMappedFileCapacity()
{
    //Arrange
    var datas = new[]
    {
        Guid.NewGuid().ToByteArray(),
        Guid.NewGuid().ToByteArray(),
        Guid.NewGuid().ToByteArray()
    };
    var capacity = datas.Sum(d => d.Length) - 16;

    //Action
    using (var storage = new FileStorage(_storageFilePath, capacity))
    {
        foreach (var data in datas)
        {
            storage.Append(new MemoryStream(data));
        }
    }

    //Assert
    using (var storage = new FileStorage(_storageFilePath, capacity))
    {
        var offset = DefaultSizes.CursorHolderSize;
        foreach (var data in datas)
        {
            using (var resultStream = storage.Get(new IndexData
            {
                Offset = offset,
                Md5Hash = null,
                Size = data.Length
            }))
            {
                byte[] hash1, hash2;
                Md5Helper.ComputeHashes(data, resultStream, out hash1, out hash2);

                Assert.IsTrue(hash1.SequenceEqual(hash2));
            }

            offset += SizeOfGuid;
        }
    }
}
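// Md5Helper itself is not shown in this section. Below is a minimal, standalone
// sketch of what its ComputeHashes overload is assumed to do, based solely on how
// the tests above call it: hash the expected byte array and the stream returned by
// FileStorage.Get, so the assertions can compare the two MD5 digests. Parameter
// names are illustrative; the real helper in the project may differ.
//
// Assumed namespaces: System.IO and System.Security.Cryptography.
internal static class Md5HelperSketch
{
    public static void ComputeHashes(byte[] expected, Stream actual, out byte[] expectedHash, out byte[] actualHash)
    {
        using (var md5 = MD5.Create())
        {
            expectedHash = md5.ComputeHash(expected); // digest of the data that was appended
            actualHash = md5.ComputeHash(actual);     // digest of the stream read back from storage
        }
    }
}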