public void Rollback_WhenAnyErrorHappen_ShouldRollback()
        {
            // Arrange: one GUID-sized payload, storage sized to fit exactly that.
            var payload = Guid.NewGuid().ToByteArray();
            var storageCapacity = payload.Length;

            // Act: append valid data, then force a failure (null stream) and roll back.
            IndexData appendedIndex;

            using (var storage = new FileStorage(_storageFilePath, storageCapacity))
            {
                appendedIndex = storage.Append(new MemoryStream(payload));
                try
                {
                    storage.Append(null);
                }
                catch (Exception)
                {
                    // Expected failure path: rollback must not corrupt data appended earlier.
                    storage.Rollback();
                }
            }

            // Assert: the payload written before the rollback is still readable intact
            // from a fresh storage session (compared via MD5).
            using (var storage = new FileStorage(_storageFilePath, storageCapacity))
            using (var resultStream = storage.Get(appendedIndex))
            {
                byte[] expectedHash, actualHash;
                Md5Helper.ComputeHashes(payload, resultStream, out expectedHash, out actualHash);

                Assert.IsTrue(expectedHash.SequenceEqual(actualHash));
            }
        }
        public void Appent_OverCapacity_Test()
        {
            // Three GUID-sized payloads against a capacity smaller than two of them,
            // forcing the storage to grow past its initial size.
            var payloads = new[] { Guid.NewGuid().ToByteArray(), Guid.NewGuid().ToByteArray(), Guid.NewGuid().ToByteArray() };
            const int capacity = SizeOfGuid + SizeOfGuid / 2;

            using (var target = new FileStorage(_storageFilePath, capacity))
            {
                foreach (var payload in payloads)
                {
                    target.Append(new MemoryStream(payload));
                }
            }

            // Reopen and read each payload back at its computed offset.
            using (var target = new FileStorage(_storageFilePath, capacity))
            {
                var offset = PositionHolderSize;
                foreach (var payload in payloads)
                {
                    var copy = new MemoryStream();
                    var index = new IndexData
                    {
                        Offset = offset,
                        Md5Hash = null,
                        Size = payload.Length
                    };
                    using (var stream = target.Get(index))
                    {
                        stream.CopyTo(copy);
                    }
                    offset += SizeOfGuid;

                    Assert.IsTrue(payload.SequenceEqual(copy.ToArray()));
                }
            }
        }
        public void Append_Test()
        {
            // Seed the storage with two GUIDs written as a single stream.
            using (var target = new FileStorage(_storageFilePath, TestCapacity)) {
                var seed = new MemoryStream();
                seed.Write(Guid.NewGuid().ToByteArray(), 0, SizeOfGuid);
                seed.Write(Guid.NewGuid().ToByteArray(), 0, SizeOfGuid);
                seed.Position = 0; // rewind so Append consumes the stream from the start
                target.Append(seed);
            }

            var       data = Guid.NewGuid().ToByteArray();
            IndexData indexData;

            // Reopen and append a third GUID; it must land right after the first two.
            using (var target = new FileStorage(_storageFilePath, TestCapacity)) {
                indexData = target.Append(new MemoryStream(data));
            }

            using (var file = new FileStream(_storageFilePath, FileMode.Open)) {
                var position = ReadAndCheckPosition(file, SizeOfGuid * 3);

                var actual = new byte[SizeOfGuid];
                file.Position = SizeOfGuid * 2 + PositionHolderSize;

                // FileStream.Read may return fewer bytes than requested; the original
                // ignored the return value, which could make Check compare a partially
                // filled buffer. Verify the whole GUID was actually read.
                var bytesRead = file.Read(actual, 0, actual.Length);
                Assert.AreEqual(actual.Length, bytesRead);

                Check(data, PositionHolderSize + SizeOfGuid * 2, actual, indexData, position);
            }
        }
        public void Appent_BigData_Test()
        {
            // A full GUID appended into a storage whose initial capacity is only a
            // quarter of the payload, so the storage must expand to hold it.
            var bigData = Guid.NewGuid().ToByteArray();
            const int capacity = SizeOfGuid / 4;

            using (var target = new FileStorage(_storageFilePath, capacity))
            {
                target.Append(new MemoryStream(bigData));
            }

            // Reopen with zero capacity and verify the payload survived intact.
            using (var target = new FileStorage(_storageFilePath, 0))
            {
                var index = new IndexData
                {
                    Offset = PositionHolderSize,
                    Md5Hash = null,
                    Size = SizeOfGuid
                };
                using (var stream = target.Get(index))
                {
                    var copy = new MemoryStream();
                    stream.CopyTo(copy);

                    Assert.IsTrue(bigData.SequenceEqual(copy.ToArray()));
                }
            }

            // The file should end exactly where the stored cursor says it does.
            using (var file = new FileStream(_storageFilePath, FileMode.Open))
            {
                var position = ReadAndCheckPosition(file, SizeOfGuid);
                Assert.AreEqual(position, file.Length);
            }
        }
        public void Get_AfterResize_Test()
        {
            // Capacity fits one GUID plus a fraction, so the second append forces a
            // resize while a read stream from the first append is still open.
            var payloads = new[] { Guid.NewGuid().ToByteArray(), Guid.NewGuid().ToByteArray() };
            const int capacity = SizeOfGuid + SizeOfGuid / 3;

            using (var target = new FileStorage(_storageFilePath, capacity))
            {
                var indexData = target.Append(new MemoryStream(payloads[0]));
                using (var stream = target.Get(indexData))
                {
                    // Trigger the resize with the stream still open.
                    target.Append(new MemoryStream(payloads[1]));

                    var copy = new MemoryStream();
                    stream.CopyTo(copy);
                    Assert.IsTrue(payloads[0].SequenceEqual(copy.ToArray()));
                }
            }
        }
        public void AppendAndGet_WithSimpleData_ShouldAppendAndGetCorrectly()
        {
            // Arrange: one GUID-sized payload, storage sized exactly to fit it.
            var payload = Guid.NewGuid().ToByteArray();
            var storageCapacity = payload.Length;

            // Act: append in one storage session.
            IndexData appendedIndex;

            using (var storage = new FileStorage(_storageFilePath, storageCapacity))
            {
                appendedIndex = storage.Append(new MemoryStream(payload));
            }

            // Assert: a fresh session reads back identical bytes (compared via MD5).
            using (var storage = new FileStorage(_storageFilePath, storageCapacity))
            using (var resultStream = storage.Get(appendedIndex))
            {
                byte[] expectedHash, actualHash;
                Md5Helper.ComputeHashes(payload, resultStream, out expectedHash, out actualHash);

                Assert.IsTrue(expectedHash.SequenceEqual(actualHash));
            }
        }
        public void Append_WithOverFreeDiskSpace_ThrowNotEnoughDiskSpaceException()
        {
            //Arrange
            var bigCapacity = Sizes.Size1Tb * 10; //Assumes 10Tb will exceed any existing hard disk capacity
            var path        = string.Empty;

            try
            {
                //Action
                using (var storage = new FileStorage(_storageFilePath, bigCapacity))
                {
                    path = Path.GetTempFileName();

                    using (var stream = new FileStream(path, FileMode.Open))
                    {
                        storage.Append(stream);
                    }
                }

                //Assert
                //Should throw NotEnoughDiskSpaceException
            }
            finally
            {
                // The original cleanup had two defects: the condition was inverted
                // (it deleted only when path was null/empty, i.e. never), and it sat
                // after the throwing call, so it was unreachable when the expected
                // exception propagated. Negate the check and run it in a finally so
                // the temp file is always removed.
                if (!string.IsNullOrEmpty(path))
                {
                    File.Delete(path);
                }
            }
        }
        public void AppendAndGet_WithBigData_ShouldAppendAndGetCorrectly()
        {
            // Arrange: a large buffer of alternating 0/1 bytes filling the capacity.
            const long step = Int32.MaxValue / 100;
            var buffer = new byte[step];

            for (var i = 0; i < buffer.Length; i++)
            {
                buffer[i] = (byte)(i % 2 == 0 ? 0 : 1);
            }

            var storageCapacity = buffer.Length;

            // Act: append the whole buffer in one session.
            IndexData appendedIndex;

            using (var storage = new FileStorage(_storageFilePath, storageCapacity))
            {
                appendedIndex = storage.Append(new MemoryStream(buffer));
            }

            // Assert: a fresh session returns identical content (compared via MD5).
            using (var storage = new FileStorage(_storageFilePath, storageCapacity))
            using (var resultStream = storage.Get(appendedIndex))
            {
                byte[] expectedHash, actualHash;
                Md5Helper.ComputeHashes(buffer, resultStream, out expectedHash, out actualHash);

                Assert.IsTrue(expectedHash.SequenceEqual(actualHash));
            }
        }
        public void Get_AfterResized_ShouldGetAppendedData()
        {
            // Arrange: capacity is 16 bytes short of both payloads combined, so the
            // second append forces a resize while a read stream on the first is open.
            var payloads = new[] { Guid.NewGuid().ToByteArray(), Guid.NewGuid().ToByteArray() };
            var capacity = payloads.Sum(p => p.Length) - 16;

            // Action
            using (var storage = new FileStorage(_storageFilePath, capacity))
            {
                var indexData = storage.Append(new MemoryStream(payloads[0]));
                using (var resultStream = storage.Get(indexData))
                {
                    // Trigger the resize with the stream still open.
                    storage.Append(new MemoryStream(payloads[1]));

                    byte[] expectedHash, actualHash;
                    Md5Helper.ComputeHashes(payloads[0], resultStream, out expectedHash, out actualHash);

                    // Assert: the open stream still yields the first payload intact.
                    Assert.IsTrue(expectedHash.SequenceEqual(actualHash));
                }
            }
        }
        // Example #10 (stray snippet-site caption left in the source; not code)
        public void Append_Empty_Test()
        {
            // Appending an empty stream must yield a zero-length readable entry.
            using (var target = new FileStorage(_storageFilePath, TestCapacity))
            {
                var indexData = target.Append(new MemoryStream());
                using (var stream = target.Get(indexData))
                {
                    Assert.AreEqual(0, stream.Length);
                }
            }

            // The stored cursor must still indicate zero bytes of payload...
            using (var file = new FileStream(_storageFilePath, FileMode.Open))
            {
                ReadAndCheckPosition(file, 0);
            }

            // ...and the file keeps its pre-allocated size (capacity + position holder).
            Assert.AreEqual(TestCapacity + PositionHolderSize, new FileInfo(_storageFilePath).Length);
        }
        public void Append_WithNullStream_ThrowArgumentNullException()
        {
            //Arrange
            using (var storage = new FileStorage(_storageFilePath, StorageBinTestCapacity))
            {
                FileStream nullStream = null;

                //Action: a null input stream must be rejected by Append.
                storage.Append(nullStream);
            }

            //Assert
            //Should throw ArgumentNullException
        }
        public void AppendAndGet_WithEmptyData_ShouldReturnEmptData()
        {
            //Arrange
            using (var storage = new FileStorage(_storageFilePath, StorageBinTestCapacity))
            {
                //Action: append an empty stream and read it straight back.
                var indexData = storage.Append(new MemoryStream());

                using (var resultStream = storage.Get(indexData))
                {
                    //Assert: the returned stream is empty.
                    Assert.AreEqual(0, resultStream.Length);
                }
            }

            //Assert: the file keeps its pre-allocated size (capacity + cursor holder).
            Assert.AreEqual(StorageBinTestCapacity + DefaultSizes.CursorHolderSize, new FileInfo(_storageFilePath).Length);
        }
        // Example #13 (stray snippet-site caption left in the source; not code)
        public void Create_Test()
        {
            // Append one GUID into a freshly created storage (third ctor argument is
            // 12 — presumably an internal buffer size; confirm against FileStorage),
            // then inspect the raw file to verify the payload landed after the
            // position holder.
            var data = Guid.NewGuid().ToByteArray();

            IndexData indexData;

            using (var target = new FileStorage(_storageFilePath, TestCapacity, 12))
            {
                indexData = target.Append(new MemoryStream(data));
            }

            using (var file = new FileStream(_storageFilePath, FileMode.Open))
            {
                var position = ReadAndCheckPosition(file, SizeOfGuid);

                // NOTE(review): assumes ReadAndCheckPosition leaves the stream
                // positioned right after the position holder — verify in the helper.
                var actual = new byte[SizeOfGuid];
                file.Read(actual, 0, SizeOfGuid);

                Check(data, PositionHolderSize, actual, indexData, position);
            }
        }
        // Example #14 (stray snippet-site caption left in the source; not code)
        public void Get_Test()
        {
            using (var target = new FileStorage(_storageFilePath, TestCapacity))
            {
                // Append ten GUID payloads, remembering each (index, payload) pair.
                var appended = new List<KeyValuePair<IndexData, byte[]>>();
                for (var i = 0; i < 10; i++)
                {
                    var payload = Guid.NewGuid().ToByteArray();
                    appended.Add(new KeyValuePair<IndexData, byte[]>(target.Append(new MemoryStream(payload)), payload));
                }

                // Read them back in random order; each must round-trip exactly.
                foreach (var pair in appended.OrderBy(x => Guid.NewGuid()))
                {
                    using (var stream = target.Get(pair.Key))
                    {
                        var copy = new MemoryStream();
                        stream.CopyTo(copy);

                        Assert.IsTrue(pair.Value.SequenceEqual(copy.ToArray()));
                    }
                }
            }
        }
        // Example #15 (stray snippet-site caption left in the source; not code)
        public void Parallel_Test()
        {
            // 10000 distinct five-digit strings appended from four threads, then read
            // back from four threads; every entry must round-trip exactly.
            const int count               = 10000;
            const int degreeOfParallelism = 4;
            var dictionary = Enumerable.Range(0, count).ToDictionary(x => x, x => x.ToString("00000"));
            var indexes    = new IndexData[dictionary.Count];

            using (var target = new FileStorage(_storageFilePath, TestCapacity))
            {
                dictionary.AsParallel()
                          .WithDegreeOfParallelism(degreeOfParallelism)
                          .ForAll(pair =>
                          {
                              // ReSharper disable once AccessToDisposedClosure
                              indexes[pair.Key] = target.Append(new MemoryStream(Encoding.UTF8.GetBytes(pair.Value)));
                          });
            }

            using (var target = new FileStorage(_storageFilePath, TestCapacity))
            {
                Enumerable.Range(0, count)
                          .AsParallel()
                          .WithDegreeOfParallelism(degreeOfParallelism)
                          .ForAll(i =>
                          {
                              // ReSharper disable once AccessToDisposedClosure
                              using (var stream = target.Get(indexes[i]))
                              {
                                  var copy = new MemoryStream();
                                  stream.CopyTo(copy);

                                  var actual = Encoding.UTF8.GetString(copy.ToArray());
                                  Assert.AreEqual(dictionary[i], actual, "Wrong data");
                              }
                          });
            }
        }
        public void Append_WithOverCapacity_IncreaseMemoryMappedFileCapacity()
        {
            //Arrange: three GUID payloads against a capacity 16 bytes short of their
            //total, so the storage must grow during the appends.
            var payloads = new[] { Guid.NewGuid().ToByteArray(), Guid.NewGuid().ToByteArray(), Guid.NewGuid().ToByteArray() };
            var capacity = payloads.Sum(p => p.Length) - 16;

            //Action
            using (var storage = new FileStorage(_storageFilePath, capacity))
            {
                foreach (var payload in payloads)
                {
                    storage.Append(new MemoryStream(payload));
                }
            }

            //Assert: reopen and verify each payload at its computed offset via MD5.
            using (var storage = new FileStorage(_storageFilePath, capacity))
            {
                var offset = DefaultSizes.CursorHolderSize;
                foreach (var payload in payloads)
                {
                    var index = new IndexData
                    {
                        Offset = offset,
                        Md5Hash = null,
                        Size = payload.Length
                    };
                    using (var resultStream = storage.Get(index))
                    {
                        byte[] expectedHash, actualHash;
                        Md5Helper.ComputeHashes(payload, resultStream, out expectedHash, out actualHash);

                        Assert.IsTrue(expectedHash.SequenceEqual(actualHash));
                    }
                    offset += SizeOfGuid;
                }
            }
        }