private void ReadStreamProgressEventsTest(IStorage storage)
{
    // arrange: a source stream spanning exactly nine buffer blocks, so the
    // number of progress events is predictable.
    var blockSize = _configuration.BufferBlockSize;
    var streamSize = 9 * blockSize;
    var source = DummyData.GenerateRandomBytes(streamSize);
    using var sourceMemoryStream = new MemoryStream(source);
    var chunk = new Chunk(0, streamSize - 1) { Timeout = 100, Storage = storage };
    var chunkDownloader = new ChunkDownloader(chunk, _configuration);
    var eventCount = 0;
    var receivedBytes = new List<byte>();
    chunkDownloader.DownloadProgressChanged += (s, e) =>
    {
        eventCount++;
        receivedBytes.AddRange(e.ReceivedBytes);
    };

    // act
    chunkDownloader.ReadStream(sourceMemoryStream, new CancellationToken()).Wait();

    // assert: one progress event per buffer block, and every source byte
    // delivered through the events in order.
    Assert.AreEqual(streamSize / blockSize, eventCount);
    Assert.AreEqual(chunkDownloader.Chunk.Length, receivedBytes.Count);
    Assert.IsTrue(source.SequenceEqual(receivedBytes));

    chunkDownloader.Chunk.Clear();
}
private void MergeChunksTest(bool onTheFly)
{
    // arrange: split a dummy file into chunks and fill each chunk's storage
    // with known random bytes.
    var fileSize = 10240;
    var chunkCount = 8;
    _configuration.OnTheFlyDownload = onTheFly;
    var chunkHub = new ChunkHub(_configuration);
    Chunk[] chunks = chunkHub.ChunkFile(fileSize, chunkCount);
    var chunksData = new List<byte[]>();
    foreach (Chunk chunk in chunks)
    {
        var dummyBytes = DummyData.GenerateRandomBytes((int)chunk.Length);
        chunksData.Add(dummyBytes);
        chunk.Storage.WriteAsync(dummyBytes, 0, dummyBytes.Length).Wait();
    }

    // act: merge every chunk into a single in-memory destination stream.
    using MemoryStream destinationStream = new MemoryStream();
    chunkHub.MergeChunks(chunks, destinationStream, new CancellationToken()).Wait();

    // assert: the merged stream replays each chunk's bytes in chunk order.
    var mergedData = destinationStream.ToArray();
    var counter = 0;
    foreach (byte[] chunkData in chunksData)
    {
        foreach (var expectedByte in chunkData)
        {
            Assert.AreEqual(expectedByte, mergedData[counter++]);
        }
    }
}
private void TestStreamReadSpeed(ThrottledStreamRead readMethod)
{
    // arrange: throttle a 10KB stream to 1KB/s; a full drain should take
    // about 10s, so even with scheduler jitter it must not finish faster
    // than 80% of that.
    var limitationCoefficient = 0.8; // 80%
    var size = 10240; // 10KB
    var maxBytesPerSecond = 1024; // 1024 Byte/s
    var expectedTime = (size / maxBytesPerSecond) * 1000 * limitationCoefficient; // 80% of 10000 Milliseconds
    var randomBytes = DummyData.GenerateRandomBytes(size);
    var buffer = new byte[maxBytesPerSecond / 8];
    using Stream stream = new ThrottledStream(new MemoryStream(randomBytes), maxBytesPerSecond);
    var stopWatcher = Stopwatch.StartNew();

    // act: drain the stream to the end through the read method under test.
    stream.Seek(0, SeekOrigin.Begin);
    int bytesRead;
    do
    {
        bytesRead = readMethod(stream, buffer, 0, buffer.Length);
    } while (bytesRead > 0);
    stopWatcher.Stop();

    // assert: the throttle kept the elapsed time at or above the lower bound.
    Assert.IsTrue(stopWatcher.ElapsedMilliseconds >= expectedTime,
        $"expected duration is: {expectedTime}ms , but actual duration is: {stopWatcher.ElapsedMilliseconds}ms");
}
private void ReadStreamTest(IStorage storage)
{
    // arrange: a 20KB random source that should land in the chunk's storage.
    var streamSize = 20480;
    var sourceBytes = DummyData.GenerateRandomBytes(streamSize);
    var chunk = new Chunk(0, streamSize - 1) { Timeout = 100, Storage = storage };
    var chunkDownloader = new ChunkDownloader(chunk, _configuration);
    using var memoryStream = new MemoryStream(sourceBytes);

    // act
    chunkDownloader.ReadStream(memoryStream, new CancellationToken()).Wait();

    // assert: the storage holds exactly the source bytes, byte for byte.
    Assert.AreEqual(memoryStream.Length, chunkDownloader.Chunk.Storage.GetLength());
    var chunkStream = chunkDownloader.Chunk.Storage.OpenRead();
    for (int offset = 0; offset < streamSize; offset++)
    {
        Assert.AreEqual(sourceBytes[offset], chunkStream.ReadByte());
    }

    chunkDownloader.Chunk.Clear();
}
private void TestStreamReadSpeed(ThrottledStreamRead readMethod)
{
    // arrange: 1KB throttled to 256 B/s should take ~4s; accept a window
    // between 75% and 100% of that to absorb timer inaccuracy.
    var size = 1024;
    var maxBytesPerSecond = 256; // 256 Byte/s
    var slowExpectedTime = (size / maxBytesPerSecond) * 1000; // 4000 Milliseconds
    var fastExpectedTime = slowExpectedTime * 0.75; // 3000 Milliseconds
    var randomBytes = DummyData.GenerateRandomBytes(size);
    var buffer = new byte[maxBytesPerSecond / 8];
    using Stream stream = new ThrottledStream(new MemoryStream(randomBytes), maxBytesPerSecond);
    var stopWatcher = Stopwatch.StartNew();

    // act: drain the whole stream through the read method under test.
    stream.Seek(0, SeekOrigin.Begin);
    int bytesRead;
    do
    {
        bytesRead = readMethod(stream, buffer, 0, buffer.Length);
    } while (bytesRead > 0);
    stopWatcher.Stop();

    // assert: the elapsed time fell inside the expected window.
    Assert.IsTrue(stopWatcher.ElapsedMilliseconds >= fastExpectedTime,
        $"actual duration is: {stopWatcher.ElapsedMilliseconds}ms");
    Assert.IsTrue(stopWatcher.ElapsedMilliseconds <= slowExpectedTime,
        $"actual duration is: {stopWatcher.ElapsedMilliseconds}ms");
}
public void MergeChunksByFileStorageTest()
{
    // arrange: chunk a dummy file, back each chunk with file storage in the
    // temp directory, and fill it with random bytes.
    var fileSize = 1024;
    var chunkCount = 8;
    var mergedFilename = FileHelper.GetTempFile("");
    Chunk[] chunks = _chunkHub.ChunkFile(fileSize, chunkCount);
    foreach (Chunk chunk in chunks)
    {
        chunk.Storage = new FileStorage(Path.GetTempPath());
        var dummyBytes = DummyData.GenerateRandomBytes((int)chunk.Length);
        chunk.Storage.WriteAsync(dummyBytes, 0, dummyBytes.Length).Wait();
    }

    // act: merge all chunk storages into a single temp file.
    _chunkHub.MergeChunks(chunks, mergedFilename).Wait();

    // assert: the merged file exists and contains every chunk's bytes in order.
    Assert.IsTrue(File.Exists(mergedFilename));
    var mergedData = File.ReadAllBytes(mergedFilename);
    var counter = 0;
    foreach (Chunk chunk in chunks)
    {
        var chunkStream = chunk.Storage.OpenRead();
        for (int i = 0; i < chunkStream.Length; i++)
        {
            Assert.AreEqual(chunkStream.ReadByte(), mergedData[counter++]);
        }
        chunk.Clear();
    }
}
public void ReadStreamTest()
{
    // arrange: a 2KB random source and a freshly created chunk storage.
    var streamSize = 2048;
    Chunk = new Chunk(0, streamSize - 1) { Timeout = 100 };
    CreateChunkStorage();
    var sourceBytes = DummyData.GenerateRandomBytes(streamSize);
    using var memoryStream = new MemoryStream(sourceBytes);

    // act
    ReadStream(memoryStream, new CancellationToken()).Wait();

    // assert: storage length and content match the source exactly.
    Assert.AreEqual(memoryStream.Length, Chunk.Storage.GetLength());
    var chunkStream = Chunk.Storage.OpenRead();
    for (int offset = 0; offset < streamSize; offset++)
    {
        Assert.AreEqual(sourceBytes[offset], chunkStream.ReadByte());
    }

    Chunk.Clear();
}
public void TestSetValidPositionWhenStorageChanged()
{
    // arrange: a chunk whose Position lags behind its storage content.
    var nextPosition = 512;
    var chunk = new Chunk(0, 1024) { Position = 1, Storage = Storage };

    // act: write 512 bytes and WAIT for the write to complete before
    // validating. The original fired WriteAsync without awaiting it, so
    // SetValidPosition() could race the in-flight write and read a stale
    // storage length (sibling tests all call .Wait() on WriteAsync).
    Storage.WriteAsync(DummyData.GenerateRandomBytes(nextPosition), 0, nextPosition).Wait();
    chunk.SetValidPosition();

    // assert: the position was advanced to the stored data length.
    Assert.AreEqual(nextPosition, chunk.Position);
}
public void TestStreamWrite()
{
    // arrange: throttle an in-memory sink to 256 B/s.
    var size = 1024;
    var bytesPerSecond = 256; // 256 B/s
    var payload = DummyData.GenerateRandomBytes(size);
    using Stream tar = new ThrottledStream(new MemoryStream(), bytesPerSecond);
    tar.Seek(0, SeekOrigin.Begin);

    // act: time a single full write of the payload.
    var start = Environment.TickCount64;
    tar.Write(payload, 0, payload.Length);
    var elapsed = Environment.TickCount64 - start;

    // assert: writing 1KB at 256 B/s must take at least 4 seconds.
    var expectedTime = (size / bytesPerSecond) * 1000;
    Assert.IsTrue(elapsed >= expectedTime);
}
public void IsDownloadCompletedWhenMemoryStorageDataIsExistTest()
{
    // arrange: a chunk at its final position whose memory storage is
    // fully written.
    var size = 1024;
    var chunk = new Chunk(0, size - 1)
    {
        Storage = new MemoryStorage(size),
        Position = size - 1
    };
    chunk.Storage.WriteAsync(DummyData.GenerateRandomBytes(size), 0, size).Wait();

    // act
    bool isDownloadCompleted = chunk.IsDownloadCompleted();

    // assert: stored data plus an end-of-range position reports completion.
    Assert.IsTrue(isDownloadCompleted);
}
public void ReadStreamTimeoutExceptionTest()
{
    // arrange: a downloader and a token that is already canceled.
    var streamSize = 20480;
    var chunk = new Chunk(0, streamSize - 1) { Timeout = 100 };
    var chunkDownloader = new ChunkDownloader(chunk, _configuration);
    var canceledToken = new CancellationToken(true);

    // act
    async Task CallReadStream() =>
        await chunkDownloader.ReadStream(new MemoryStream(), canceledToken).ConfigureAwait(false);

    // assert: block until the async assertion completes. The original
    // discarded the Task returned by ThrowsExceptionAsync, so the test would
    // pass even if no exception was ever thrown. (The original's unused
    // random-byte buffer and source MemoryStream were removed — ReadStream is
    // invoked with a fresh empty stream.)
    Assert.ThrowsExceptionAsync<OperationCanceledException>(CallReadStream).Wait();
}
private void TestStreamWriteSpeed(ThrottledStreamWrite writeMethod)
{
    // arrange: 1KB written at 256 B/s should take ~4s; allow 50ms of slack
    // for timer resolution.
    var size = 1024;
    var bytesPerSecond = 256; // 256 B/s
    var tolerance = 50; // 50 ms
    var expectedTime = (size / bytesPerSecond) * 1000; // 4000 Milliseconds
    var randomBytes = DummyData.GenerateRandomBytes(size);
    using Stream stream = new ThrottledStream(new MemoryStream(), bytesPerSecond);
    var stopWatcher = Stopwatch.StartNew();

    // act: push the whole payload through the write method under test.
    writeMethod(stream, randomBytes, 0, randomBytes.Length);
    stopWatcher.Stop();

    // assert: the throttle held the write to at least the expected duration.
    Assert.IsTrue(stopWatcher.ElapsedMilliseconds + tolerance >= expectedTime,
        $"actual duration is: {stopWatcher.ElapsedMilliseconds}ms");
}
public void TestPackageSituationAfterDispose()
{
    // arrange: a package with one memory-backed chunk holding sample data.
    var sampleDataLength = 1024;
    Package.TotalFileSize = sampleDataLength * 64;
    Package.Chunks = new[] { new Chunk(0, Package.TotalFileSize) };
    Package.Chunks[0].Storage = new MemoryStorage();
    // WAIT for the write to finish before validating the position; the
    // original fired WriteAsync without awaiting it, racing both
    // SetValidPosition() and Dispose() against the in-flight write
    // (sibling tests all call .Wait() on WriteAsync).
    Package.Chunks[0].Storage.WriteAsync(DummyData.GenerateRandomBytes(sampleDataLength), 0, sampleDataLength).Wait();
    Package.Chunks[0].SetValidPosition();

    // act
    Dispose();

    // assert: disposing must not clear the chunks or the size accounting.
    Assert.IsNotNull(Package.Chunks);
    Assert.AreEqual(sampleDataLength, Package.ReceivedBytesSize);
    Assert.AreEqual(sampleDataLength * 64, Package.TotalFileSize);

    Package.Clear();
}
public void TestStreamRead()
{
    // arrange: a 1KB random source throttled to 256 B/s.
    var size = 1024;
    var bytesPerSecond = 256; // 256 B/s
    var sourceBytes = DummyData.GenerateRandomBytes(size);
    using Stream src = new ThrottledStream(new MemoryStream(sourceBytes), bytesPerSecond);
    src.Seek(0, SeekOrigin.Begin);
    var buffer = new byte[bytesPerSecond];

    // act: time a full drain of the throttled stream.
    long start = Environment.TickCount64;
    int bytesRead;
    do
    {
        bytesRead = src.Read(buffer, 0, buffer.Length);
    } while (bytesRead > 0);
    long elapsed = Environment.TickCount64 - start;

    // assert: reading 1KB at 256 B/s must take at least 4 seconds.
    var expectedTime = (size / bytesPerSecond) * 1000;
    Assert.IsTrue(elapsed >= expectedTime);
}
private void MergeChunksTest(bool onTheFly)
{
    // arrange: chunk a dummy file and fill each chunk's storage with known
    // random bytes, remembering them for later comparison.
    var fileSize = 10240;
    var chunkCount = 8;
    _configuration.OnTheFlyDownload = onTheFly;
    var chunkHub = new ChunkHub(_configuration);
    var mergedFilename = FileHelper.GetTempFile("");
    Chunk[] chunks = chunkHub.ChunkFile(fileSize, chunkCount);
    var chunksData = new List<byte[]>();
    foreach (Chunk chunk in chunks)
    {
        var dummyBytes = DummyData.GenerateRandomBytes((int)chunk.Length);
        chunksData.Add(dummyBytes);
        chunk.Storage.WriteAsync(dummyBytes, 0, dummyBytes.Length).Wait();
    }

    // act: merge all chunks into a single temp file.
    chunkHub.MergeChunks(chunks, mergedFilename).Wait();

    // assert: the merged file exists and replays every chunk's bytes in order.
    Assert.IsTrue(File.Exists(mergedFilename));
    var mergedData = File.ReadAllBytes(mergedFilename);
    var counter = 0;
    foreach (byte[] chunkData in chunksData)
    {
        foreach (var expectedByte in chunkData)
        {
            Assert.AreEqual(expectedByte, mergedData[counter++]);
        }
    }
}