public void TestCorruptedLengthHeader()
{
    // A stream shorter than the 4-byte length prefix must be rejected as corrupted.
    var pipe = new PipeMock();
    var reader = new CompressedChunksReader(pipe, 4, new LoggerMock());
    var truncatedHeader = new byte[] { 0x12, 0x34 };
    var stream = new MemoryStream(truncatedHeader);

    Assert.Throws<FileCorruptedException>(
        () => reader.ReadFromStream(stream, CancellationToken.None));
}
public void TestMissingBody()
{
    // The length prefix claims one more byte than the stream actually contains,
    // so the reader must detect the short body and report corruption.
    var pipe = new PipeMock();
    var reader = new CompressedChunksReader(pipe, 4, new LoggerMock());
    var body = new byte[] { 0x12, 0x34 };
    var payload = BitConverter.GetBytes(body.Length + 1).Concat(body).ToArray();
    var stream = new MemoryStream(payload);

    Assert.Throws<FileCorruptedException>(
        () => reader.ReadFromStream(stream, CancellationToken.None));
}
public void EmptyStreamProducesNoChunks()
{
    // Reading a zero-length stream should complete without emitting any chunk.
    var pipe = new PipeMock();
    var reader = new CompressedChunksReader(pipe, 4, new LoggerMock());

    reader.ReadFromStream(new MemoryStream(), CancellationToken.None);

    Assert.Empty(pipe.Chunks);
}
public void TestSingleChunk()
{
    // One well-formed [length][body] record should yield exactly one chunk
    // whose bytes match the original body.
    var pipe = new PipeMock();
    var reader = new CompressedChunksReader(pipe, 4, new LoggerMock());
    var body = new byte[] { 0x12, 0x34 };
    var payload = BitConverter.GetBytes(body.Length).Concat(body).ToArray();

    reader.ReadFromStream(new MemoryStream(payload), CancellationToken.None);

    Assert.Single(pipe.Chunks);
    Assert.Equal(body, pipe.Chunks[0].Bytes);
}
public void TestIncorrectLengthHeader()
{
    // After a valid first record, the trailing bytes form a bogus length header
    // that points past the end of the stream — the reader must flag corruption.
    var pipe = new PipeMock();
    var reader = new CompressedChunksReader(pipe, 4, new LoggerMock());
    var firstBody = new byte[] { 0x12, 0x34 };
    var garbage = new byte[] { 0x56, 0x78, 0x90, 0xAB, 0xCD };
    var payload = BitConverter.GetBytes(firstBody.Length)
        .Concat(firstBody)
        .Concat(garbage)
        .ToArray();

    Assert.Throws<FileCorruptedException>(
        () => reader.ReadFromStream(new MemoryStream(payload), CancellationToken.None));
}
// Wires up one compression/decompression run as a pipeline:
// reader -> inputPipe -> ParallelismDegree processors -> outputPipe -> writer.
// The mode switch picks ChunksReader/ChunksCompressor for Compress and
// CompressedChunksReader/ChunksDecompressor for Decompress; chunk lengths are
// written to the output stream only in Compress mode.
// NOTE(review): this block appears truncated mid-expression — the final
// .Concat(...) over the processor actions is never closed and the returned
// Task is not visible here. Confirm the remainder against the full file.
// NOTE(review): if parameters.Mode is neither Compress nor Decompress,
// reader/processors stay null and the ArgumentNullException throws at run
// time inside the actions — presumably Mode is validated upstream; verify.
public Task Start(TaskParameters parameters) { _logger.Write($"Starting task with parameters: {parameters}"); var cancellationTokenSource = new CancellationTokenSource(); var inputFile = File.OpenRead(parameters.SourceFullPath); var outputFile = File.Create(parameters.DestinationFullPath); var inputPipe = new Pipe(parameters.MaxElementsInPipe); var outputPipe = new Pipe(parameters.MaxElementsInPipe); var expectedChunksCount = GetExpectedChunksCount(parameters, inputFile, outputFile); var writer = new ChunksWriter(outputPipe, _logger); IChunksReader reader = null; IEnumerable <IChunksProcessor> processors = null; switch (parameters.Mode) { case ProcessorMode.Compress: reader = new ChunksReader(inputPipe, parameters.ChunkSize, _logger); processors = Enumerable.Range(0, parameters.ParallelismDegree).Select( _ => new ChunksCompressor(inputPipe, outputPipe, _logger)); break; case ProcessorMode.Decompress: reader = new CompressedChunksReader(inputPipe, parameters.ChunkSize, _logger); processors = Enumerable.Range(0, parameters.ParallelismDegree).Select( _ => new ChunksDecompressor(inputPipe, outputPipe, _logger)); break; } var actions = new Action[] { () => { (reader ?? throw new ArgumentNullException()).ReadFromStream(inputFile, cancellationTokenSource.Token); inputFile.Close(); }, () => { writer.WriteToStream( outputFile, cancellationTokenSource.Token, expectedChunksCount, writeChunksLengths: parameters.Mode == ProcessorMode.Compress); outputFile.Close(); }, }.Concat((processors ?? throw new ArgumentNullException())
public void TestSeveralChunks()
{
    // Two consecutive [length][body] records should produce two chunks,
    // preserving both payloads and their sequential indices.
    var pipe = new PipeMock();
    var reader = new CompressedChunksReader(pipe, 4, new LoggerMock());
    var firstBody = new byte[] { 0x12, 0x34 };
    var secondBody = new byte[] { 0x56, 0x78, 0x90, 0xAB, 0xCD };
    var payload = BitConverter.GetBytes(firstBody.Length).Concat(firstBody)
        .Concat(BitConverter.GetBytes(secondBody.Length)).Concat(secondBody)
        .ToArray();

    reader.ReadFromStream(new MemoryStream(payload), CancellationToken.None);

    Assert.Equal(2, pipe.Chunks.Count);
    Assert.Equal(firstBody, pipe.Chunks[0].Bytes);
    Assert.Equal(0, pipe.Chunks[0].Index);
    Assert.Equal(secondBody, pipe.Chunks[1].Bytes);
    Assert.Equal(1, pipe.Chunks[1].Index);
}