// Decompresses inputStream into outputStream using a pool of worker threads.
// Blocks produced by gzipBlockSplitter come in two flavors:
//  - IndependentGzipBlock: decompressible in isolation, so each one is enqueued
//    as a task and decompressed in parallel; a dedicated "writer" task (pushed
//    to the pool up front) drains the results and appends them to outputFile.
//  - StreamingGzipBlock: cannot be parallelized. As soon as one is seen, the
//    parallel pipeline is shut down (EndTasks + wait for the writer to flush)
//    and the rest is decompressed sequentially, directly into outputStream.
// Returns the compression ratio on success, or an error code.
// NOTE(review): reads inputStream.Length at the end, so the input stream is
// presumably seekable — confirm against callers.
private Result<int, ErrorCodes?> MultyThreadedDecompression(Stream inputStream, Stream outputStream)
{
    using (var writerFinished = new ManualResetEvent(false))
    using (var workersPool = new WorkersPool.WorkersPool(settings.ThreadsCount, log))
    using (var tasksQueue = new BlockingTasksQueue(settings.CompressingQueueSize, workersPool, log))
    {
        var outputFile = new OutputFileWithoutOffsetStore(outputStream);
        var blockId = 0;

        // The writer runs on one of the pool workers and signals writerFinished
        // once it has consumed every task result (or failed) — see
        // RunDequeueingOutputWriteDecompressedBlocks.
        var writeDecompressedTask = RunDequeueingOutputWriteDecompressedBlocks(writerFinished, outputFile, tasksQueue);
        workersPool.PushTask(writeDecompressedTask);

        // needWaitTasksFinished: at least one decompression task was enqueued,
        // so the writer must be waited on before anything else touches outputStream.
        // needEndTasks: EndTasks has not been called yet; the finally block below
        // guarantees it is called exactly once even if SplitBlocks throws.
        var needWaitTasksFinished = false;
        var needEndTasks = true;
        StreamingGzipBlock streamingGzipBlock = null;
        try
        {
            foreach (var block in gzipBlockSplitter.SplitBlocks(inputStream))
            {
                if (block is StreamingGzipBlock gzipBlock)
                {
                    // Stop the parallel pipeline: no more tasks will be added,
                    // and the writer must flush everything enqueued so far
                    // before the streaming block writes to outputStream itself.
                    tasksQueue.EndTasks();
                    needEndTasks = false;
                    if (needWaitTasksFinished)
                    {
                        writerFinished.WaitOne();
                        needWaitTasksFinished = false;
                    }

                    streamingGzipBlock = gzipBlock;
                    break;
                }

                // blockId++ is evaluated eagerly (the task id), while the lambda
                // captures the loop variable 'block' — safe, since foreach
                // variables are per-iteration in C# 5+.
                tasksQueue.EnqueueTask(
                    new DelegateTask(
                        blockId++.ToString(),
                        () => ((IndependentGzipBlock)block).Decompress())
                );
                needWaitTasksFinished = true;
            }
        }
        finally
        {
            if (needEndTasks)
            {
                tasksQueue.EndTasks();
            }
        }

        if (needWaitTasksFinished)
        {
            writerFinished.WaitOne();
        }

        // Sequential tail, present only if the input ended with a streaming block.
        streamingGzipBlock?.WriteDecompressedDataTo(outputStream);
        return (outputFile.CompressionRatio(inputStream.Length));
    }
}
// Verifies that three concurrent producers and three concurrent consumers see
// exactly the full set of task results, with no losses or duplicates.
//
// Fixes over the previous version:
//  - The WorkersPool was created inline and never disposed (production code
//    disposes the pool in its own using — presumably BlockingTasksQueue does
//    not own it); it now has its own using.
//  - EndTasks() is now in a finally block, so a faulting producer can no
//    longer leave the consumers blocked on the queue forever.
public void Should_consume_many_tasks_by_many_producers_and_many_consumers(int tasksPerProducerCount, int paralelizmLevel)
{
    using (var workersPool = new WorkersPool.WorkersPool(paralelizmLevel, log))
    using (var taskQueue = new BlockingTasksQueue(paralelizmLevel, workersPool, new TestLog(output)))
    {
        // CreateTasks(0, 3n) would produce the same results; three disjoint
        // ranges are used so each producer enqueues a distinct slice.
        var expectedResults = Enumerable.Range(1, 3 * tasksPerProducerCount).Cast<object>().ToArray();
        var producers = new[]
        {
            Task.Run(() => CreateTasks(0, tasksPerProducerCount).ForEach(Enqueue)),
            Task.Run(() => CreateTasks(tasksPerProducerCount, tasksPerProducerCount).ForEach(Enqueue)),
            Task.Run(() => CreateTasks(tasksPerProducerCount * 2, tasksPerProducerCount).ForEach(Enqueue))
        };
        var consumers = new[]
        {
            Task.Run(() => Consume(1)),
            Task.Run(() => Consume(2)),
            Task.Run(() => Consume(3))
        };

        var producersEnds = Task.WhenAll(producers);
        try
        {
            producersEnds.Wait();
        }
        finally
        {
            // Always unblock the consumers, even if a producer faulted.
            taskQueue.EndTasks();
        }

        var consumersEnds = Task.WhenAll(consumers)
            .ContinueWith(t => t.Result.SelectMany(x => x).ToList());
        var allResults = consumersEnds.Result;

        producersEnds.IsCompletedSuccessfully.Should().BeTrue();
        consumersEnds.IsCompletedSuccessfully.Should().BeTrue();
        // Order across consumers is unspecified; only the multiset is asserted.
        allResults.Should().HaveSameCount(expectedResults).And.Contain(expectedResults);

        // Enqueues one task, logging around the (potentially blocking) call.
        void Enqueue(ITask task)
        {
            output.WriteLine("Adding " + task.Id);
            taskQueue.EnqueueTask(task);
            output.WriteLine("Added " + task.Id);
        }

        // Drains results until the queue is ended; fails fast on a failed task.
        List<object> Consume(int consumerId)
        {
            var results = new List<object>();
            foreach (var taskResult in taskQueue.ConsumeTaskResults())
            {
                if (taskResult.IsFailed)
                {
                    throw new InvalidOperationException("Unexpected error");
                }

                output.WriteLine($"Consumer {consumerId} consumed {taskResult.Result}");
                results.Add(taskResult.Result);
            }

            output.WriteLine($"Consumer {consumerId} results: {results.Count} => {string.Join(",", results)}");
            return (results);
        }
    }
}
/// <summary>
/// Compresses <paramref name="inputStream"/> into <paramref name="outputStream"/>,
/// resuming from the last offset recorded in the output file. A single reader
/// task feeds the compression queue while this thread drains the results and
/// appends them to the output.
/// </summary>
/// <returns>The compression ratio, or an error code on failure.</returns>
/// <exception cref="ArgumentNullException">Either stream is null.</exception>
public Result<int, ErrorCodes?> Compress([NotNull] Stream inputStream, [NotNull] Stream outputStream)
{
    if (inputStream is null)
    {
        throw new ArgumentNullException(nameof(inputStream));
    }

    if (outputStream is null)
    {
        throw new ArgumentNullException(nameof(outputStream));
    }

    // Already at the end: report a dedicated error code instead of doing work.
    if (inputStream.Position == inputStream.Length)
    {
        return (ErrorCodes.NothingToCompress);
    }

    using (var workersPool = new WorkersPool.WorkersPool(settings.ThreadsCount, log))
    using (var tasksQueue = new BlockingTasksQueue(settings.CompressingQueueSize, workersPool, log))
    {
        var outputFile = new OutputFile(outputStream, settings.OffsetLabel);

        // Resume support: continue from wherever a previous run stopped.
        var (lastReadOffset, offsetError) = outputFile.GetLastOffset();
        if (offsetError.HasValue)
        {
            return (offsetError);
        }

        var readerTask = RunEnqueueingInputFileBlockCompression(inputStream, lastReadOffset, tasksQueue);
        workersPool.PushTask(readerTask);

        foreach (var taskResult in tasksQueue.ConsumeTaskResults())
        {
            if (taskResult.IsFailed)
            {
                // Known failures become error codes; anything else propagates.
                if (taskResult.Exception is CompressorException knownFailure)
                {
                    return (knownFailure.Error);
                }

                throw taskResult.Exception;
            }

            var compressedBlock = (Block)taskResult.Result;
            outputFile.Append(compressedBlock.Bytes, compressedBlock.Offset);
        }

        var commitError = outputFile.Commit();
        if (commitError.HasValue)
        {
            return (commitError);
        }

        return (outputFile.CompressionRatio(inputStream.Length));
    }
}
// Verifies a single producer / single consumer round trip: every enqueued
// task's result is consumed exactly once after EndTasks().
//
// Fix: the WorkersPool was created inline and never disposed (production code
// disposes the pool in its own using — presumably BlockingTasksQueue does not
// own it); it now has its own using. The Wait(...) return value is also
// asserted so a timeout fails with a clear message instead of only via the
// IsCompletedSuccessfully check.
public void Should_consume_all_produced_tasks_after_many_iterations(int tasksCount, int paralelizmLevel)
{
    using (var workersPool = new WorkersPool.WorkersPool(paralelizmLevel, log))
    using (var taskQueue = new BlockingTasksQueue(paralelizmLevel, workersPool, log))
    {
        var expectedResults = Enumerable.Range(1, tasksCount).Cast<object>().ToArray();

        var producing = Task.Run(
            () =>
            {
                foreach (var task in CreateTasks(0, tasksCount))
                {
                    output.WriteLine("Adding " + task.Id);
                    taskQueue.EnqueueTask(task);
                    output.WriteLine("Added " + task.Id);
                }

                // Unblocks the consumer's ConsumeTaskResults enumeration.
                taskQueue.EndTasks();
                output.WriteLine("producer stopped");
            });

        var consuming = Task.Run(
            () =>
            {
                var results = new List<object>();
                foreach (var result in taskQueue.ConsumeTaskResults())
                {
                    if (result.IsFailed)
                    {
                        throw new InvalidOperationException("Unexpected error");
                    }

                    output.WriteLine("Consumed " + result.Result);
                    results.Add(result.Result);
                }

                output.WriteLine("consumer stopped");
                return (results);
            });

        var allConsumedTask = Task.WhenAll(producing, consuming);
        // Bounded wait so a deadlock fails the test instead of hanging the run.
        allConsumedTask.Wait(5.Seconds()).Should().BeTrue();
        allConsumedTask.IsCompletedSuccessfully.Should().BeTrue();

        var allResults = consuming.Result;
        PrintResults(allResults);
        allResults.Should().BeEquivalentTo(expectedResults);
    }
}
// Builds the "Writer" worker task for decompression: it drains every task
// result from tasksQueue, appends each decompressed byte block to outputFile,
// and signals writerFinished when done (success or failure). A recoverable
// error is surfaced to the pool as a CompressorException; unknown task
// failures are rethrown as-is.
private static IWorkerTask RunDequeueingOutputWriteDecompressedBlocks(
    ManualResetEvent writerFinished,
    OutputFileWithoutOffsetStore outputFile,
    BlockingTasksQueue tasksQueue)
{
    return (new DelegateWorkerTask(
        "Writer",
        () =>
        {
            var failure = DrainAndWriteBlocks();
            if (failure.HasValue)
            {
                throw new CompressorException(failure.Value);
            }
        }));

    // Consumes all task results; returns the first error encountered,
    // or null when everything was written successfully.
    ErrorCodes? DrainAndWriteBlocks()
    {
        try
        {
            foreach (var taskResult in tasksQueue.ConsumeTaskResults())
            {
                if (taskResult.IsFailed)
                {
                    // Known failures become error codes; anything else propagates.
                    return taskResult.Exception is CompressorException known
                        ? known.Error
                        : throw taskResult.Exception;
                }

                var decompressedBytes = (byte[])taskResult.Result;
                var writeError = outputFile.Append(decompressedBytes.ToSegment());
                if (writeError.HasValue)
                {
                    return (writeError);
                }
            }

            return (null);
        }
        finally
        {
            // Always signal, so the coordinating thread never waits forever.
            writerFinished.Set();
        }
    }
}
private IWorkerTask RunEnqueueingInputFileBlockCompression( Stream inputStream, long offset, BlockingTasksQueue tasksQueue) { return(new DelegateWorkerTask( "reader", () => { try { var inputFile = new InputFile(inputStream); var readBlocks = inputFile.ReadBlocks(settings.InputFileReadingBufferSize, offset); foreach (var(block, error) in readBlocks) { if (error.HasValue) { throw new CompressorException(error.Value); } var task = new DelegateTask(offset.ToString(), () => compression.Compress(block)); tasksQueue.EnqueueTask(task); offset = block.Offset; } }