public void ProduceTest_WhenMaxProduceWithConsume_ShouldReturnAllItems(int consumersCount)
{
    var maxSize = 10;
    var actual = new BlockingDictionary<int, string>(
        maxSize,
        new SortedDictionary<int, string>(new Dictionary<int, string>(1000)));
    var produceThread = new Thread(() => Produce(actual, Enumerable.Range(0, 1000).ToArray()));
    var consumeThreads = new List<Thread>();
    consumeThreads.AddRange(Enumerable.Range(0, consumersCount)
        .Select(_ => new Thread(() => Consume(actual))));

    produceThread.Start();
    foreach (var thread in consumeThreads)
    {
        thread.Start();
    }

    produceThread.Join();
    actual.Close();
    foreach (var thread in consumeThreads)
    {
        thread.Join();
    }

    Assert.That(actual.Size(), Is.EqualTo(0));
}
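The Produce and Consume helpers this test calls are not shown in these snippets. A minimal sketch of what they might look like, assuming Add blocks while the dictionary is at its bound and GetFirstItem throws InvalidOperationException once the dictionary is closed and drained (both assumptions inferred from the other snippets, not confirmed code):

// Hypothetical helpers reconstructed from the test's usage; names and semantics are assumptions.
private static void Produce(BlockingDictionary<int, string> dictionary, int[] items)
{
    foreach (var item in items)
    {
        // Assumed to block while the dictionary has reached its maximum size.
        dictionary.Add(item, item.ToString());
    }
}

private static void Consume(BlockingDictionary<int, string> dictionary)
{
    while (true)
    {
        try
        {
            // Assumed to block while empty and to throw once the dictionary is closed and drained.
            _ = dictionary.GetFirstItem();
        }
        catch (InvalidOperationException)
        {
            return;
        }
    }
}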
public void CreateBlockingDictionary()
{
    Assert.DoesNotThrow(() => _ = new BlockingDictionary<string, int>());
    Assert.DoesNotThrow(() => _ = new BlockingDictionary<string, int>(1));
    Assert.DoesNotThrow(() => _ = new BlockingDictionary<string, int>(EqualityComparer<string>.Default));
    Assert.DoesNotThrow(() => _ = new BlockingDictionary<string, int>(1, EqualityComparer<string>.Default));
    Assert.DoesNotThrow(() => _ = new BlockingDictionary<string, int>(new Dictionary<string, int>()));
    Assert.DoesNotThrow(() => _ = new BlockingDictionary<string, int>(new Dictionary<string, int>(), EqualityComparer<string>.Default));
}
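Taken together, the tests and the pipeline code below rely on roughly the following surface of BlockingDictionary<TKey, TValue>. This is an inferred contract sketch only; the real signatures are not shown in the snippets, and GetFirstItem could just as well return a KeyValuePair, since only its deconstruction is visible:

// Inferred contract; every member and comment here is an assumption based on how the snippets call the class.
public interface IBlockingDictionary<TKey, TValue> : IDisposable
{
    void Add(TKey key, TValue value);          // presumably blocks while the bounded capacity is reached
    (TKey Key, TValue Value) GetFirstItem();   // presumably blocks while empty; throws InvalidOperationException after Close() once drained
    TValue GetByKey(TKey key);                 // presumably blocks until the key appears; throws after Close() once drained
    bool IsComplete();                         // presumably true once the dictionary is closed and emptied
    int Size();
    void Close();                              // signals that no more items will be added
}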
public void ProduceTest_WhenReadByKeys_ShouldReturnAll()
{
    var maxSize = 3001;
    var blockingDictionary = new BlockingDictionary<int, string>(maxSize, new Dictionary<int, string>(maxSize));
    var expectedData = Enumerable.Range(0, 4000).ToArray();
    var produceThreads = new List<Thread>
    {
        new Thread(() => Produce(blockingDictionary, expectedData[..1000])),
public GZipArchivator(Settings settings)
{
    var factory = ArchivatorFactory.CreateFactory(settings);
    _reader = factory.GetReader();
    _writer = factory.GetWriter();
    _processor = factory.GetBlockProcessor();
    _processingBuffer = new BlockingQueue<Block>();
    _writingBuffer = new BlockingDictionary<int, Block>();
    _blocksLimiter = new Semaphore(settings.BlocksLimit, settings.BlocksLimit);
}
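Settings is not shown in these snippets. Judging from the properties read across the code (BlocksLimit here; InputBufferSize, OutputBufferSize, and ParallelismLevel in Process below), its shape might be roughly the following hypothetical record; the record form and anything beyond the referenced properties are guesses:

// Hypothetical shape of Settings, inferred from usage only.
public sealed record Settings(
    int BlocksLimit,       // sizes the Semaphore in the constructor above
    int InputBufferSize,   // capacity of the input BlockingDictionary in Process
    int OutputBufferSize,  // capacity of the output BlockingDictionary in Process
    int ParallelismLevel); // number of processing worker threads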
public void Read(BlockingDictionary<int, byte[]> dictionary, BinaryReader reader)
{
    var i = 0;
    while (reader.BaseStream.Length != reader.BaseStream.Position)
    {
        _timer.Start();
        var content = reader.ReadBytes(_chunkSize);
        _timer.Stop();
        dictionary.Add(i, content);
        _logger.Write($"Read chunk {i}");
        i++;
    }
}
public void ProcessChunks(BlockingDictionary<int, byte[]> inputDictionary, BlockingDictionary<int, byte[]> outputDictionary)
{
    while (!inputDictionary.IsComplete())
    {
        try
        {
            var (id, chunk) = inputDictionary.GetFirstItem();
            var processedChunk = _contentProcessor.Process(chunk);
            outputDictionary.Add(id, processedChunk);
            _logger.Write($"Process chunk {id}");
        }
        catch (InvalidOperationException)
        {
            break;
        }
    }
}
public void Write(BlockingDictionary<int, byte[]> outputDictionary, BinaryWriter binaryWriter)
{
    var id = 0;
    while (!outputDictionary.IsComplete())
    {
        try
        {
            var chunk = outputDictionary.GetByKey(id);
            _timer.Start();
            binaryWriter.Write(chunk);
            _timer.Stop();
            _logger.Write($"Write chunk {id}");
            id++;
        }
        catch (InvalidOperationException)
        {
            break;
        }
    }
}
public void Write(BlockingDictionary<int, byte[]> outputDictionary, BinaryWriter binaryWriter)
{
    var id = 0;
    while (!outputDictionary.IsComplete())
    {
        try
        {
            var chunk = outputDictionary.GetByKey(id);
            // Prefix each compressed chunk with its 4-byte length so the chunks can be framed on decompression.
            var contentToWrite = BitConverter.GetBytes(chunk.Length).Concat(chunk).ToArray();
            _timer.Start();
            binaryWriter.Write(contentToWrite);
            _timer.Stop();
            _logger.Write($"Write compressed chunk {id}, length {chunk.Length}");
            id++;
        }
        catch (InvalidOperationException)
        {
            break;
        }
    }
}
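Since the compressing writer stores each chunk as a length prefix followed by the compressed bytes, the reader on the decompression path has to recover that framing before handing chunks to the workers. A minimal sketch of such a reader; the method name is hypothetical and not taken from the project, and it assumes the archive was produced on a little-endian platform, where the BitConverter prefix matches BinaryReader.ReadInt32:

// Hedged sketch of a reader for the length-prefixed format produced above.
public void ReadCompressed(BlockingDictionary<int, byte[]> dictionary, BinaryReader reader)
{
    var i = 0;
    while (reader.BaseStream.Position != reader.BaseStream.Length)
    {
        var length = reader.ReadInt32();        // the 4-byte prefix written via BitConverter.GetBytes
        var content = reader.ReadBytes(length); // exactly one compressed chunk
        dictionary.Add(i, content);
        i++;
    }
}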
public void Process(string inputFilePath, string outputFilePath)
{
    // Bounded buffers between the pipeline stages: reader -> workers -> writer.
    using var inputDictionary = new BlockingDictionary<int, byte[]>(
        _settings.InputBufferSize,
        new SortedDictionary<int, byte[]>(new Dictionary<int, byte[]>(_settings.InputBufferSize)));
    using var outputDictionary = new BlockingDictionary<int, byte[]>(
        _settings.OutputBufferSize,
        new Dictionary<int, byte[]>(_settings.OutputBufferSize));
    var workerThreads = new List<Thread>();

    _logger.Write($"Start processing with parallelism level {_settings.ParallelismLevel}");

    // One reader thread, ParallelismLevel processing threads, one writer thread.
    using var binaryReader = _fileService.GetReader(inputFilePath);
    var readerThread = new Thread(() => _reader.Read(inputDictionary, binaryReader));
    workerThreads.AddRange(
        Enumerable.Range(0, _settings.ParallelismLevel)
            .Select(_ => new Thread(() => _processor.ProcessChunks(inputDictionary, outputDictionary))));
    using var binaryWriter = _fileService.GetWriter(outputFilePath);
    var writerThread = new Thread(() => _writer.Write(outputDictionary, binaryWriter));

    readerThread.Start();
    foreach (var thread in workerThreads)
    {
        thread.Start();
    }
    writerThread.Start();

    // Close each buffer only after all of its producers have joined,
    // so the downstream stage can drain it and then observe completion.
    readerThread.Join();
    inputDictionary.Close();
    foreach (var thread in workerThreads)
    {
        thread.Join();
    }
    outputDictionary.Close();
    writerThread.Join();
}
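The shutdown order is what lets the blocking collections drain cleanly: the input dictionary is closed only after the reader thread has joined, so the workers can consume the remaining chunks before they see completion, and the output dictionary is closed only after every worker has joined, so the writer receives every processed chunk before it stops. A hedged end-to-end usage sketch, assuming Process lives on GZipArchivator and that a Settings instance is built elsewhere; the file paths are illustrative only:

// Hypothetical call site.
var archivator = new GZipArchivator(settings);
archivator.Process("input.bin", "output.gz");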