/// <summary>
/// Worker-thread loop: repeatedly claims the next segment of the input file from
/// <paramref name="segmentProvider"/>, compresses or decompresses it, and publishes
/// the result into <paramref name="dataContext"/>.OutputData for the writer thread.
/// Runs until the provider reports a zero-size segment (end of input) or another
/// worker has already set <c>FinishedReading</c>.
/// </summary>
/// <param name="fileName">Path of the input file; opened read-only by each worker.</param>
/// <param name="segmentProvider">Thread-safe source of (index, size) pairs describing the next segment.</param>
/// <param name="mode">Whether segments are compressed or decompressed.</param>
/// <param name="maxCollectionSize">Back-pressure limit: pause workers when this many results are queued.</param>
/// <param name="dataContext">Shared container holding produced segments keyed by index.</param>
/// <param name="synchronizationContext">Shared events/flags coordinating workers and the writer.</param>
public static void Process(
    string fileName,
    SegmentProvider segmentProvider,
    CompressionMode mode,
    long maxCollectionSize,
    DataContext dataContext,
    SynchronizationContext synchronizationContext)
{
    using (var inputStream = new FileStream(fileName, FileMode.Open, FileAccess.Read))
    {
        while (true)
        {
            if (synchronizationContext.FinishedReading)
            {
                break;
            }

            synchronizationContext.ZipperEvent.WaitOne();

            if (dataContext.OutputData.Keys.Count() >= maxCollectionSize)
            {
                // Too much data is already queued waiting to be written to output.
                // Zipper threads should be stopped to release disk and processor load.
                synchronizationContext.ZipperEvent.Reset();
                continue;
            }

            try
            {
                var indexAndSize = segmentProvider.GetNextIndexAndSize(inputStream);
                var index = indexAndSize.Item1;
                var size = indexAndSize.Item2;

                if (size == 0)
                {
                    // Terminal marker: publish an explicit "last segment" sentinel and
                    // wake the writer so it can observe end-of-stream even if it is
                    // currently blocked on WriterEvent (the original code never set it here).
                    synchronizationContext.FinishedReading = true;
                    dataContext.OutputData[index] = new Segment(new byte[0], true);
                    synchronizationContext.WriterEvent.Set();
                    break;
                }

                Trace.TraceInformation(
                    $"Worker {Thread.CurrentThread.ManagedThreadId} processing {index} segment.");

                var buffer = new byte[size];

                // Stream.Read is not guaranteed to fill the buffer in one call;
                // loop until 'size' bytes have been read or the stream ends early.
                var totalRead = 0;
                while (totalRead < size)
                {
                    var bytesRead = inputStream.Read(buffer, totalRead, size - totalRead);
                    if (bytesRead == 0)
                    {
                        // Unexpected EOF: the file shrank after the segment was sized.
                        throw new IOException(
                            $"Unexpected end of file: expected {size} bytes for segment {index}, got {totalRead}.");
                    }

                    totalRead += bytesRead;
                }

                dataContext.OutputData[index] =
                    mode == CompressionMode.Compress ? Compress(buffer) : Decompress(buffer);
                synchronizationContext.WriterEvent.Set();
            }
            catch (Exception e)
            {
                Trace.TraceError($" Error: {e.Message}");
                throw;
            }
        }
    }

    Trace.TraceInformation($"Worker {Thread.CurrentThread.ManagedThreadId} stopped processing data.");
}
/// <summary>
/// Orchestrates a compress/decompress run: sizes the work queue from available
/// memory, starts <c>workerThreadLimit</c> zipper threads that fill the shared
/// data context, then runs the writer thread and blocks until it finishes.
/// </summary>
public void Process()
{
    long freeMemory;

    // PerformanceCounter is IDisposable — release the counter handle promptly.
    // NOTE(review): the first NextValue() of some counters returns 0 until a
    // second sample is taken — "Available Bytes" is an instantaneous counter so
    // this is normally fine, but worth confirming on target systems.
    using (var memoryCounter = new PerformanceCounter("Memory", "Available Bytes"))
    {
        freeMemory = (long)memoryCounter.NextValue();
    }

    // 800Mb is already 200 segments of data in queue, zippers should be stopped.
    // 800L keeps the multiplication in 64-bit arithmetic.
    freeMemory = Math.Min(freeMemory, 800L * 1024 * 1024);
    var bufferSize = 4L * 1024 * 1024;
    Trace.TraceInformation($"Free memory: {freeMemory}; Worker limit: {workerThreadLimit}; Buffer size: {bufferSize}");

    var maxCollectionMembers = freeMemory / bufferSize;
    if (maxCollectionMembers < 1) // rare case: less free memory than one default buffer
    {
        maxCollectionMembers = 1;

        // BUG FIX: the original used inputFileName.Length — the character count
        // of the file NAME string — where the input file's size in bytes was
        // clearly intended (it bounds the read-buffer size).
        bufferSize = Math.Min(new FileInfo(inputFileName).Length, freeMemory);
    }

    var dataContext = new DataContext();
    var synchronizationContext = new SynchronizationContext();
    var segmentProvider = new SegmentProvider(mode == CompressionMode.Decompress, bufferSize);

    // Spin up the zipper workers; each pulls segments independently.
    for (var i = 0; i < workerThreadLimit; i++)
    {
        var workerThread = new Thread(
            () => Zipper.Process(
                inputFileName,
                segmentProvider,
                mode,
                maxCollectionMembers,
                dataContext,
                synchronizationContext));
        workerThread.Start();
        workerThreads.Add(workerThread);
    }

    // The writer owns the lifetime of the run: Join() inside the using block
    // keeps the output stream open until the writer has drained the queue.
    using (var outStream = new FileStream(this.outputFileName, FileMode.Create))
    {
        writerThread = new Thread(
            () => Writer.Write(
                outStream,
                mode == CompressionMode.Compress,
                dataContext,
                synchronizationContext));
        writerThread.Start();
        writerThread.Join();
    }
}