/// <summary>
/// Decompresses the input by loading every chunk descriptor from the stream's footer into a
/// shared <see cref="ConcurrentBag{T}"/> and spawning <paramref name="threads"/> worker threads
/// that drain it via <c>StartWorker</c>. Blocks until all workers have finished.
/// </summary>
/// <param name="threads">Number of worker threads to start.</param>
/// <param name="cancellationToken">Observed before any work starts; if already cancelled,
/// the method returns without spawning workers.</param>
public void Run(int threads, CancellationToken cancellationToken)
{
    // Fix: the token was accepted but never read; honor it like the block-based Run variant does.
    if (cancellationToken.IsCancellationRequested)
    {
        return;
    }

    var footer = CustomGzipFooter.FromStream(this._readerStream);

    // Workers concurrently pull chunk descriptors out of this bag.
    this._chunkInfos = new ConcurrentBag<CustomGzipFooter.CompressedChunkInfo>(footer.Chunks);

    // CountdownEvent starts at 1 so Wait() cannot complete before every worker has been
    // registered via AddCount(); the extra count is released by the final Signal() below.
    using (CountdownEvent threadFinisher = new CountdownEvent(1))
    {
        for (int i = 0; i < threads; i++)
        {
            var thread = new Thread((object param) =>
            {
                Thread.CurrentThread.Name = $"Decompress ThreadWorker: {(int)param}";
                StartWorker();
                threadFinisher.Signal();
            });
            threadFinisher.AddCount();
            thread.Start(i);
        }

        threadFinisher.Signal();
        threadFinisher.Wait();
    }
}
/// <summary>
/// Reconstructs a <see cref="CustomGzipFooter"/> from the trailing footer of a seekable stream.
/// The final 8 bytes of the stream hold the footer's byte size; the footer itself consists of
/// one record per chunk (originalStart, compressedStart, originalLength, compressedLength —
/// four Int64 values each) followed by the total uncompressed size.
/// </summary>
/// <param name="stream">A seekable stream; its position is moved by this call.</param>
/// <returns>The parsed footer.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="stream"/> cannot seek.</exception>
public static CustomGzipFooter FromStream(Stream stream)
{
    if (!stream.CanSeek)
    {
        throw new ArgumentException("Given stream must allow seeking.", nameof(stream));
    }

    var result = new CustomGzipFooter();

    // The footer's total size lives in the last 8 bytes of the stream.
    stream.Seek(-sizeof(ulong), SeekOrigin.End);

    // leaveOpen: true — the caller owns the stream's lifetime.
    using (var reader = new BinaryReader(stream, Encoding.ASCII, true))
    {
        long footerLength = reader.ReadInt64();

        // Jump back to the start of the footer and read every chunk record in order.
        // C# evaluates arguments left to right, so the four reads below occur in the
        // record's on-disk order: originalStart, compressedStart, originalLength, compressedLength.
        reader.BaseStream.Seek(-footerLength, SeekOrigin.End);
        long recordCount = GetNumberOfChunksFromByteSize(footerLength);
        for (long index = 0; index < recordCount; index++)
        {
            result.AddChunkInfo(
                reader.ReadInt64(),
                reader.ReadInt64(),
                reader.ReadInt64(),
                reader.ReadInt64());
        }

        // The footer ends with the total uncompressed payload size.
        result.OriginalSize = reader.ReadInt64();
    }

    return result;
}
/// <summary>
/// Decompresses the source stream block by block. Consecutive chunks are batched into memory
/// blocks of at most MAX_MEMORY_BLOCK_SIZE uncompressed bytes; each block's compressed bytes are
/// read into one buffer, and the block's chunks are distributed round-robin over up to
/// <paramref name="threads"/> worker threads. The writer stream is flushed after every block.
/// </summary>
/// <param name="threads">Upper bound on worker threads used per block.</param>
/// <param name="cancellationToken">Checked before each block and forwarded to every worker.</param>
/// <exception cref="EndOfStreamException">The stream ended before a full block was read.</exception>
public void Run(int threads, CancellationToken cancellationToken)
{
    var footer = CustomGzipFooter.FromStream(this._readerStream);
    int chunkIndex = 0;

    // FromStream moved the position to the footer; compressed data is read sequentially from 0.
    this._readerStream.Seek(0, SeekOrigin.Begin);

    while (chunkIndex < footer.Chunks.Count)
    {
        if (cancellationToken.IsCancellationRequested)
        {
            return;
        }

        // Greedily batch consecutive chunks while their combined uncompressed size stays within
        // the memory budget. The running totals are accumulated inside TakeWhile and rolled back
        // for the first chunk that does not fit.
        long compressedChunksBlockSize = 0, originalChunksBlockSize = 0;
        var chunkInfosInCurrentBlock = footer.Chunks.Skip(chunkIndex).TakeWhile(x =>
        {
            compressedChunksBlockSize += x.CompressedLength;
            originalChunksBlockSize += x.OriginalLength;
            if (originalChunksBlockSize <= MAX_MEMORY_BLOCK_SIZE)
            {
                return true;
            }
            compressedChunksBlockSize -= x.CompressedLength;
            originalChunksBlockSize -= x.OriginalLength;
            return false;
        }).ToArray();

        // Fix: a single chunk larger than MAX_MEMORY_BLOCK_SIZE used to produce an empty batch,
        // which crashed on the [0] access below and never advanced chunkIndex (infinite loop).
        // Process such an oversized chunk on its own instead.
        if (chunkInfosInCurrentBlock.Length == 0)
        {
            var oversized = footer.Chunks.Skip(chunkIndex).First();
            compressedChunksBlockSize = oversized.CompressedLength;
            chunkInfosInCurrentBlock = new[] { oversized };
        }

        int threadsForCurrentBlock = Math.Min(chunkInfosInCurrentBlock.Length, threads);
        chunkIndex += chunkInfosInCurrentBlock.Length;
        long currentBlockOffset = chunkInfosInCurrentBlock[0].CompressedStart;

        // Fix: Stream.Read may return fewer bytes than requested; the original ignored the
        // returned count, which could leave the buffer partially filled and silently corrupt
        // the decompressed output. Loop until the whole compressed block is in memory.
        byte[] buffer = new byte[compressedChunksBlockSize];
        int totalRead = 0;
        while (totalRead < buffer.Length)
        {
            int read = this._readerStream.Read(buffer, totalRead, buffer.Length - totalRead);
            if (read == 0)
            {
                throw new EndOfStreamException("Unexpected end of stream while reading a compressed block.");
            }
            totalRead += read;
        }

        // Assign the block's chunks round-robin across the workers.
        var threadChunkAssignations = new List<CustomGzipFooter.CompressedChunkInfo>[threadsForCurrentBlock];
        for (int i = 0; i < chunkInfosInCurrentBlock.Length; i++)
        {
            int slot = i % threadsForCurrentBlock;
            if (threadChunkAssignations[slot] == null)
            {
                threadChunkAssignations[slot] = new List<CustomGzipFooter.CompressedChunkInfo>();
            }
            threadChunkAssignations[slot].Add(chunkInfosInCurrentBlock[i]);
        }

        if (threadsForCurrentBlock == 1)
        {
            // Single worker: run inline, no thread overhead.
            StartWorker(0, currentBlockOffset, buffer, threadChunkAssignations[0].AsReadOnly(), cancellationToken);
        }
        else
        {
            // CountdownEvent starts at 1 so Wait() cannot complete before every worker has been
            // registered via AddCount(); the extra count is released by the final Signal() below.
            using (CountdownEvent threadFinisher = new CountdownEvent(1))
            {
                for (int i = 0; i < threadsForCurrentBlock; i++)
                {
                    var thread = new Thread((object param) =>
                    {
                        int threadIndex = (int)param;
                        Thread.CurrentThread.Name = $"Decompress ThreadWorker: {threadIndex}";
                        StartWorker(threadIndex, currentBlockOffset, buffer, threadChunkAssignations[threadIndex].AsReadOnly(), cancellationToken);
                        threadFinisher.Signal();
                    });
                    threadFinisher.AddCount();
                    thread.Start(i);
                }
                threadFinisher.Signal();
                threadFinisher.Wait();
            }
        }

        this._writerStream.Flush();
    }
}