private void AddFiles(string physicalPathRoot, string[] sourceFiles, ParallelTasks<CompressBlock> tasks)
{
    // Write file data sequentially
    ulong decompressedFileOffset = 0;
    var readBuffer = new byte[BlockSize];
    var readBufferPos = 0;
    ulong blockOffset = 0;

    foreach (string filePath in sourceFiles)
    {
        using var fs = File.OpenRead(Path.Combine(physicalPathRoot, filePath));

        var fileEntry = new FileEntry()
        {
            PathHash = GetHashForPath(filePath),
            DecompressedOffset = decompressedFileOffset,
            DecompressedSize = (uint)fs.Length,
        };

        decompressedFileOffset += fileEntry.DecompressedSize;
        FileEntries.Add(fileEntry);

        // Append data until a 256KB block write/flush is triggered - combining multiple files into single block entries
        int read;
        while ((read = fs.Read(readBuffer, readBufferPos, readBuffer.Length - readBufferPos)) > 0)
        {
            if (readBufferPos + read < BlockSize)
            {
                // Short read: the file ran out before the buffer filled (FileStream returns
                // the full requested count unless EOF). Keep the partial buffer so the next
                // file continues filling the same block.
                readBufferPos += read;
                break;
            }

            // Buffer is full - queue a complete block for compression and start a fresh one
            tasks.AddItem(new CompressBlock()
            {
                DecompressedOffset = blockOffset,
                DecompressedSize = BlockSize,
                DataBuffer = readBuffer
            });

            readBufferPos = 0;
            readBuffer = new byte[BlockSize];
            blockOffset += BlockSize;
        }
    }

    // Flush whatever remains as a final, partially filled block
    if (readBufferPos > 0)
    {
        tasks.AddItem(new CompressBlock()
        {
            DecompressedOffset = blockOffset,
            DecompressedSize = (uint)readBufferPos,
            DataBuffer = readBuffer
        });
    }
}
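The method leans on several members defined elsewhere in the class (BlockSize, FileEntries, FileEntry, CompressBlock, ParallelTasks<T>, GetHashForPath). The following is a minimal sketch of what those could look like, inferred purely from how they are used above; the field types, the 256 KiB block size, and the ParallelTasks behavior are assumptions, not the real definitions.

// Hypothetical supporting declarations, inferred from usage in AddFiles.
// Requires: using System.Collections.Generic;
private const int BlockSize = 256 * 1024;                 // assumed 256 KiB per block
private readonly List<FileEntry> FileEntries = new();     // table-of-contents entries

private class FileEntry
{
    public ulong PathHash;              // hash of the archive-relative path
    public ulong DecompressedOffset;    // where the file starts in the decompressed stream
    public uint DecompressedSize;       // file length in bytes
}

private class CompressBlock
{
    public ulong DecompressedOffset;    // where this block starts in the decompressed stream
    public uint DecompressedSize;       // bytes of valid data in DataBuffer
    public byte[] DataBuffer;           // raw data handed to a worker for compression
}

// GetHashForPath(string) is assumed to return a ulong hash of the path; the actual
// hash algorithm is format-specific. ParallelTasks<T> is assumed to be a simple
// producer/consumer queue whose worker threads compress each queued block; only
// AddItem(T) is used here.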