/// <summary>
/// Serializes <paramref name="bytesToCompress"/> into the chunk format:
/// chunk header, version, compression method, obfuscation flag, then the
/// compressed size, uncompressed size and checksum, followed by the
/// ZLib-framed deflate payload.
/// </summary>
/// <param name="bytesToCompress">Raw data to compress. Must not be null.</param>
/// <param name="flavor">Which deflate implementation / effort level to use.
/// Must not be <see cref="CompressionFlavor.StoreUncompressed"/>.</param>
/// <returns>The complete serialized chunk.</returns>
/// <exception cref="InvalidDataException">Thrown when <paramref name="bytesToCompress"/> is null.</exception>
/// <exception cref="InvalidOperationException">Thrown for <see cref="CompressionFlavor.StoreUncompressed"/>
/// or an unrecognized flavor.</exception>
internal static byte[] Compress(byte[] bytesToCompress, CompressionFlavor flavor)
{
    if (bytesToCompress == null)
    {
        throw new InvalidDataException("Cannot compress null array");
    }
    if (flavor == CompressionFlavor.StoreUncompressed)
    {
        throw new InvalidOperationException("Chunk format cannot be used for uncompressed data");
    }

    MemoryStream output = new MemoryStream(bytesToCompress.Length);
    // Fixed: BinaryWriter was never disposed. Disposing it also closes the
    // MemoryStream, but MemoryStream.ToArray() is documented to work after close.
    using (BinaryWriter writer = new BinaryWriter(output))
    {
        writer.Write(Chunk.Header);
        writer.Write(Chunk.DefaultVersion);
        writer.Write((byte)CompressionMethod.ZLib);
        writer.Write(NoObfuscation);

        using (MemoryStream compressedStream = new MemoryStream(bytesToCompress.Length))
        {
            // Hand-written ZLib (RFC 1950) header: 0x78 0xDA = deflate,
            // 32K window, "best compression" flag. DeflateStream emits only
            // the raw deflate stream, so the framing is added manually.
            compressedStream.WriteByte(0x78); //ZLib header first byte
            compressedStream.WriteByte(0xDA); //ZLib header second byte
            switch (flavor)
            {
                case CompressionFlavor.ZLibDeflate:
                    // leaveOpen: true — compressedStream is still needed below.
                    using (DeflateStream deflateStream = new DeflateStream(compressedStream, CompressionLevel.Optimal, true))
                        deflateStream.Write(bytesToCompress, 0, bytesToCompress.Length);
                    break;

                case CompressionFlavor.i5ZopfliDeflate:
                case CompressionFlavor.i10ZopfliDeflate:
                case CompressionFlavor.i15ZopfliDeflate:
                    if (bytesToCompress.Length < Strategy.ZopfliBreakEven) //Skip Zopfli if file is small
                    {
                        goto case CompressionFlavor.ZLibDeflate;
                    }
                    ZopfliDeflater zstream = new ZopfliDeflater(compressedStream);
                    // Flavor enum values double as the Zopfli iteration count.
                    zstream.NumberOfIterations = (int)flavor;
                    zstream.MasterBlockSize = 0;
                    zstream.Deflate(bytesToCompress, true);
                    break;

                default:
                    // Fixed typo in message: "Unknow" -> "Unknown".
                    throw new InvalidOperationException("Unknown compression flavor");
            }

            var compressedDataArray = compressedStream.ToArray(); //Change to stream
            int checksum = ComputeChecksum(compressedDataArray); //Change to stream
            writer.Write(compressedDataArray.Length);
            writer.Write(bytesToCompress.Length);
            writer.Write(checksum);
            writer.Write(compressedDataArray);
        }
    }
    return (output.ToArray());
}
/// <summary>
/// Builds a file entry from raw bytes, splitting the payload into
/// <see cref="Chunk.MaxSize"/>-sized chunks and compressing them in parallel.
/// Compression is skipped entirely when <paramref name="flavor"/> is
/// <see cref="CompressionFlavor.StoreUncompressed"/> or the file is at or
/// below <c>Strategy.DeflateBreakEven</c> bytes.
/// </summary>
/// <param name="uncompressedBytes">The file's raw contents.</param>
/// <param name="flavor">Compression flavor to apply per chunk.</param>
/// <param name="fileName">Name used only for progress reporting.</param>
/// <param name="progress">Optional progress sink; reported once per chunk (or once when stored).</param>
public FileEntry(byte[] uncompressedBytes, CompressionFlavor flavor, string fileName, IProgress<string> progress)
{
    UncompressedSize = uncompressedBytes.Length;
    if (flavor != CompressionFlavor.StoreUncompressed && uncompressedBytes.Length > Strategy.DeflateBreakEven) //Skip compression of small files
    {
        compressedChunkSizes = new int[CalculateChunkQuantity()];
        ChunkBytes = new byte[compressedChunkSizes.Length][];
        // Parallelize chunk compression
        System.Threading.Tasks.Parallel.For(0, compressedChunkSizes.Length, j =>
        {
            int size = Chunk.MaxSize;
            // Last chunk holds the remainder — but only when the total length
            // does not divide evenly into chunks.
            // BUG FIX: the original condition was `Length != Chunk.MaxSize`,
            // which produced a zero-byte last chunk whenever the length was an
            // exact multiple of Chunk.MaxSize greater than one chunk.
            if (j + 1 == compressedChunkSizes.Length && uncompressedBytes.Length % Chunk.MaxSize != 0)
            {
                size = uncompressedBytes.Length % Chunk.MaxSize; //Last loop
            }
            using (var ms = new MemoryStream(uncompressedBytes, j * Chunk.MaxSize, size))
            {
                ChunkBytes[j] = Chunk.Compress(ms.ToArray(), flavor);
                // Single-chunk files may be kept uncompressed when compression
                // does not pay for itself (chunk overhead included).
                // NOTE(review): FlagCompression is written from parallel
                // iterations; safe only because all writers agree per entry —
                // confirm if chunk counts > 1 can ever take the first branch.
                if (Strategy.TryCompressKeepIfWorthwhile &&
                    compressedChunkSizes.Length == 1 &&
                    ChunkBytes[j].Length + 4 > ms.Length)
                {
                    FlagCompression = CompressionMethod.None;
                    ChunkBytes[0] = uncompressedBytes;
                }
                else
                {
                    FlagCompression = CompressionMethod.ZLib;
                    compressedChunkSizes[j] = ChunkBytes[j].Length;
                }
            }
            if (progress != null)
            {
                progress.Report(fileName + ":Chunk#" + j.ToString());
            }
        }); // Parallel.For
    }
    else
    {
        // Store the payload as-is: a single uncompressed "chunk".
        FlagCompression = CompressionMethod.None;
        ChunkBytes = new byte[1][];
        ChunkBytes[0] = uncompressedBytes;
        if (progress != null)
        {
            progress.Report(fileName);
        }
    }
}
/// <summary>
/// Creates an archive at <paramref name="destinationArchiveFileName"/> from the
/// given list of files, each resolved relative to <paramref name="sourceDirectoryPath"/>.
/// </summary>
/// <param name="fileListShortName">Archive-relative file names to include.</param>
/// <param name="sourceDirectoryPath">Directory the short names are resolved against.</param>
/// <param name="destinationArchiveFileName">Path of the archive file to write.</param>
/// <param name="progress">Optional progress sink, reported per file/chunk.</param>
/// <param name="flavor">Compression flavor applied to every entry.</param>
/// <exception cref="IOException">Thrown when a file exceeds 2 GB (entries store 32-bit sizes).</exception>
public static void CreateFromFileList(string[] fileListShortName, string sourceDirectoryPath, string destinationArchiveFileName, IProgress<string> progress, CompressionFlavor flavor)
{
    var files = new SortedDictionary<string, FileEntry>();
    for (int i = 0; i < fileListShortName.Length; i++)
    {
        string fullName = Path.Combine(sourceDirectoryPath, fileListShortName[i]);
        var file = new FileInfo(fullName);
        // Entry sizes are stored as Int32, so anything above 2 GB cannot be archived.
        if (file.Length > Int32.MaxValue)
        {
            // Fixed: throw a specific exception type (was bare Exception) and
            // add the missing separator before "Maximum" in the message.
            throw new IOException("File is too large: " + fileListShortName[i] +
                ". Maximum allowed size is 2GB (2 147 483 647 bytes).");
        }
        byte[] buffer = File.ReadAllBytes(fullName);
        // Consistency fix: report progress with the archive-relative short name,
        // as Merge does, rather than the full filesystem path.
        files.Add(fileListShortName[i], new FileEntry(buffer, flavor, fileListShortName[i], progress));
    }
    WriteToFile(destinationArchiveFileName, files);
}
/// <summary>
/// Merges selected entries from several existing archives into a single new
/// archive, recompressing each extracted entry with <paramref name="flavor"/>.
/// When the same short name appears in multiple source archives, the archive
/// enumerated last wins.
/// </summary>
/// <param name="archivesFiles">Map of archive path to the short names to take from it.</param>
/// <param name="destinationArchiveFileName">Path of the merged archive to write.</param>
/// <param name="flavor">Compression flavor applied to every entry.</param>
/// <param name="progress">Optional progress sink, reported per file/chunk.</param>
public static void Merge(PathCollection archivesFiles, string destinationArchiveFileName, CompressionFlavor flavor, IProgress<string> progress)
{
    var files = new SortedDictionary<string, FileEntry>();
    foreach (var archiveFullPath in archivesFiles.Keys)
    {
        using (var archive = new HpiArchive(File.OpenRead(archiveFullPath)))
        {
            foreach (var shortFileName in archivesFiles[archiveFullPath])
            {
                var entry = archive.Entries[shortFileName];
                var buffer = archive.Extract(entry);
                // The indexer inserts or replaces in one lookup; the original
                // ContainsKey + Add/indexer branch did the same work twice.
                files[shortFileName] = new FileEntry(buffer, flavor, shortFileName, progress);
            }
        }
    }
    WriteToFile(destinationArchiveFileName, files);
}
/// <summary>
/// Archives every file found under <paramref name="sourceDirectoryFullName"/>
/// into a new archive at <paramref name="destinationArchiveFileName"/>.
/// </summary>
/// <param name="sourceDirectoryFullName">Root directory whose contents are archived.</param>
/// <param name="destinationArchiveFileName">Path of the archive file to write.</param>
/// <param name="flavor">Compression flavor applied to every entry.</param>
/// <param name="progress">Optional progress sink, reported per file/chunk.</param>
public static void CreateFromDirectory(string sourceDirectoryFullName, string destinationArchiveFileName, CompressionFlavor flavor, IProgress<string> progress)
{
    string resolvedDestination = Path.GetFullPath(destinationArchiveFileName);
    var shortNames = GetDirectoryFileList(sourceDirectoryFullName);
    CreateFromFileList(shortNames.ToArray(), sourceDirectoryFullName, resolvedDestination, progress, flavor);
}