/// <summary>
/// Creates a compression stream that writes LZHAM-compressed data to <paramref name="stream"/>.
/// </summary>
/// <param name="stream">Destination stream; must be writable.</param>
/// <param name="mode">Compression parameters used to initialize the native encoder.</param>
/// <param name="leaveOpen">True to leave <paramref name="stream"/> open when this stream is disposed.</param>
/// <exception cref="ArgumentNullException"><paramref name="stream"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="stream"/> cannot be written to.</exception>
/// <exception cref="ApplicationException">The native encoder rejected <paramref name="mode"/>.</exception>
public LzhamStream(Stream stream, CompressionParameters mode, bool leaveOpen)
{
    if (stream == null)
    {
        throw new ArgumentNullException(nameof(stream));
    }
    if (!stream.CanWrite)
    {
        throw new ArgumentException("The base stream is not writeable", nameof(stream));
    }

    _stream = stream;
    _leaveOpen = leaveOpen;
    _buffer = new byte[DefaultBufferSize];

    // Initialize the native compressor up front so a bad parameter set fails fast.
    _compressionHandle = LzhamInterop.CompressInit(mode);
    if (_compressionHandle.IsInvalid)
    {
        throw new ApplicationException("Could not initialize compression stream with specified parameters");
    }
}
/// <summary>
/// Builds a <see cref="CompressionParameters"/> snapshot of this stream's current settings,
/// translating the boolean toggle properties into their corresponding flag bits.
/// </summary>
/// <returns>A parameter set equivalent to the stream's current configuration.</returns>
public CompressionParameters GetCompressionParameters()
{
    CompressionParameters parameters = new CompressionParameters
    {
        DictionarySize = DictionarySize,
        Level = Level,
        UpdateRate = UpdateRate,
        HelperThreads = HelperThreads
    };

    // Each toggle contributes exactly one flag bit when enabled.
    if (Zlib)
    {
        parameters.Flags |= CompressionFlag.WriteZlibStream;
    }
    if (Tradeoff)
    {
        parameters.Flags |= CompressionFlag.TradeIffDecompressionForCompressionRatio;
    }
    if (DeterministicParsing)
    {
        parameters.Flags |= CompressionFlag.DeterministicParsing;
    }
    if (ExtremeParsing)
    {
        parameters.Flags |= CompressionFlag.ExtremeParsing;
    }

    return parameters;
}
/// <summary>
/// Creates a compression stream over <paramref name="stream"/>, closing the base stream
/// when this stream is disposed (equivalent to passing <c>leaveOpen: false</c>).
/// </summary>
/// <param name="stream">Destination stream; must be writable.</param>
/// <param name="mode">Compression parameters used to initialize the native encoder.</param>
public LzhamStream(Stream stream, CompressionParameters mode)
    : this(stream, mode, false)
{
}
/// <summary>
/// Packs the contents of a folder into an archive: reserves space for the header, packs the
/// payload files, LZHAM-compresses the four metadata tables (directory listing, file registry,
/// hash table, bucket table) and appends them, then rewrites the real header at offset 0.
/// </summary>
/// <param name="options">Repack options: input folder, output file and compression level.</param>
/// <returns>0 on success; 9 when the requested compression level is out of range.</returns>
private static int Repack(RepackOptions options)
{
    if (options.Level < 0 || options.Level > CompressionLevel.Uber)
    {
        // BUG FIX: message typo corrected ("Compresson" -> "Compression").
        Console.WriteLine("Compression level should be between 0 and 4 (inclusive)");
        return 9;
    }
    CompressionLevel level = options.Level;
    string inputFolder = options.InputFolder ?? Environment.CurrentDirectory;
    // File.OpenWrite does not truncate an existing file, so remove any previous archive first.
    File.Delete(options.OutputFile);
    using (FileStream fs = File.OpenWrite(options.OutputFile))
    {
        int headerSize = Marshal.SizeOf(typeof (TimHeader));
        byte[] headerBytes = new byte[headerSize];
        // Reserve space for the header; it is rewritten once all offsets and sizes are known.
        fs.Write(headerBytes, 0, headerSize);
        Packer packer = new Packer(inputFolder, fs, level);
        Packer.PackerTables tables = packer.Pack();
        TimHeader header;
        // BUG FIX: the MD5 provider is IDisposable and was never disposed.
        using (MD5 md5 = MD5.Create())
        {
            header = new TimHeader
            {
                headerSize = (uint) headerSize,
                version = TimHeader.currentVersion,
                id = TimHeader.signature,
                directoryListingDigest = md5.ComputeHash(tables.Directory),
                fileRegistryDigest = md5.ComputeHash(tables.FileRegistry),
                bucketTableDigest = md5.ComputeHash(tables.BucketTable),
                hashtableDigest = md5.ComputeHash(tables.HashTable),
                directoryListngSize = (uint) tables.Directory.Length, // field-name typo comes from TimHeader itself
                fileCount = (uint) tables.FileCount
            };
        }

        // --- Directory listing ---
        CompressionParameters p = new CompressionParameters
        {
            DictionarySize = Util.GetDictLength((uint) tables.Directory.Length),
            UpdateRate = TableUpdateRate.Fastest,
            Level = level
        };
        // 2x head-room (minimum 128 bytes) in case the table data is incompressible.
        byte[] outputBuffer = new byte[Math.Max(tables.Directory.Length * 2, 128)];
        uint adler = 0;
        int outSize = outputBuffer.Length;
        CompressStatus status = Lzham.CompressMemory(p, tables.Directory, tables.Directory.Length, 0, outputBuffer, ref outSize, 0, ref adler);
        if (status != CompressStatus.Success)
        {
            // BUG FIX (here and below): these messages were plain literals missing the '$'
            // prefix, so they printed the text "({status})" verbatim.
            Console.WriteLine($"Failed to pack directory listing. ({status})");
            Environment.FailFast($"Failed to pack directory listing. ({status})");
        }
        header.directoryListingCompressedSize = (uint) outSize;
        header.directoryListingOffset = (ulong) fs.Position;
        fs.Write(outputBuffer, 0, outSize);

        // --- File registry ---
        p = new CompressionParameters
        {
            DictionarySize = Util.GetDictLength((uint) tables.FileRegistry.Length),
            UpdateRate = TableUpdateRate.Fastest,
            Level = level
        };
        outputBuffer = new byte[Math.Max(tables.FileRegistry.Length * 2, 128)];
        outSize = outputBuffer.Length;
        status = Lzham.CompressMemory(p, tables.FileRegistry, tables.FileRegistry.Length, 0, outputBuffer, ref outSize, 0, ref adler);
        if (status != CompressStatus.Success)
        {
            Console.WriteLine($"Failed to pack file registry. ({status})");
            Environment.FailFast($"Failed to pack file registry. ({status})");
        }
        header.fileRegistryCompressedSize = (uint) outSize;
        header.fileRegistryOffset = (ulong) fs.Position;
        fs.Write(outputBuffer, 0, outSize);

        // --- Hash table ---
        p = new CompressionParameters
        {
            DictionarySize = Util.GetDictLength((uint) tables.HashTable.Length),
            UpdateRate = TableUpdateRate.Fastest,
            Level = level
        };
        outputBuffer = new byte[Math.Max(tables.HashTable.Length * 2, 128)];
        outSize = outputBuffer.Length;
        status = Lzham.CompressMemory(p, tables.HashTable, tables.HashTable.Length, 0, outputBuffer, ref outSize, 0, ref adler);
        if (status != CompressStatus.Success)
        {
            Console.WriteLine($"Failed to pack hash table. ({status})");
            Environment.FailFast($"Failed to pack hash table. ({status})");
        }
        header.hashtableCompressedSize = (uint) outSize;
        header.hashtableOffset = (ulong) fs.Position;
        fs.Write(outputBuffer, 0, outSize);

        // --- Bucket table ---
        p = new CompressionParameters
        {
            DictionarySize = Util.GetDictLength((uint) tables.BucketTable.Length),
            UpdateRate = TableUpdateRate.Fastest,
            Level = level
        };
        outputBuffer = new byte[Math.Max(tables.BucketTable.Length * 2, 128)];
        outSize = outputBuffer.Length;
        status = Lzham.CompressMemory(p, tables.BucketTable, tables.BucketTable.Length, 0, outputBuffer, ref outSize, 0, ref adler);
        if (status != CompressStatus.Success)
        {
            Console.WriteLine($"Failed to pack bucket table. ({status})");
            Environment.FailFast($"Failed to pack bucket table. ({status})");
        }
        header.bucketTableCompressedSize = (uint) outSize;
        header.bucketTableOffset = (ulong) fs.Position;
        fs.Write(outputBuffer, 0, outSize);

        // Back-patch the real header now that every offset and size is known.
        fs.Seek(0, SeekOrigin.Begin);
        headerBytes = Util.StructureToByteArray(header);
        fs.Write(headerBytes, 0, headerSize);
    }
    return 0;
}
/// <summary>
/// One-shot in-memory compression with a caller-friendly argument order (input buffer first).
/// Delegates to the interop layer, which expects the output buffer first.
/// </summary>
/// <param name="parameters">Compression settings passed through to the native encoder.</param>
/// <param name="inBuf">Source buffer.</param>
/// <param name="inBufSize">Number of input bytes to compress.</param>
/// <param name="inBufOffset">Offset of the first input byte.</param>
/// <param name="outBuf">Destination buffer.</param>
/// <param name="outBufSize">In: capacity available in <paramref name="outBuf"/>; out: bytes written.</param>
/// <param name="outBufOffset">Offset at which output is written.</param>
/// <param name="adler32">Receives the Adler-32 checksum reported by the encoder.</param>
/// <returns>The status reported by the native compressor.</returns>
public static CompressStatus CompressMemory(CompressionParameters parameters, byte[] inBuf, int inBufSize, int inBufOffset, byte[] outBuf, ref int outBufSize, int outBufOffset, ref uint adler32)
{
    return LzhamInterop.CompressMemory(
        parameters,
        outBuf, ref outBufSize, outBufOffset,
        inBuf, inBufSize, inBufOffset,
        ref adler32);
}
/// <summary>
/// Marshals the managed parameter set into the native layout and performs a one-shot
/// in-memory compression via <c>lzham_compress_memory</c>.
/// </summary>
/// <param name="parameters">Managed compression settings; <c>SeedBytes</c> may be null.</param>
/// <param name="outBuf">Destination buffer.</param>
/// <param name="outBufSize">In: capacity available; out: bytes actually written.</param>
/// <param name="outBufOffset">Offset at which output is written.</param>
/// <param name="inBuf">Source buffer.</param>
/// <param name="inBufSize">Number of input bytes to compress.</param>
/// <param name="inBufOffset">Offset of the first input byte.</param>
/// <param name="adler32">Receives the Adler-32 checksum reported by the encoder.</param>
/// <returns>The status reported by the native compressor.</returns>
/// <exception cref="ArgumentNullException">A buffer argument is null.</exception>
/// <exception cref="ArgumentException">Offset plus count exceeds a buffer's length.</exception>
public static unsafe CompressStatus CompressMemory(CompressionParameters parameters, byte[] outBuf, ref int outBufSize, int outBufOffset, byte[] inBuf, int inBufSize, int inBufOffset, ref uint adler32)
{
    // Robustness: fail with ArgumentNullException instead of NullReferenceException.
    if (outBuf == null)
    {
        throw new ArgumentNullException(nameof(outBuf));
    }
    if (inBuf == null)
    {
        throw new ArgumentNullException(nameof(inBuf));
    }
    if (outBufOffset + outBufSize > outBuf.Length)
    {
        throw new ArgumentException("Offset plus count is larger than the length of array", nameof(outBuf));
    }
    if (inBufOffset + inBufSize > inBuf.Length)
    {
        throw new ArgumentException("Offset plus count is larger than the length of array", nameof(inBuf));
    }

    // Zero-initialize so every field has a well-defined value before the struct's
    // address is handed to native code.
    CompressionParametersInternal p = new CompressionParametersInternal();
    p.m_struct_size = (uint) sizeof(CompressionParametersInternal);
    p.m_compress_flags = parameters.Flags;
    p.m_dict_size_log2 = parameters.DictionarySize;
    p.m_level = parameters.Level;
    p.m_max_helper_threads = parameters.HelperThreads;
    p.m_table_max_update_interval = parameters.MaxUpdateInterval;
    p.m_table_update_interval_slow_rate = parameters.UpdateIntervalSlowRate;
    p.m_table_update_rate = parameters.UpdateRate;
    // BUG FIX: m_num_seed_bytes was left unassigned when SeedBytes was null; it is now
    // always set. (A redundant second m_struct_size assignment was also removed.)
    p.m_num_seed_bytes = parameters.SeedBytes != null ? (uint) parameters.SeedBytes.Length : 0;

    fixed (byte* seedBytes = parameters.SeedBytes)
    fixed (byte* outBytes = outBuf)
    fixed (byte* inBytes = inBuf)
    {
        p.m_pSeed_bytes = seedBytes; // null when there is no seed dictionary
        byte* pBytes = (byte*) &p;
        IntPtr outSize = new IntPtr(outBufSize);
        CompressStatus result = (CompressStatus) lzham_compress_memory(pBytes, outBytes + outBufOffset, ref outSize, inBytes + inBufOffset, inBufSize, ref adler32);
        outBufSize = outSize.ToInt32();
        return result;
    }
}
/// <summary>
/// Marshals the managed parameter set into the native layout and initializes a streaming
/// compression context via <c>lzham_compress_init</c>.
/// </summary>
/// <param name="parameters">Managed compression settings; <c>SeedBytes</c> may be null.</param>
/// <returns>The native compression handle; callers must check <c>IsInvalid</c>.</returns>
public static unsafe CompressionHandle CompressInit(CompressionParameters parameters)
{
    // Zero-initialize so every field has a well-defined value before the struct's
    // address is handed to native code.
    CompressionParametersInternal p = new CompressionParametersInternal();
    p.m_struct_size = (uint) sizeof(CompressionParametersInternal);
    p.m_compress_flags = parameters.Flags;
    p.m_dict_size_log2 = parameters.DictionarySize;
    p.m_level = parameters.Level;
    p.m_max_helper_threads = parameters.HelperThreads;
    p.m_table_max_update_interval = parameters.MaxUpdateInterval;
    p.m_table_update_interval_slow_rate = parameters.UpdateIntervalSlowRate;
    p.m_table_update_rate = parameters.UpdateRate;
    // BUG FIX: m_num_seed_bytes was left unassigned when SeedBytes was null; it is now
    // always set.
    p.m_num_seed_bytes = parameters.SeedBytes != null ? (uint) parameters.SeedBytes.Length : 0;

    fixed (byte* seedBytes = parameters.SeedBytes)
    {
        p.m_pSeed_bytes = seedBytes; // null when there is no seed dictionary
        byte* pBytes = (byte*) &p;
        return lzham_compress_init(pBytes);
    }
}
/// <summary>
/// Recursively packs one folder: reserves a directory-listing entry for the folder, packs all
/// subfolders, compresses and appends each contained file to the archive stream (recording
/// bucket and file-registry entries), then back-patches the folder's header with the subtree size.
/// </summary>
/// <param name="path">Absolute path of the folder to pack; must start with the packer's root prefix.</param>
/// <returns>Total size in bytes of this folder's directory-listing subtree (header + children).</returns>
private ushort PackFolder(string path)
{
    long pos = _directory.Position;
    DirectoryListingEntryHeader dleh = new DirectoryListingEntryHeader();
    string[] folders = Directory.GetDirectories(path);
    string[] files = Directory.GetFiles(path);
    // NOTE(review): counts and sizes here are ushort — a very large folder would overflow.
    dleh.numDirectories = (ushort) folders.Length;
    dleh.numFiles = (ushort) files.Length;
    string dlehName = Path.GetFileName(path.Substring(_pathPrefixLength));
    byte[] folderNameBytes = Encoding.UTF8.GetBytes(dlehName);
    ushort headerSize = (ushort) (Marshal.SizeOf(dleh) + folderNameBytes.Length);
    dleh.nameLength = (ushort) folderNameBytes.Length;
    // Skip past where this entry's header will go; it is written once child sizes are known.
    _directory.Position += headerSize;
    // BUG FIX: previously re-enumerated the filesystem with a second Directory.GetDirectories
    // call, which could disagree with numDirectories above if the tree changed; reuse `folders`.
    ushort foldersSize = folders.Aggregate<string, ushort>(0, (current, folder) => (ushort) (current + PackFolder(folder)));
    ushort filesSize = 0;
    foreach (string file in files)
    {
        Console.WriteLine(file);
        // Archive paths are root-relative with forward slashes.
        string shortName = file.Substring(_pathPrefixLength).Replace("\\", "/");
        byte[] nameBytes = Encoding.UTF8.GetBytes(shortName);
        Bucket bucket = new Bucket
        {
            Index = _fileCounter++,
            IndexHash = Util.Murmur2(nameBytes, 0),
            // "AMIT" bytes serve as the seed for the second hash — format constant.
            BucketHash = Util.Murmur2(nameBytes, BitConverter.ToUInt32(Encoding.UTF8.GetBytes("AMIT"), 0))
        };
        _buckets.AddOrUpdate(bucket.IndexHash, (new[] { bucket }).ToList(), (hash, list) => { list.Add(bucket); return list; });
        uint fileSize = (uint) (new FileInfo(file)).Length;
        FileRegistryEntry fre = new FileRegistryEntry
        {
            uncompressed = fileSize,
            fileOffset = (ulong) _fs.Position
        };
        byte[] fileData = File.ReadAllBytes(file);
        CompressionParameters p = new CompressionParameters
        {
            DictionarySize = Util.GetDictLength(fileSize),
            UpdateRate = TableUpdateRate.Fastest,
            Level = _level
        };
        // The output buffer is deliberately no larger than the input: if LZHAM reports
        // OutputBufferTooSmall, the file is stored uncompressed instead.
        byte[] compressedData = new byte[fileSize];
        int compressedSize = (int) fileSize;
        uint adler = 0;
        // NOTE(review): forcing a full GC per file looks like a workaround for memory
        // pressure around the native interop ("Yikes" in the original) — confirm before removing.
        GC.Collect();
        GC.WaitForFullGCComplete();
        CompressStatus status = Lzham.CompressMemory(p, fileData, fileData.Length, 0, compressedData, ref compressedSize, 0, ref adler);
        if (status != CompressStatus.Success && status != CompressStatus.OutputBufferTooSmall)
        {
            Console.WriteLine($"Error compressing file {file}. ({status})");
            Environment.FailFast($"Error compressing file {file}. ({status})");
        }
        if (status != CompressStatus.OutputBufferTooSmall)
        {
            // Compression helped: store the compressed payload.
            fre.flag = 1;
            fre.compressed = (uint) compressedSize;
            _fs.Write(compressedData, 0, compressedSize);
        }
        else
        {
            // Incompressible: store the raw file bytes.
            fre.flag = 0;
            fre.compressed = fileSize;
            using (FileStream f = File.OpenRead(file))
            {
                f.CopyTo(_fs);
            }
        }
        _files.Add(fre);
        string justTheName = Path.GetFileName(file);
        byte[] fileNameBytes = Encoding.UTF8.GetBytes(justTheName);
        // Each file record in the listing is: ushort name length + UTF-8 name bytes.
        _directory.Write(BitConverter.GetBytes((ushort) fileNameBytes.Length), 0, 2);
        _directory.Write(fileNameBytes, 0, fileNameBytes.Length);
        filesSize += (ushort) (fileNameBytes.Length + 2);
    }
    dleh.size = (ushort) (headerSize + foldersSize + filesSize);
    // Back-patch this folder's header now that the subtree size is known.
    long savePos = _directory.Position;
    _directory.Position = pos;
    byte[] data = Util.StructureToByteArray(dleh);
    _directory.Write(data, 0, data.Length);
    _directory.Write(folderNameBytes, 0, folderNameBytes.Length);
    _directory.Position = savePos;
    return dleh.size;
}