Example #1
        private static int Repack(RepackOptions options)
        {
            if (options.Level < 0 || options.Level > CompressionLevel.Uber)
            {
                Console.WriteLine("Compresson level should be between 0 and 4 (inclusive)");
                return 9;
            }
            CompressionLevel level = options.Level;
            string inputFolder = options.InputFolder ?? Environment.CurrentDirectory;
            File.Delete(options.OutputFile);
            using (FileStream fs = File.OpenWrite(options.OutputFile))
            {
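                // Reserve space for the header up front; it is rewritten with the real sizes, offsets and digests once all tables have been packed.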
                int headerSize = Marshal.SizeOf(typeof(TimHeader));
                byte[] headerBytes = new byte[headerSize];
                fs.Write(headerBytes, 0, headerSize);
                Packer packer = new Packer(inputFolder, fs, level);
                Packer.PackerTables tables = packer.Pack();
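                // Record an MD5 digest of each packed table in the header.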
                MD5 md5 = MD5.Create();
                TimHeader header = new TimHeader
                {
                    headerSize = (uint) headerSize,
                    version = TimHeader.currentVersion,
                    id = TimHeader.signature,
                    directoryListingDigest = md5.ComputeHash(tables.Directory),
                    fileRegistryDigest = md5.ComputeHash(tables.FileRegistry),
                    bucketTableDigest = md5.ComputeHash(tables.BucketTable),
                    hashtableDigest = md5.ComputeHash(tables.HashTable),
                    directoryListngSize = (uint) tables.Directory.Length,
                    fileCount = (uint) tables.FileCount
                };

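                // Compress the directory listing with LZHAM and record its compressed size and offset in the header.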
                CompressionParameters p = new CompressionParameters
                {
                    DictionarySize = Util.GetDictLength((uint)tables.Directory.Length),
                    UpdateRate = TableUpdateRate.Fastest,
                    Level = level
                };

                byte[] outputBuffer = new byte[Math.Max(tables.Directory.Length * 2, 128)];
                uint adler32 = 0;
                int outsize = outputBuffer.Length;
                CompressStatus status = Lzham.CompressMemory(p, tables.Directory, tables.Directory.Length, 0, outputBuffer, ref outsize, 0, ref adler32);

                if (status != CompressStatus.Success)
                {
                    Console.WriteLine($"Failed to pack directory listing. ({status})");
                    Environment.FailFast($"Failed to pack directory listing. ({status})");
                }

                header.directoryListingCompressedSize = (uint)outsize;
                header.directoryListingOffset = (ulong)fs.Position;
                fs.Write(outputBuffer, 0, outsize);

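                // Compress the file registry the same way.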
                p = new CompressionParameters
                {
                    DictionarySize = Util.GetDictLength((uint)tables.FileRegistry.Length),
                    UpdateRate = TableUpdateRate.Fastest,
                    Level = level
                };

                outputBuffer = new byte[Math.Max(tables.FileRegistry.Length * 2, 128)];
                outsize = outputBuffer.Length;
                status = Lzham.CompressMemory(p, tables.FileRegistry, tables.FileRegistry.Length, 0, outputBuffer, ref outsize, 0, ref adler32);

                if (status != CompressStatus.Success)
                {
                    Console.WriteLine($"Failed to pack file registry. ({status})");
                    Environment.FailFast($"Failed to pack file registry. ({status})");
                }

                header.fileRegistryCompressedSize = (uint)outsize;
                header.fileRegistryOffset = (ulong)fs.Position;
                fs.Write(outputBuffer, 0, outsize);

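                // Compress the hash table.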
                p = new CompressionParameters
                {
                    DictionarySize = Util.GetDictLength((uint)tables.HashTable.Length),
                    UpdateRate = TableUpdateRate.Fastest,
                    Level = level
                };

                outputBuffer = new byte[Math.Max(tables.HashTable.Length * 2, 128)];
                outsize = outputBuffer.Length;
                status = Lzham.CompressMemory(p, tables.HashTable, tables.HashTable.Length, 0, outputBuffer, ref outsize, 0, ref adler32);

                if (status != CompressStatus.Success)
                {
                    Console.WriteLine($"Failed to pack hash table. ({status})");
                    Environment.FailFast($"Failed to pack hash table. ({status})");
                }

                header.hashtableCompressedSize = (uint)outsize;
                header.hashtableOffset = (ulong)fs.Position;
                fs.Write(outputBuffer, 0, outsize);

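                // Compress the bucket table.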
                p = new CompressionParameters
                {
                    DictionarySize = Util.GetDictLength((uint)tables.BucketTable.Length),
                    UpdateRate = TableUpdateRate.Fastest,
                    Level = level
                };

                outputBuffer = new byte[Math.Max(tables.BucketTable.Length * 2, 128)];
                outsize = outputBuffer.Length;
                status = Lzham.CompressMemory(p, tables.BucketTable, tables.BucketTable.Length, 0, outputBuffer, ref outsize, 0, ref adler32);

                if (status != CompressStatus.Success)
                {
                    Console.WriteLine($"Failed to pack bucket table. ({status})");
                    Environment.FailFast($"Failed to pack bucket table. ({status})");
                }

                header.bucketTableCompressedSize = (uint)outsize;
                header.bucketTableOffset = (ulong)fs.Position;
                fs.Write(outputBuffer, 0, outsize);

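                // Go back to the start of the file and overwrite the placeholder with the finished header.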
                fs.Seek(0, SeekOrigin.Begin);

                headerBytes = Util.StructureToByteArray(header);
                fs.Write(headerBytes, 0, headerSize);
            }
            return 0;
        }
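
A minimal sketch of a call site for Repack, assuming RepackOptions exposes settable Level, InputFolder, and OutputFile properties (in the real tool they may instead be populated by a command-line parser); the paths and output file name below are illustrative only:

        // Hypothetical call site; the property setters and the paths are assumptions.
        RepackOptions options = new RepackOptions
        {
            Level = CompressionLevel.Uber,   // must fall in the 0..4 range checked above
            InputFolder = @"C:\unpacked",    // null falls back to the current directory
            OutputFile = "repacked.tim"
        };
        int exitCode = Repack(options);      // 0 on success, 9 on an invalid compression level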
Example #2
 private static int ValidateHeader(TimHeader header)
 {
     if (header.id != TimHeader.signature)
     {
         Console.WriteLine("Unknown format");
         return 3;
     }
     if (header.version != TimHeader.currentVersion)
     {
         Console.WriteLine("Unknown archive version");
         return 4;
     }
     return 0;
 }
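
ValidateHeader only inspects an already-deserialized TimHeader value, so a caller must first read the header bytes from the archive and marshal them into the struct. A hedged sketch of one way to do that; the OpenAndValidate name, the path argument, and the pinning approach are illustrative assumptions, not part of the example above:

 // Hypothetical helper: read the fixed-size header from an archive and run ValidateHeader on it.
 private static int OpenAndValidate(string path)
 {
     using (FileStream fs = File.OpenRead(path))
     {
         int headerSize = Marshal.SizeOf(typeof(TimHeader));
         byte[] buffer = new byte[headerSize];
         fs.Read(buffer, 0, headerSize);

         // Pin the buffer and marshal the raw bytes back into a TimHeader.
         GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
         try
         {
             TimHeader header = (TimHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(TimHeader));
             return ValidateHeader(header);   // 0 = ok, 3 = unknown format, 4 = unknown version
         }
         finally
         {
             handle.Free();
         }
     }
 }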