/// <summary>
/// Deflates <paramref name="src"/> with zlib at compression level 6 and
/// returns the compressed bytes.
/// </summary>
public static byte[] Compress(byte[] src)
{
    using (var input = new MemoryStream(src))
    using (var packed = new MemoryStream())
    {
        using (var zlib = new ZLibStream(packed, CompressionMode.Compress, CompressionLevel.Level6))
        {
            input.CopyTo(zlib);
        }
        // ToArray is taken only after the compressor is disposed, so any
        // buffered output has been flushed into `packed`.
        return packed.ToArray();
    }
}
// Compresses the current block buffer (_BlockBytes[0..blockLength)) with zlib into
// `data` and, when the result passes the compression-ratio check AND is smaller
// than the raw block, writes a compressed block record (length prefix, flag byte,
// CompressedBlockHeader, payload) to the base stream and resets _BlockOffset.
// Returns true when the compressed form was written; false means the caller
// should store the block uncompressed.
// NOTE(review): the ZLibStream is only closed/disposed on the success path — both
// `return(false)` paths leak it. Care is needed in any fix: disposing the stream
// would also flush a zlib trailer into `data`.
// NOTE(review): this relies on `zlib.Flush()` emitting all pending deflate output
// so that TotalOut and data.Length reflect the full compressed size — confirm the
// ZLibStream implementation guarantees that.
private bool FlushZlibCompressedBlock(MemoryStream data, int blockLength)
{
    var zlib = new ZLibStream(data, CompressionMode.Compress, CompressionLevel.Level9);
    zlib.Write(this._BlockBytes, 0, blockLength);
    zlib.Flush();

    // If it doesn't fit within the range of ratio, store as uncompressed.
    if (!IsWithinCompressionRatio((int)zlib.TotalOut, blockLength))
    {
        return(false);
    }
    var compressedLength = (int)data.Length;
    // Only store compressed when it actually saves space.
    if (data.Length < blockLength)
    {
        // 32 is presumably the serialized CompressedBlockHeader size — TODO confirm.
        this._BaseStream.WriteValueS32(32 + compressedLength, this._Endian);
        this._BaseStream.WriteValueU8(1); // flag byte: compressed block
        CompressedBlockHeader compressedBlockHeader = new CompressedBlockHeader();
        compressedBlockHeader.SetZlibPreset();
        compressedBlockHeader.UncompressedSize = (uint)blockLength;
        //TODO: I think this should actually be alignment?
        compressedBlockHeader.CompressedSize = (uint)compressedLength;
        compressedBlockHeader.ChunkSize = (short)_Alignment;
        compressedBlockHeader.Unknown0C = 135200769; // 0x080F0001, meaning unknown
        compressedBlockHeader.Chunks[0] = (ushort)compressedBlockHeader.CompressedSize;
        compressedBlockHeader.Write(this._BaseStream, this._Endian);
        this._BaseStream.Write(data.GetBuffer(), 0, compressedLength);
        this._BlockOffset = 0;
        zlib.Close();
        zlib.Dispose();
        return(true);
    }
    return(false);
}
/// <summary>
/// Opens an archive entry, layering decryption and decompression wrappers
/// over the raw stream according to the entry's flag bits.
/// </summary>
public override Stream OpenEntry(ArcFile arc, Entry entry)
{
    var ida_entry = entry as IdaEntry;
    if (ida_entry == null || ida_entry.Flags == 0)
        return base.OpenEntry(arc, entry);
    var stream = arc.File.CreateStream(entry.Offset, entry.Size);
    if ((ida_entry.Flags & 0xB) != 0)
        stream = DecryptEntry(stream, ida_entry);
    if ((ida_entry.Flags & 4) != 0)
        stream = new PackedStream<RleDecompressor>(stream);
    if ((ida_entry.Flags & 0x10) != 0)
        stream = new ZLibStream(stream, CompressionMode.Decompress);
    return stream;
}
/// <summary>
/// Writes <paramref name="image"/> as a zlib-compressed BMP, XORed with 0x21.
/// </summary>
public override void Write(Stream file, ImageData image)
{
    using (var xored = new XoredStream(file, 0x21, true))
    {
        using (var zlib = new ZLibStream(xored, CompressionMode.Compress, CompressionLevel.Level9))
        {
            Bmp.Write(zlib, image);
        }
    }
}
/// <summary>
/// Opens a CGF-packed audio stream and converts it to raw 16-bit mono PCM.
/// The payload holds two zlib-packed sections: <c>bits1</c> carries one sign
/// bit per sample, <c>bits2</c> carries ADPCM-coded magnitudes.
/// </summary>
public override SoundInput TryOpen(IBinaryStream file)
{
    var header = file.ReadHeader(0x23);
    int bits1_length = header.ToInt32(0x13);
    int bits2_length = header.ToInt32(0x17);
    var data = file.ReadBytes(bits1_length + bits2_length);
    CgfDecoder.Decrypt(data, bits1_length);
    int unpacked_length = header.ToInt32(5);
    uint sample_rate = header.ToUInt32(0xD);
    int sample_count = unpacked_length >> 1;       // 2 bytes per 16-bit sample
    var bits1 = new byte[(sample_count + 7) >> 3]; // 1 sign bit per sample
    var bits2 = new byte[sample_count >> 1];
    // Each packed section is read past a 4-byte prefix.
    using (var mem = new MemoryStream(data, 4, bits1_length - 4))
    using (var input = new ZLibStream(mem, CompressionMode.Decompress))
    {
        // Stream.Read may return fewer bytes than requested, especially from a
        // decompression stream — loop until the buffer is full or EOF.
        int pos = 0;
        while (pos < bits1.Length)
        {
            int read = input.Read(bits1, pos, bits1.Length - pos);
            if (0 == read)
                break;
            pos += read;
        }
    }
    using (var mem = new MemoryStream(data, bits1_length + 4, bits2_length - 4))
    using (var input = new ZLibStream(mem, CompressionMode.Decompress))
    {
        int pos = 0;
        while (pos < bits2.Length)
        {
            int read = input.Read(bits2, pos, bits2.Length - pos);
            if (0 == read)
                break;
            pos += read;
        }
    }
    short init = header.ToInt16(0x11);
    var decoded = new short[sample_count];
    DecodeAdp(bits2, decoded, sample_count, init);
    var output = new byte[unpacked_length];
    byte bit = 0x80;
    int src = 0;
    int dst = 0;
    for (int i = 0; i < decoded.Length; ++i)
    {
        // Magnitudes are clamped non-negative; bits1 supplies the sign.
        short sample = Math.Max(decoded[i], (short)0);
        if ((bit & bits1[src]) != 0)
        {
            sample = (short)-sample;
        }
        LittleEndian.Pack(sample, output, dst);
        dst += 2;
        bit >>= 1;
        if (0 == bit)
        {
            ++src;
            bit = 0x80;
        }
    }
    var format = new WaveFormat
    {
        FormatTag = 1, // PCM
        Channels = 1,
        SamplesPerSecond = sample_rate,
        BlockAlign = 2,
        BitsPerSample = 16,
    };
    format.SetBPS();
    var pcm = new MemoryStream(output);
    return new RawPcmInput(pcm, format);
}
/// <summary>
/// Parses a TAC archive: validates the "1.00"/"1.10" version tag, deciphers the
/// Blowfish-encrypted, zlib-packed index, and builds the entry list. Entry hashes
/// are combined with their bucket hash and resolved to file names where known;
/// non-packed entries additionally get a per-entry Blowfish key and a sniffed
/// file type. Returns null when the file does not look like a TAC archive.
/// </summary>
public override ArcFile TryOpen(ArcView file)
{
    int version;
    if (file.View.AsciiEqual(4, "1.00")) { version = 100; }
    else if (file.View.AsciiEqual(4, "1.10")) { version = 110; }
    else { return(null); }
    int count = file.View.ReadInt32(0x14);
    if (!IsSaneCount(count)) { return(null); }
    int bucket_count = file.View.ReadInt32(0x18);
    uint index_size = file.View.ReadUInt32(0x1C);
    uint arc_seed = file.View.ReadUInt32(0x20);
    // v1.10 headers are 8 bytes longer than v1.00 ones.
    long index_offset = version >= 110 ? 0x2C : 0x24;
    long base_offset = index_offset + index_size;
    var blowfish = new Blowfish(IndexKey);
    var packed_bytes = file.View.ReadBytes(index_offset, index_size);
    // Blowfish works on 8-byte units; decipher only the whole blocks.
    blowfish.Decipher(packed_bytes, packed_bytes.Length & ~7);
    using (var input = new MemoryStream(packed_bytes))
    using (var unpacked = new ZLibStream(input, CompressionMode.Decompress))
    using (var index = new BinaryReader(unpacked))
    {
        var file_map = BuildFileNameMap(arc_seed);
        // Bucket table: a 16-bit hash plus a (start, count) range into the entry list.
        var dir_table = new List<TacBucket>(bucket_count);
        for (int i = 0; i < bucket_count; ++i)
        {
            var entry = new TacBucket();
            entry.Hash = index.ReadUInt16();
            entry.Count = index.ReadUInt16();
            entry.Index = index.ReadInt32();
            dir_table.Add(entry);
        }
        var dir = new List<Entry>(count);
        for (int i = 0; i < count; ++i)
        {
            var entry = new TacEntry();
            entry.Hash = index.ReadUInt64();
            entry.IsPacked = index.ReadInt32() != 0;
            entry.UnpackedSize = index.ReadUInt32();
            entry.Offset = base_offset + index.ReadUInt32();
            entry.Size = index.ReadUInt32();
            if (!entry.CheckPlacement(file.MaxOffset)) { return(null); }
            dir.Add(entry);
        }
        var buffer = new byte[8];
        foreach (var bucket in dir_table)
        {
            for (int i = 0; i < bucket.Count; ++i)
            {
                var entry = dir[bucket.Index + i] as TacEntry;
                // Full hash = stored hash shifted left 16, combined with the bucket hash.
                entry.Hash = entry.Hash << 16 | bucket.Hash;
                bool known_name = file_map.ContainsKey(entry.Hash);
                if (known_name)
                {
                    entry.Name = file_map[entry.Hash];
                    entry.Type = FormatCatalog.Instance.GetTypeFromName(entry.Name);
                }
                else
                {
                    // Fall back to the hex hash as a synthetic name.
                    entry.Name = string.Format("{0:X16}", entry.Hash);
                }
                if (entry.IsPacked) { continue; }
                // Non-packed entries use a Blowfish key derived from the hash.
                entry.Key = Encoding.ASCII.GetBytes(string.Format("{0}_tlib_secure_", entry.Hash));
                if (!known_name)
                {
                    // Sniff the file type from the first 8 decrypted bytes.
                    var bf = new Blowfish(entry.Key);
                    file.View.Read(entry.Offset, buffer, 0, 8);
                    bf.Decipher(buffer, 8);
                    var res = AutoEntry.DetectFileType(buffer.ToUInt32(0));
                    if (res != null) { entry.ChangeType(res); }
                }
                // Images are only encrypted for their first 10240 bytes; everything
                // else is encrypted in full.
                if ("image" == entry.Type) { entry.EncryptedSize = Math.Min(10240, entry.Size); }
                else { entry.EncryptedSize = entry.Size; }
            }
        }
        return(new ArcFile(file, this, dir));
    }
}
/// <summary>
/// Builds a YPF archive in three passes: (1) encode and XOR-obfuscate entry
/// names (cp932) and compute the index size, (2) write file contents —
/// zlib-compressed when the entry's file type says so — after the reserved
/// index area, (3) seek back and emit the header plus per-entry index records.
/// Entries are stored sorted by CRC32 name hash.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var ypf_options = GetOptions<YpfOptions>(options);
    if (null == ypf_options)
    {
        throw new ArgumentException("Invalid archive creation options", "options");
    }
    if (ypf_options.Key > 0xff)
    {
        throw new InvalidEncryptionScheme(arcStrings.MsgCreationKeyRequired);
    }
    if (0 == ypf_options.Version)
    {
        throw new InvalidFormatException(arcStrings.MsgInvalidVersion);
    }
    var scheme = new YpfScheme
    {
        SwapTable = GuessSwapTable(ypf_options.Version),
        Key = (byte)ypf_options.Key
    };
    int callback_count = 0;
    var encoding = Encodings.cp932.WithFatalFallback();
    ChecksumFunc Checksum = data => Crc32.Compute(data, 0, data.Length);
    // 0x20-byte header; each index record adds 0x17 fixed bytes plus the name.
    uint data_offset = 0x20;
    var file_table = new List<YpfEntry>();
    // Pass 1: encode names, hash them, XOR-obfuscate, and size the index.
    foreach (var entry in list)
    {
        try
        {
            string file_name = entry.Name;
            byte[] name_buf = encoding.GetBytes(file_name);
            if (name_buf.Length > 0xff)
            {
                throw new InvalidFileName(entry.Name, arcStrings.MsgFileNameTooLong);
            }
            uint hash = Checksum(name_buf);
            byte file_type = GetFileType(ypf_options.Version, file_name);
            for (int i = 0; i < name_buf.Length; ++i)
            {
                name_buf[i] = (byte)(name_buf[i] ^ ypf_options.Key);
            }
            file_table.Add(new YpfEntry
            {
                Name = file_name,
                IndexName = name_buf,
                NameHash = hash,
                FileType = file_type,
                IsPacked = 0 == file_type,
            });
            data_offset += (uint)(0x17 + name_buf.Length);
        }
        catch (EncoderFallbackException X)
        {
            // Name contains characters not representable in cp932.
            throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
    }
    // Index records are stored sorted by name hash.
    file_table.Sort((a, b) => a.NameHash.CompareTo(b.NameHash));
    // Pass 2: write file contents after the (not yet written) index area.
    output.Position = data_offset;
    uint current_offset = data_offset;
    foreach (var entry in file_table)
    {
        if (null != callback)
        {
            callback(callback_count++, entry, arcStrings.MsgAddingFile);
        }
        entry.Offset = current_offset;
        using (var input = File.OpenRead(entry.Name))
        {
            var file_size = input.Length;
            // Offsets and sizes are stored as 32-bit values.
            if (file_size > uint.MaxValue || current_offset + file_size > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            entry.UnpackedSize = (uint)file_size;
            // CheckedStream accumulates an Adler32 of everything written through it.
            using (var checked_stream = new CheckedStream(output, new Adler32()))
            {
                if (entry.IsPacked)
                {
                    var start = output.Position;
                    using (var zstream = new ZLibStream(checked_stream, CompressionMode.Compress, CompressionLevel.Level9, true))
                    {
                        input.CopyTo(zstream);
                    }
                    entry.Size = (uint)(output.Position - start);
                }
                else
                {
                    input.CopyTo(checked_stream);
                    entry.Size = entry.UnpackedSize;
                }
                checked_stream.Flush();
                entry.CheckSum = checked_stream.CheckSumValue;
                current_offset += entry.Size;
            }
        }
    }
    if (null != callback)
    {
        callback(callback_count++, null, arcStrings.MsgWritingIndex);
    }
    // Pass 3: header and index records.
    output.Position = 0;
    using (var writer = new BinaryWriter(output, encoding, true))
    {
        writer.Write(Signature);
        writer.Write(ypf_options.Version);
        writer.Write(file_table.Count);
        writer.Write(data_offset);
        writer.BaseStream.Seek(0x20, SeekOrigin.Begin);
        foreach (var entry in file_table)
        {
            writer.Write(entry.NameHash);
            // Name length is stored obfuscated: swap-table-encrypted, then bit-inverted.
            byte name_len = (byte)~Parser.DecryptLength(scheme.SwapTable, (byte)entry.IndexName.Length);
            writer.Write(name_len);
            writer.Write(entry.IndexName);
            writer.Write(entry.FileType);
            writer.Write(entry.IsPacked);
            writer.Write(entry.UnpackedSize);
            writer.Write(entry.Size);
            writer.Write((uint)entry.Offset);
            writer.Write(entry.CheckSum);
        }
    }
}
/// <summary>
/// Encodes <paramref name="input"/> as an attached file inside plugin
/// <paramref name="p"/>: the body (raw or zlib-compressed), a zlib-compressed
/// 0x226-byte footer, and a 0x24-byte final footer are concatenated,
/// Base64-encoded, and written as 4090-character lines into the
/// [EncodedFile-{dirName}-{fileName}] ini section. The original plugin file is
/// backed up first and restored on write failure.
/// Returns the refreshed plugin instance.
/// </summary>
private static Plugin Encode(Plugin p, string dirName, string fileName, byte[] input, EncodeMode mode)
{
    byte[] fileNameUTF8 = Encoding.UTF8.GetBytes(fileName);
    // The footer reserves 0x200 bytes for the name: 1 length byte + up to 511 name bytes.
    if (fileName.Length == 0 || 512 <= fileNameUTF8.Length)
    {
        throw new FileDecodeFailException($"Filename's UTF8 encoded length should be shorter than 512B");
    }

    // Check Overwrite
    bool fileOverwrite = false;
    if (p.Sections.ContainsKey(dirName))
    {
        // [{dirName}] section exists, check if there is already same file encoded
        List<string> lines = p.Sections[dirName].GetLines();
        if (lines.FirstOrDefault(x => x.Equals(fileName, StringComparison.OrdinalIgnoreCase)) != null)
        {
            fileOverwrite = true;
        }
    }

    string encodedStr;
    using (MemoryStream bodyStream = new MemoryStream())
    using (MemoryStream footerStream = new MemoryStream())
    using (MemoryStream concatStream = new MemoryStream())
    {
        // [Stage 1] Compress file with zlib
        switch (mode)
        {
            case EncodeMode.Compress:
            {
                using (ZLibStream zs = new ZLibStream(bodyStream, CompressionMode.Compress, CompressionLevel.Level6, true))
                {
                    zs.Write(input, 0, input.Length);
                }
                bodyStream.Position = 0;
            }
            break;
            case EncodeMode.Raw:
            {
                bodyStream.Write(input, 0, input.Length);
                bodyStream.Position = 0;
            }
            break;
            default:
                throw new InternalException($"Wrong EncodeMode [{mode}]");
        }

        // [Stage 2] Generate first footer
        byte[] rawFooter = new byte[0x226]; // 0x550
        {
            // 0x000 - 0x1FF : Filename and its length
            rawFooter[0] = (byte)fileNameUTF8.Length;
            fileNameUTF8.CopyTo(rawFooter, 1);
            for (int i = 1 + fileNameUTF8.Length; i < 0x200; i++)
            {
                rawFooter[i] = 0; // Null Pad
            }
            // 0x200 - 0x207 : 8B -> Length of raw file, in little endian
            BitConverter.GetBytes(input.Length).CopyTo(rawFooter, 0x200);
            switch (mode)
            {
                case EncodeMode.Compress: // Type 1
                    // 0x208 - 0x20F : 8B -> Length of zlibed body, in little endian
                    BitConverter.GetBytes(bodyStream.Length).CopyTo(rawFooter, 0x208);
                    // 0x210 - 0x21F : 16B -> Null padding
                    for (int i = 0x210; i < 0x220; i++)
                    {
                        rawFooter[i] = 0;
                    }
                    break;
                case EncodeMode.Raw: // Type 2
                    // 0x208 - 0x21F : 16B -> Null padding
                    for (int i = 0x208; i < 0x220; i++)
                    {
                        rawFooter[i] = 0;
                    }
                    break;
                default:
                    throw new InternalException($"Wrong EncodeMode [{mode}]");
            }
            // 0x220 - 0x223 : CRC32 of raw file
            uint crc32 = Crc32Checksum.Crc32(input);
            BitConverter.GetBytes(crc32).CopyTo(rawFooter, 0x220);
            // 0x224 : 1B -> Compress Mode (Type 1 : 00, Type 2 : 01)
            rawFooter[0x224] = (byte)mode;
            // 0x225 : 1B -> ZLib Compress Level (Type 1 : 01 ~ 09, Type 2 : 00)
            switch (mode)
            {
                case EncodeMode.Compress: // Type 1
                    rawFooter[0x225] = (byte)CompressionLevel.Level6;
                    break;
                case EncodeMode.Raw: // Type 2
                    rawFooter[0x225] = 0;
                    break;
                default:
                    throw new InternalException($"Wrong EncodeMode [{mode}]");
            }
        }

        // [Stage 3] Compress first footer
        using (ZLibStream zs = new ZLibStream(footerStream, CompressionMode.Compress, CompressionLevel.Default, true))
        {
            zs.Write(rawFooter, 0, rawFooter.Length);
        }
        footerStream.Position = 0;

        // [Stage 4] Concat body and footer
        bodyStream.CopyTo(concatStream);
        footerStream.CopyTo(concatStream);
        bodyStream.Position = 0;
        footerStream.Position = 0;

        // [Stage 5] Generate final footer
        {
            byte[] finalFooter = new byte[0x24];

            // 0x00 - 0x04 : 4B -> CRC32 of compressed body and compressed footer
            uint crc32 = Crc32Checksum.Crc32(concatStream.ToArray());
            BitConverter.GetBytes(crc32).CopyTo(finalFooter, 0x00);
            // 0x04 - 0x08 : 4B -> Unknown - Always 1
            BitConverter.GetBytes((uint)1).CopyTo(finalFooter, 0x04);
            // 0x08 - 0x0B : 4B -> ZLBArchive version (Always 2)
            BitConverter.GetBytes((uint)2).CopyTo(finalFooter, 0x08);
            // 0x0C - 0x0F : 4B -> Zlib Compressed Footer Length
            BitConverter.GetBytes((int)footerStream.Length).CopyTo(finalFooter, 0x0C);
            // 0x10 - 0x17 : 8B -> Zlib Compressed File Length
            BitConverter.GetBytes(bodyStream.Length).CopyTo(finalFooter, 0x10);
            // 0x18 - 0x1B : 4B -> Unknown - Always 1
            BitConverter.GetBytes((uint)1).CopyTo(finalFooter, 0x18);
            // 0x1C - 0x23 : 8B -> Unknown - Always 0
            for (int i = 0x1C; i < 0x24; i++)
            {
                finalFooter[i] = 0;
            }

            concatStream.Write(finalFooter, 0, finalFooter.Length);
        }

        // [Stage 6] Encode body, footer and finalFooter with Base64
        encodedStr = Convert.ToBase64String(concatStream.ToArray());
        // Remove Base64 Padding (==, =)
        if (encodedStr.EndsWith("==", StringComparison.Ordinal))
        {
            encodedStr = encodedStr.Substring(0, encodedStr.Length - 2);
        }
        else if (encodedStr.EndsWith("=", StringComparison.Ordinal))
        {
            encodedStr = encodedStr.Substring(0, encodedStr.Length - 1);
        }
    }

    // [Stage 7] Tokenize encoded string into 4090B.
    string section = $"EncodedFile-{dirName}-{fileName}";
    List<IniKey> keys = new List<IniKey>();
    for (int i = 0; i <= (encodedStr.Length / 4090); i++)
    {
        if (i < (encodedStr.Length / 4090)) // 1 Line is 4090 characters
        {
            keys.Add(new IniKey(section, i.ToString(), encodedStr.Substring(i * 4090, 4090))); // X=eJyFk0Fr20AQhe8G...
        }
        else // Last Iteration
        {
            keys.Add(new IniKey(section, i.ToString(), encodedStr.Substring(i * 4090, encodedStr.Length - (i * 4090)))); // X=N3q8ryccAAQWuBjqA5QvAAAAAA (end)
            keys.Insert(0, new IniKey(section, "lines", i.ToString())); // lines=X
        }
    }

    // [Stage 8] Before writing to file, backup original plugin
    string tempFile = Path.GetTempFileName();
    File.Copy(p.FullPath, tempFile, true);

    // [Stage 9] Write to file
    try
    {
        // Write folder info to [EncodedFolders]
        bool writeFolderSection = true;
        if (p.Sections.ContainsKey("EncodedFolders"))
        {
            List<string> folders = p.Sections["EncodedFolders"].GetLines();
            if (0 < folders.Count(x => x.Equals(dirName, StringComparison.OrdinalIgnoreCase)))
            {
                writeFolderSection = false; // dirName is already registered
            }
        }
        if (writeFolderSection)
        {
            Ini.WriteRawLine(p.FullPath, "EncodedFolders", dirName, false);
        }

        // Write file info into [{dirName}]
        Ini.SetKey(p.FullPath, dirName, fileName, $"{input.Length},{encodedStr.Length}"); // UncompressedSize,EncodedSize

        // Write encoded file into [EncodedFile-{dirName}-{fileName}]
        if (fileOverwrite)
        {
            Ini.DeleteSection(p.FullPath, section); // Delete existing encoded file
        }
        Ini.SetKeys(p.FullPath, keys); // Write into
    }
    catch
    {
        // Error -> Rollback!
        File.Copy(tempFile, p.FullPath, true);
        throw new FileDecodeFailException($"Error while writing encoded file into [{p.FullPath}]");
    }
    finally
    {
        // Delete temp script
        File.Delete(tempFile);
    }

    // [Stage 10] Refresh Plugin
    return(p.Project.RefreshPlugin(p));
}
/// <summary>
/// Writes <paramref name="source"/> zlib-compressed and XORed with 0x21.
/// </summary>
public override void Write(SoundInput source, Stream output)
{
    using (var xored = new XoredStream(output, 0x21, true))
    {
        using (var zlib = new ZLibStream(xored, CompressionMode.Compress, CompressionLevel.Level9))
        {
            base.Write(source, zlib);
        }
    }
}
public GameSave(string fileName) { Bytes = File.ReadAllBytes(FileName = fileName); IsRemaster = BitConverter.ToInt32(Bytes, 8) == 0; SaveType = IsRemaster switch { true when Path.GetExtension(fileName) is "" => SaveType.Switch, true => SaveType.Remaster, false => SaveType.Original, }; var pattern = SaveType == SaveType.Switch ? SwitchFilePattern : RemasterFilePattern; if (IsRemaster && Path.GetExtension(fileName) != ".bin" && !Path.GetFileNameWithoutExtension(fileName).StartsWith(pattern)) { throw new NotSupportedException($"Save file is not a user save and changing them can lead to the game infinite looping. The editor only supports saves that start with {pattern}."); } _header = new(this); Buffs = FilterToApplicable(Amalur.Buffs.Values, _header.IsFateswornAware); ItemDefinitions = FilterToApplicable(Amalur.ItemDefinitions.Values, _header.IsFateswornAware); if (BitConverter.ToInt32(Bytes, BodyStart) == CompressedFlag) { Body = new byte[BitConverter.ToInt32(Bytes, BodyStart + 4)]; if (Body.Length != _header.BodyDataLength) { throw new NotSupportedException($"Save file appears corrupted. 
The header states that the body should have {_header.BodyDataLength} bytes, but the decompressed size is {Body.Length}"); } var bundleInfoStart = BodyStart + 12; var bundleInfoSize = BitConverter.ToInt32(Bytes, bundleInfoStart - 4); using var bundleInfoData = new ZLibStream(new MemoryStream(Bytes, bundleInfoStart, bundleInfoSize), CompressionMode.Decompress); var endOfBundle = bundleInfoData.ReadAll(Body); var gameStateStart = bundleInfoStart + bundleInfoSize + 4; var gameStateSize = BitConverter.ToInt32(Bytes, gameStateStart - 4); using var gameStateData = new ZLibStream(new MemoryStream(Bytes, gameStateStart, gameStateSize), CompressionMode.Decompress); gameStateData.ReadAll(Body.AsSpan(endOfBundle, Body.Length - endOfBundle)); } else { Body = Bytes.AsSpan(BodyStart, BodyDataLength).ToArray(); } _originalBodyLength = Body.Length; Stash = Stash.TryCreateStash(this); ReadOnlySpan <byte> data = Body; _bagOffset = GetBagOffset(data); _gameStateStartOffset = data.IndexOf(new byte[5] { 0xF7, 0x5D, 0x3C, 0x00, 0x0A }); var typeSectionOffset = data.IndexOf(new byte[5] { 0x23, 0xCC, 0x58, 0x00, 0x06 }) is int ix and > -1 ? ix : data.IndexOf(new byte[5] { 0x23, 0xCC, 0x58, 0x00, 0x04 }) is int pix and > -1 ? 
pix : data.IndexOf(new byte[5] { 0x23, 0xCC, 0x58, 0x00, 0x03 }); _dataLengthOffsets = new[] { _gameStateStartOffset + 5, // gameStateSize data.IndexOf(new byte[5] { 0x0C, 0xAE, 0x32, 0x00, 0x00 }) + 5, // unknown length 1 typeSectionOffset + 5, // type section length }; _itemContainer = new(this, data.IndexOf(new byte[5] { 0xD3, 0x34, 0x43, 0x00, 0x00 }), 0x00_24_D5_68_00_00_00_0Bul); _itemBuffsContainer = new(this, data.IndexOf(new byte[5] { 0xBB, 0xD5, 0x43, 0x00, 0x00 }), 0x00_28_60_84_00_00_00_0Bul); _itemSocketsContainer = new(this, data.IndexOf(new byte[5] { 0x93, 0xCC, 0x80, 0x00, 0x00 }), 0x00_59_36_38_00_00_00_0Bul); var itemLocs = _itemContainer.ToDictionary(x => x.id, x => (x.offset, x.dataLength)); var itemBuffsLocs = _itemBuffsContainer.ToDictionary(x => x.id, x => (x.offset, x.dataLength)); var itemSocketsLocs = _itemSocketsContainer.ToDictionary(x => x.id, x => (x.offset, x.dataLength)); int dataLength, playerActor = 0; var candidates = new List <(int id, int typeIdOffset, QuestItemDefinition?questItemDef)>(); for (int ixOfActor = _dataLengthOffsets[^ 1] + 4; BitConverter.ToInt32(Body, ixOfActor) == 0x00_75_2D_06; ixOfActor += dataLength)
// Copies `file` into `output`, encrypting (and optionally zlib-compressing) the
// data with the entry's cipher, and records the resulting segment and Adler32
// hash on `xp3entry`. The source is memory-mapped so the cipher and checksum
// can work over a raw pointer.
void EncryptedFileCopy(FileStream file, Xp3Entry xp3entry, Stream output, bool compress)
{
    if (file.Length > int.MaxValue)
    {
        throw new FileSizeException();
    }
    using (var map = MemoryMappedFile.CreateFromFile(file, null, 0, MemoryMappedFileAccess.Read, null, HandleInheritability.None, true))
    {
        uint unpacked_size = (uint)file.Length;
        xp3entry.UnpackedSize = (uint)unpacked_size;
        xp3entry.Size = (uint)unpacked_size;
        using (var view = map.CreateViewAccessor(0, unpacked_size, MemoryMappedFileAccess.Read))
        {
            var segment = new Xp3Segment
            {
                IsCompressed = compress,
                Offset = output.Position,
                Size = unpacked_size,
                PackedSize = unpacked_size, // fixed up below when compressing
            };
            if (compress)
            {
                // `output` is rebound to a compressor; the final `true` argument
                // presumably leaves the underlying archive stream open — confirm.
                output = new ZLibStream(output, CompressionMode.Compress, CompressionLevel.Level9, true);
            }
            unsafe
            {
                byte[] read_buffer = new byte[81920];
                byte* ptr = view.GetPointer(0);
                try
                {
                    var checksum = new Adler32();
                    // The cipher decides whether the hash covers plaintext or ciphertext.
                    bool hash_after_crypt = xp3entry.Cipher.HashAfterCrypt;
                    if (!hash_after_crypt)
                    {
                        // Hash the plaintext in one pass directly from the mapped view.
                        xp3entry.Hash = checksum.Update(ptr, (int)unpacked_size);
                    }
                    int offset = 0;
                    int remaining = (int)unpacked_size;
                    while (remaining > 0)
                    {
                        int amount = Math.Min(remaining, read_buffer.Length);
                        remaining -= amount;
                        Marshal.Copy((IntPtr)(ptr + offset), read_buffer, 0, amount);
                        xp3entry.Cipher.Encrypt(xp3entry, offset, read_buffer, 0, amount);
                        if (hash_after_crypt)
                        {
                            checksum.Update(read_buffer, 0, amount);
                        }
                        output.Write(read_buffer, 0, amount);
                        offset += amount;
                    }
                    if (hash_after_crypt)
                    {
                        xp3entry.Hash = checksum.Value;
                    }
                }
                finally
                {
                    view.SafeMemoryMappedViewHandle.ReleasePointer();
                    if (compress)
                    {
                        // Dispose the compressor to flush it, then measure the
                        // packed size from the underlying stream's position.
                        var dest = (output as ZLibStream).BaseStream;
                        output.Dispose();
                        segment.PackedSize = (uint)(dest.Position - segment.Offset);
                        xp3entry.Size = segment.PackedSize;
                    }
                    xp3entry.Segments.Add(segment);
                }
            }
        }
    }
}
/// <summary>
/// Writes an XP3 archive: signature (plus a v2 preamble), file contents (raw or
/// encrypted, optionally zlib-compressed), then the index — per-entry
/// "File"/"info"/"segm"/"adlr" records — optionally compressed. The index
/// position is reserved in the header and patched after the contents are written.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var xp3_options = GetOptions<Xp3Options>(options);
    ICrypt scheme = xp3_options.Scheme;
    bool compress_index = xp3_options.CompressIndex;
    bool compress_contents = xp3_options.CompressContents;
    bool retain_dirs = xp3_options.RetainDirs;
    bool use_encryption = !(scheme is NoCrypt);

    using (var writer = new BinaryWriter(output, Encoding.ASCII, true))
    {
        writer.Write(s_xp3_header);
        if (2 == xp3_options.Version)
        {
            writer.Write((long)0x17);
            writer.Write((int)1);
            writer.Write((byte)0x80);
            writer.Write((long)0);
        }
        // Reserve 8 bytes for the index position; patched once contents are written.
        long index_pos_offset = writer.BaseStream.Position;
        writer.BaseStream.Seek(8, SeekOrigin.Current);

        int callback_count = 0;
        var used_names = new HashSet<string>();
        var dir = new List<Xp3Entry>();
        long current_offset = writer.BaseStream.Position;
        foreach (var entry in list)
        {
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            string name = entry.Name;
            if (!retain_dirs)
            {
                name = Path.GetFileName(name);
            }
            else
            {
                name = name.Replace(@"\", "/");
            }
            // Skip duplicates after directory stripping/normalization.
            if (!used_names.Add(name))
            {
                Trace.WriteLine("duplicate name", entry.Name);
                continue;
            }
            var xp3entry = new Xp3Entry
            {
                Name = name,
                Cipher = scheme,
                // The scheme may exempt startup.tjs from encryption.
                IsEncrypted = use_encryption && !(scheme.StartupTjsNotEncrypted && VFS.IsPathEqualsToFileName(name, "startup.tjs"))
            };
            bool compress = compress_contents && ShouldCompressFile(entry);
            using (var file = File.Open(name, FileMode.Open, FileAccess.Read))
            {
                // Empty files are stored raw even when encryption is enabled.
                if (!xp3entry.IsEncrypted || 0 == file.Length)
                {
                    RawFileCopy(file, xp3entry, output, compress);
                }
                else
                {
                    EncryptedFileCopy(file, xp3entry, output, compress);
                }
            }
            dir.Add(xp3entry);
        }

        // Patch the reserved index position in the header.
        long index_pos = writer.BaseStream.Position;
        writer.BaseStream.Position = index_pos_offset;
        writer.Write(index_pos);
        writer.BaseStream.Position = index_pos;

        // The index is built in memory first; 0x58 bytes per entry is the initial capacity.
        using (var header = new BinaryWriter(new MemoryStream(dir.Count * 0x58), Encoding.Unicode))
        {
            if (null != callback)
            {
                callback(callback_count++, null, arcStrings.MsgWritingIndex);
            }
            long dir_pos = 0;
            foreach (var entry in dir)
            {
                header.BaseStream.Position = dir_pos;
                header.Write((uint)0x656c6946); // "File"
                long header_size_pos = header.BaseStream.Position;
                header.Write((long)0); // record size placeholder, patched below
                header.Write((uint)0x6f666e69); // "info"
                header.Write((long)(4 + 8 + 8 + 2 + entry.Name.Length * 2));
                header.Write((uint)(use_encryption ? 0x80000000 : 0));
                header.Write((long)entry.UnpackedSize);
                header.Write((long)entry.Size);
                header.Write((short)entry.Name.Length);
                foreach (char c in entry.Name)
                {
                    header.Write(c);
                }
                header.Write((uint)0x6d676573); // "segm"
                header.Write((long)0x1c);
                var segment = entry.Segments.First();
                header.Write((int)(segment.IsCompressed ? 1 : 0));
                header.Write((long)segment.Offset);
                header.Write((long)segment.Size);
                header.Write((long)segment.PackedSize);
                header.Write((uint)0x726c6461); // "adlr"
                header.Write((long)4);
                header.Write((uint)entry.Hash);
                dir_pos = header.BaseStream.Position;
                // Record size excludes the "File" tag and the size field itself.
                long header_size = dir_pos - header_size_pos - 8;
                header.BaseStream.Position = header_size_pos;
                header.Write(header_size);
            }
            header.BaseStream.Position = 0;
            writer.Write(compress_index);
            long unpacked_dir_size = header.BaseStream.Length;
            if (compress_index)
            {
                if (null != callback)
                {
                    callback(callback_count++, null, arcStrings.MsgCompressingIndex);
                }
                // Packed size is unknown until compression finishes; reserve and patch.
                long packed_dir_size_pos = writer.BaseStream.Position;
                writer.Write((long)0);
                writer.Write(unpacked_dir_size);
                long dir_start = writer.BaseStream.Position;
                using (var zstream = new ZLibStream(writer.BaseStream, CompressionMode.Compress, CompressionLevel.Level9, true))
                    header.BaseStream.CopyTo(zstream);
                long packed_dir_size = writer.BaseStream.Position - dir_start;
                writer.BaseStream.Position = packed_dir_size_pos;
                writer.Write(packed_dir_size);
            }
            else
            {
                writer.Write(unpacked_dir_size);
                header.BaseStream.CopyTo(writer.BaseStream);
            }
        }
    }
    output.Seek(0, SeekOrigin.End);
}
/// <summary>
/// Opens an entry that may span several archive volumes: the needed pieces are
/// concatenated, then XOR, archive decryption, size limiting, and zlib
/// decompression are layered on as required by the archive and entry.
/// </summary>
public override Stream OpenEntry(ArcFile arc, Entry entry)
{
    var paz_arc = arc as PazArchiveBase;
    var paz_entry = entry as PazEntry;
    if (paz_arc == null || paz_entry == null)
        return base.OpenEntry(arc, entry);
    Stream result = null;
    try
    {
        long volume_base = 0;
        long current = paz_entry.Offset;
        long end = paz_entry.Offset + paz_entry.AlignedSize;
        // Walk the volumes, collecting the slice of each that overlaps the entry.
        foreach (var volume in paz_arc.Parts)
        {
            long volume_end = volume_base + volume.MaxOffset;
            if (current < volume_end)
            {
                uint chunk_size = (uint)Math.Min(end - current, volume_end - current);
                var chunk = volume.CreateStream(current - volume_base, chunk_size);
                result = result == null ? chunk : new ConcatStream(result, chunk);
                current += chunk_size;
                if (current >= end)
                    break;
            }
            volume_base = volume_end;
        }
        if (result == null)
            return Stream.Null;
        if (paz_arc.XorKey != 0)
            result = new XoredStream(result, paz_arc.XorKey);
        result = paz_arc.DecryptEntry(result, paz_entry);
        // Trim alignment padding before decompressing.
        if (paz_entry.Size < paz_entry.AlignedSize)
            result = new LimitStream(result, paz_entry.Size);
        if (paz_entry.IsPacked)
            result = new ZLibStream(result, CompressionMode.Decompress);
        return result;
    }
    catch
    {
        // Don't leak partially-built stream chains on failure.
        if (result != null)
            result.Dispose();
        throw;
    }
}
/// <summary>
/// Compresses <paramref name="file"/> into the archive as a sequence of zlib
/// chunks of <c>archive.ChunkSize</c> KiB each. Every chunk is prefixed with its
/// compressed and uncompressed sizes; the first chunk's size pair is
/// XOR-encrypted with a key derived from <c>file.ChunkKey</c>. Chunks are
/// 4-byte aligned and the size/checksum bookkeeping on <paramref name="file"/>
/// is updated as we go.
/// </summary>
private static void WriteCompressedFile(BinaryWriter writer, EArchive archive, EArchiveFileCreation file)
{
    using (Stream input = File.OpenRead(file.FilePath))
    using (BinaryReader reader = new BinaryReader(input))
    {
        int chunkSize = (int)archive.ChunkSize * 1024;
        file.SizeUncompressedOriginal = (int)input.Length;
        file.SizeUncompressed = (int)input.Length;

        //* get the number of chunks
        int chunks = (int)input.Length / chunkSize, remaining = (int)input.Length;
        //* if the integer division wasn't even, add a chunk
        if (input.Length % chunkSize != 0)
        {
            chunks++;
        }

        //* set compressed size to zero and sum as we go
        file.SizeCompressed = 0;
        if (file.ChunkKey == 0)
        {
            // Any non-zero key works; it only seeds the first-chunk size encryption.
            file.ChunkKey = (ushort)(new Random().Next(0, ushort.MaxValue));
        }

        for (int chunk = 0; chunk < chunks; chunk++)
        {
            //* use a memory stream for chunking
            using (MemoryStream memory = new MemoryStream())
            {
                int read = (remaining > chunkSize) ? chunkSize : remaining;
                //* store the chunk of uncompressed data to a memory stream
                memory.Write(reader.ReadBytes(read), 0, read);
                //* move to the start of the chunk
                memory.Seek(0, SeekOrigin.Begin);

                //* now compress it and write it to our compressed stream
                using (MemoryStream compressed = new MemoryStream())
                {
                    using (ZLibStream compressor = new ZLibStream(compressed, CompressionMode.Compress, CompressionLevel.Best, true))
                        memory.CopyTo(compressor);
                    int sizeCompressed = (int)compressed.Length, sizeUncompressed = read;

                    //* write chunk sizes
                    if (chunk == 0)
                    {
                        //* for the first one we need to encrypt it
                        long chunkKey = (MasterChunkKeyA * file.ChunkKey) + MasterChunkKeyB;
                        int compressedKey = (int)(chunkKey >> 32);
                        int uncompressedKey = (int)(chunkKey & 0xFFFFFFFF);
                        sizeCompressed ^= compressedKey;
                        sizeUncompressed ^= uncompressedKey;
                    }
                    writer.Write(sizeCompressed);
                    writer.Write(sizeUncompressed);

                    // Write the chunk payload in one call instead of one virtual
                    // ReadByte/Write round-trip per byte.
                    writer.Write(compressed.GetBuffer(), 0, (int)compressed.Length);

                    //* align for next chunk
                    int alignment = Align(writer, 4);
                    // 8 == the two 4-byte size fields written above.
                    file.SizeCompressed += (int)compressed.Length + 8 + alignment;
                }
                remaining -= read;
            }
        }
    }
}
// Extracts a chunked, zlib-compressed file from the archive to `path + file.Path`.
// Chunks after the first start on a 4-byte boundary; each chunk is prefixed with
// its compressed and uncompressed sizes, and the first pair may be XOR-encrypted
// with a key derived from file.ChunkKey.
private static void ExtractCompressedFile(EArchive archive, EArchiveFile file, BinaryReader reader, string path)
{
    //* get the number of chunks we have
    int chunkSize = (int)archive.ChunkSize * 1024;
    int chunks = file.SizeUncompressed / chunkSize;
    //* if the integer division wasn't even, add 1 more chunk
    if (file.SizeUncompressed % chunkSize != 0)
    {
        chunks++;
    }
    //try {
    using (Stream write = File.Create(path + file.Path))
    {
        using (BinaryWriter writer = new BinaryWriter(write))
        {
            //* loop through each chunk writing data as we go
            for (int index = 0; index < chunks; index++)
            {
                //* align our bytes
                if (index > 0)
                {
                    int offset = 4 - (int)(reader.BaseStream.Position % 4);
                    if (offset > 3)
                    {
                        offset = 0; // already aligned
                    }
                    reader.BaseStream.Seek(offset, SeekOrigin.Current);
                }
                uint sizeCompressed = reader.ReadUInt32(), sizeUncompressed = reader.ReadUInt32();
                //* if our header is encrypted, decrypt the sizes
                if (index == 0 && file.HeaderEncrypted)
                {
                    long chunkKey = (MasterChunkKeyA * file.ChunkKey) + MasterChunkKeyB;
                    uint compressedKey = (uint)(chunkKey >> 32);
                    uint uncompressedKey = (uint)(chunkKey & 0xFFFFFFFF);
                    sizeCompressed ^= compressedKey;
                    sizeUncompressed ^= uncompressedKey;
                }
                using (MemoryStream memory = new MemoryStream())
                {
                    //* store the chunk of compressed data to a memory stream
                    memory.Write(reader.ReadBytes((int)sizeCompressed), 0, (int)sizeCompressed);
                    //* move to the start of the chunk
                    memory.Seek(0, SeekOrigin.Begin);
                    //* now decompress it and write it to our file
                    using (ZLibStream decompressor = new ZLibStream(memory, CompressionMode.Decompress))
                    {
                        // NOTE(review): ReadByte() returns -1 at end-of-stream and the
                        // byte cast turns that into 0xFF — a truncated chunk would be
                        // silently padded with 0xFF instead of failing. Also one virtual
                        // call per byte; consider a buffered copy.
                        for (int position = 0; position < sizeUncompressed; position++)
                        {
                            writer.Write((byte)decompressor.ReadByte());
                        }
                    }
                }
            }
        }
    }
    //}
    //catch (Exception ex) {
    //    throw ex;
    //}
}
/// <summary>
/// Parses an archive whose zlib-compressed index starts at 0x118. For versions
/// above 3 the index interleaves directory records with file records, and
/// directory names are prepended to the file names that follow them.
/// Returns null on any malformed field.
/// </summary>
public override ArcFile TryOpen(ArcView file)
{
    int version = file.View.ReadInt32(0);
    int count = file.View.ReadInt32(4);
    if (!IsSaneCount(count)) { return(null); }
    uint index_size = file.View.ReadUInt32(0xC);
    if (index_size < 2 || index_size > file.MaxOffset) { return(null); }
    // Entry offsets are relative to the end of the packed index.
    long base_offset = 0x118 + index_size;
    using (var mem = file.CreateStream(0x118, index_size))
    using (var z = new ZLibStream(mem, CompressionMode.Decompress))
    using (var index = new BinaryStream(z, file.Name))
    {
        var dir = new List<Entry>(count);
        string cur_dir = "";
        for (int i = 0; i < count; ++i)
        {
            int name_length = index.ReadInt32();
            if (name_length <= 0) { return(null); }
            var name = index.ReadCString(name_length);
            if (version > 3)
            {
                bool is_dir = index.ReadInt32() != 0;
                if (is_dir)
                {
                    // Directory record: remember the name, skip its remaining fields.
                    cur_dir = name;
                    index.ReadInt64();
                    index.ReadInt32();
                    index.ReadInt64();
                    continue;
                }
                if (cur_dir.Length > 0) { name = Path.Combine(cur_dir, name); }
            }
            var entry = Create<PackedEntry>(name);
            entry.Offset = index.ReadUInt32() + base_offset;
            entry.UnpackedSize = index.ReadUInt32();
            index.ReadUInt32(); // skipped field (meaning not evident here)
            uint is_packed = index.ReadUInt32();
            uint packed_size = index.ReadUInt32();
            // Treat a zero packed size as "stored uncompressed" even if flagged packed.
            entry.IsPacked = is_packed != 0 && packed_size != 0;
            if (entry.IsPacked) { entry.Size = packed_size; }
            else { entry.Size = entry.UnpackedSize; }
            if (!entry.CheckPlacement(file.MaxOffset)) { return(null); }
            dir.Add(entry);
        }
        return(new ArcFile(file, this, dir));
    }
}
/// <summary>
/// Round-trip test: compress sample files with ZLibStream at several levels,
/// decompress each archive with the external pigz tool, and verify the result
/// matches the original via SHA256.
/// </summary>
public void ZLibStream_Compress_1()
{
    void Template(string sampleFileName, ZLibCompLevel level)
    {
        string tempDecompFile = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
        string tempArchiveFile = tempDecompFile + ".zz";
        try
        {
            string sampleFile = Path.Combine(TestSetup.SampleDir, sampleFileName);
            using (FileStream sampleFs = new FileStream(sampleFile, FileMode.Open, FileAccess.Read, FileShare.Read))
            using (FileStream archiveFs = new FileStream(tempArchiveFile, FileMode.Create, FileAccess.Write, FileShare.None))
            using (ZLibStream zs = new ZLibStream(archiveFs, ZLibMode.Compress, level, true))
            {
                sampleFs.CopyTo(zs);
                zs.Flush();

                Assert.AreEqual(sampleFs.Length, zs.TotalIn);
                Assert.AreEqual(archiveFs.Length, zs.TotalOut);
            }

            // Process is IDisposable; quote the path so the test survives temp
            // directories containing spaces.
            using (Process proc = new Process
            {
                StartInfo = new ProcessStartInfo
                {
                    UseShellExecute = false,
                    CreateNoWindow = true,
                    FileName = Path.Combine(TestSetup.SampleDir, "pigz.exe"),
                    Arguments = $"-k -d \"{tempArchiveFile}\"",
                }
            })
            {
                proc.Start();
                proc.WaitForExit();
                Assert.IsTrue(proc.ExitCode == 0);
            }

            byte[] decompDigest;
            byte[] originDigest;
            // HashAlgorithm is IDisposable — dispose instead of leaking it.
            using (FileStream fs = new FileStream(sampleFile, FileMode.Open, FileAccess.Read, FileShare.Read))
            using (HashAlgorithm hash = SHA256.Create())
            {
                originDigest = hash.ComputeHash(fs);
            }
            using (FileStream fs = new FileStream(tempDecompFile, FileMode.Open, FileAccess.Read, FileShare.Read))
            using (HashAlgorithm hash = SHA256.Create())
            {
                decompDigest = hash.ComputeHash(fs);
            }
            Assert.IsTrue(originDigest.SequenceEqual(decompDigest));
        }
        finally
        {
            if (File.Exists(tempArchiveFile))
            {
                File.Delete(tempArchiveFile);
            }
            if (File.Exists(tempDecompFile))
            {
                File.Delete(tempDecompFile);
            }
        }
    }

    Template("ex1.jpg", ZLibCompLevel.Default);
    Template("ex2.jpg", ZLibCompLevel.BestCompression);
    Template("ex3.jpg", ZLibCompLevel.BestSpeed);
}
// Opens an "AlicArch" archive.  Header: signature at 8, "INFO" tag at 0x1C,
// version at 0x10, data base offset at 0x18, packed/unpacked index sizes at
// 0x20/0x24, entry count at 0x28.  The index (a zlib stream at 0x2C) holds
// fixed-step records: Int32 name length, Int32 record step, a cp932 name
// padded to the step, two skipped ints (plus one extra for version < 2),
// then UInt32 offset (relative to base) and size.  Falls back to TryOpenV3
// when the signature is absent; returns null on malformed data.
public override ArcFile TryOpen(ArcView file) { if (!file.View.AsciiEqual(8, "AlicArch")) { return(TryOpenV3(file)); } if (!file.View.AsciiEqual(0x1C, "INFO")) { return(null); } int version = file.View.ReadInt32(0x10); long base_offset = file.View.ReadUInt32(0x18); uint packed_size = file.View.ReadUInt32(0x20); int unpacked_size = file.View.ReadInt32(0x24); int count = file.View.ReadInt32(0x28); if (!IsSaneCount(count)) { return(null); } var dir = new List <Entry> (count); var name_buf = new byte[0x40]; using (var input = file.CreateStream(0x2C, packed_size)) using (var zstream = new ZLibStream(input, CompressionMode.Decompress)) using (var index = new BinaryReader(zstream)) { for (int i = 0; i < count; ++i) { int name_length = index.ReadInt32(); int index_step = index.ReadInt32(); if (name_length <= 0 || name_length > index_step || index_step > unpacked_size) { return(null); } if (index_step > name_buf.Length) { name_buf = new byte[index_step]; } if (index_step != index.Read(name_buf, 0, index_step)) { return(null); } var name = Encodings.cp932.GetString(name_buf, 0, name_length); var entry = FormatCatalog.Instance.Create <Entry> (name); index.ReadInt32(); index.ReadInt32(); if (version < 2) { index.ReadInt32(); } entry.Offset = index.ReadUInt32() + base_offset; entry.Size = index.ReadUInt32(); if (!entry.CheckPlacement(file.MaxOffset)) { return(null); } dir.Add(entry); } return(new ArcFile(file, this, dir)); } }
/// <summary>
/// Reads an image whose pixel stream is XOR-obfuscated (key 0x21) and then
/// zlib-compressed; both layers are stripped before delegating to the base
/// class, which parses the plain image format.
/// </summary>
public override ImageData Read(Stream stream, ImageMetaData info)
{
    // Undo the XOR layer first, then inflate the result.
    using (var deobfuscated = new XoredStream(stream, 0x21, true))
    using (var inflater = new ZLibStream(deobfuscated, CompressionMode.Decompress))
    {
        return base.Read(inflater, info);
    }
}
AzArchive ReadIndex(ArcView file, byte[] header, EncryptionScheme scheme) { int ext_count = LittleEndian.ToInt32(header, 4); int count = LittleEndian.ToInt32(header, 8); uint index_length = LittleEndian.ToUInt32(header, 12); if (ext_count < 1 || ext_count > 8 || !IsSaneCount(count) || index_length >= file.MaxOffset) { return(null); } var packed_index = file.View.ReadBytes(header.Length, index_length); if (packed_index.Length != index_length) { return(null); } Decrypt(packed_index, header.Length, scheme.IndexKey); uint checksum = LittleEndian.ToUInt32(packed_index, 0); if (checksum != Adler32.Compute(packed_index, 4, packed_index.Length - 4)) { if (checksum != Crc32.Compute(packed_index, 4, packed_index.Length - 4)) { throw new InvalidFormatException("Index checksum mismatch"); } } uint base_offset = (uint)header.Length + index_length; using (var input = new MemoryStream(packed_index, 4, packed_index.Length - 4)) using (var zstream = new ZLibStream(input, CompressionMode.Decompress)) using (var index = new BinaryReader(zstream)) { var dir = new List <Entry> (count); var name_buffer = new byte[0x20]; for (int i = 0; i < count; ++i) { uint offset = index.ReadUInt32(); uint size = index.ReadUInt32(); uint crc = index.ReadUInt32(); index.ReadInt32(); if (name_buffer.Length != index.Read(name_buffer, 0, name_buffer.Length)) { return(null); } var name = Binary.GetCString(name_buffer, 0, 0x20); if (0 == name.Length) { return(null); } var entry = FormatCatalog.Instance.Create <Entry> (name); entry.Offset = base_offset + offset; entry.Size = size; if (!entry.CheckPlacement(file.MaxOffset)) { return(null); } dir.Add(entry); } uint content_key = GetContentKey(file, dir, scheme); return(new AzArchive(file, this, dir, scheme.IndexKey, content_key)); } }
// Simpler variant of the indexed-archive opener: entry count at 0x04,
// zlib-compressed index size at 0x0C, compressed index at 0x118, data after
// it.  Index records: Int32 name length (capped at 0x100), cp932 name bytes,
// UInt32 offset (relative to base), unpacked size, one skipped UInt32, an
// is-packed flag and the packed size.  No directory records or version
// handling here (compare the version-aware opener elsewhere in this file).
// Returns null on any malformed field.
public override ArcFile TryOpen(ArcView file) { int count = file.View.ReadInt32(4); if (!IsSaneCount(count)) { return(null); } uint index_size = file.View.ReadUInt32(0xC); if (index_size < 2 || index_size > file.MaxOffset) { return(null); } long base_offset = 0x118 + index_size; using (var mem = file.CreateStream(0x118, index_size)) using (var z = new ZLibStream(mem, CompressionMode.Decompress)) using (var index = new BinaryReader(z)) { var name_buffer = new byte[0x100]; var dir = new List <Entry> (count); for (int i = 0; i < count; ++i) { int name_length = index.ReadInt32(); if (name_length <= 0 || name_length > name_buffer.Length) { return(null); } if (name_length != index.Read(name_buffer, 0, name_length)) { return(null); } var name = Encodings.cp932.GetString(name_buffer, 0, name_length); var entry = FormatCatalog.Instance.Create <PackedEntry> (name); entry.Offset = index.ReadUInt32() + base_offset; entry.UnpackedSize = index.ReadUInt32(); index.ReadUInt32(); uint is_packed = index.ReadUInt32(); uint packed_size = index.ReadUInt32(); entry.IsPacked = is_packed != 0 && packed_size != 0; if (entry.IsPacked) { entry.Size = packed_size; } else { entry.Size = entry.UnpackedSize; } if (!entry.CheckPlacement(file.MaxOffset)) { return(null); } dir.Add(entry); } return(new ArcFile(file, this, dir)); } }
// Inspects a plugin's "EncodedFile-<dir>-<file>" INI section without throwing
// on content errors: the base64 blocks are concatenated and re-padded, then
// the final footer (0x24 bytes: CRC32 of everything before it, compressed
// footer length, compressed body length), the zlib-compressed first footer
// (raw/compressed lengths, body CRC, compress mode/level) and finally the
// body itself are validated; results land in the FinalFooterValid /
// FirstFooterValid / CompressedBodyValid / RawBodyValid flags and
// RawBodyStream, so callers can diagnose a broken file.
// NOTE(review): the value at footer offset 0x220 is commented as the CRC of
// the *compressed* body, but it is compared against the CRC of the
// *decompressed* RawBodyStream — confirm which one the format stores.
// NOTE(review): GetKeyValueFromLine/GetKeyValueFromLines returning true is
// treated as failure here — presumably they return an error flag; verify.
public EncodedFileInfo(Plugin p, string dirName, string fileName) { string section = $"EncodedFile-{dirName}-{fileName}"; if (p.Sections.ContainsKey(section) == false) { throw new FileDecodeFailException($"[{dirName}\\{fileName}] does not exists in [{p.FullPath}]"); } List <string> encodedList = p.Sections[$"EncodedFile-{dirName}-{fileName}"].GetLinesOnce(); if (Ini.GetKeyValueFromLine(encodedList[0], out string key, out string value)) { throw new FileDecodeFailException("Encoded lines are malformed"); } // [Stage 1] Concat sliced base64-encoded lines into one string byte[] decoded; { int.TryParse(value, out int blockCount); encodedList.RemoveAt(0); // Remove "lines=n" // Each line is 64KB block if (Ini.GetKeyValueFromLines(encodedList, out List <string> keys, out List <string> base64Blocks)) { throw new FileDecodeFailException("Encoded lines are malformed"); } StringBuilder b = new StringBuilder(); foreach (string block in base64Blocks) { b.Append(block); } switch (b.Length % 4) { case 0: break; case 1: throw new FileDecodeFailException("Encoded lines are malformed"); case 2: b.Append("=="); break; case 3: b.Append("="); break; } decoded = Convert.FromBase64String(b.ToString()); } // [Stage 2] Read final footer const int finalFooterLen = 0x24; int finalFooterIdx = decoded.Length - finalFooterLen; // 0x00 - 0x04 : 4B -> CRC32 uint full_crc32 = BitConverter.ToUInt32(decoded, finalFooterIdx + 0x00); // 0x0C - 0x0F : 4B -> Zlib Compressed Footer Length int compressedFooterLen = (int)BitConverter.ToUInt32(decoded, finalFooterIdx + 0x0C); int compressedFooterIdx = decoded.Length - (finalFooterLen + compressedFooterLen); // 0x10 - 0x17 : 8B -> Zlib Compressed File Length int compressedBodyLen = (int)BitConverter.ToUInt64(decoded, finalFooterIdx + 0x10); // [Stage 3] Validate final footer this.FinalFooterValid = true; if (compressedBodyLen != compressedFooterIdx) { this.FinalFooterValid = false; } uint calcFull_crc32 = Crc32Checksum.Crc32(decoded, 0, finalFooterIdx); if 
(full_crc32 != calcFull_crc32) { this.FinalFooterValid = false; } if (this.FinalFooterValid == false) { return; } // [Stage 4] Decompress first footer byte[] rawFooter; using (MemoryStream rawFooterStream = new MemoryStream()) { using (MemoryStream ms = new MemoryStream(decoded, compressedFooterIdx, compressedFooterLen)) using (ZLibStream zs = new ZLibStream(ms, CompressionMode.Decompress, CompressionLevel.Default)) { zs.CopyTo(rawFooterStream); } rawFooter = rawFooterStream.ToArray(); } // [Stage 5] Read first footer this.FirstFooterValid = true; // 0x200 - 0x207 : 8B -> Length of raw file, in little endian int rawBodyLen = (int)BitConverter.ToUInt32(rawFooter, 0x200); // 0x208 - 0x20F : 8B -> Length of zlib-compressed file, in little endian // Note: In Type 2, 0x208 entry is null - padded int compressedBodyLen2 = (int)BitConverter.ToUInt32(rawFooter, 0x208); // 0x220 - 0x223 : 4B -> CRC32C Checksum of zlib-compressed file uint compressedBody_crc32 = BitConverter.ToUInt32(rawFooter, 0x220); // 0x224 : 1B -> Compress Mode (Type 1 : 00, Type 2 : 01) byte compMode = rawFooter[0x224]; // 0x225 : 1B -> ZLib Compress Level (Type 1 : 01~09, Type 2 : 00) byte compLevel = rawFooter[0x225]; // [Stage 6] Validate first footer if (compMode == 0) { this.Mode = EncodedFile.EncodeMode.Compress; if (compLevel < 1 || 9 < compLevel) { this.FirstFooterValid = false; } if (compressedBodyLen2 == 0 || (compressedBodyLen2 != compressedBodyLen)) { this.FirstFooterValid = false; } } else if (compMode == 1) { this.Mode = EncodedFile.EncodeMode.Raw; if (compLevel != 0) { this.FirstFooterValid = false; } if (compressedBodyLen2 != 0) { this.FirstFooterValid = false; } } else // Wrong compMode { this.FirstFooterValid = false; } if (this.FirstFooterValid == false) { return; } // [Stage 7] Decompress body if (compMode == (ushort)EncodedFile.EncodeMode.Compress) { this.RawBodyStream = new MemoryStream(); using (MemoryStream ms = new MemoryStream(decoded, 0, compressedBodyLen)) using (ZLibStream 
zs = new ZLibStream(ms, CompressionMode.Decompress, CompressionLevel.Default)) { zs.CopyTo(this.RawBodyStream); } this.RawBodyStream.Position = 0; this.CompressedBodyValid = true; } else if (compMode == (ushort)EncodedFile.EncodeMode.Raw) { this.CompressedBodyValid = true; this.RawBodyStream = new MemoryStream(decoded, 0, rawBodyLen); } else { throw new InternalException($"Wrong EncodeMode [{compMode}]"); } // [Stage 8] Validate decompressed body this.RawBodyValid = true; uint calcCompBody_crc32 = Crc32Checksum.Crc32(RawBodyStream.ToArray()); if (compressedBody_crc32 != calcCompBody_crc32) { this.RawBodyValid = false; } // [Stage 9] Return decompressed body stream this.RawBodyStream.Position = 0; }
// Decompresses a v2 image: pixel data is a zlib stream of per-row records.
// For bit depths >= 24 each row begins with a control byte selecting a
// delta predictor applied byte-wise:
//   0 - raw first pixel, then delta from the previous pixel in the same row
//   1 - delta from the same position in the previous row
//   2 - delta from the up-left pixel (previous row, shifted left)
//   3 - delta from the up-right pixel, then a raw trailing pixel
//   4 - per-channel RLE: a repeated value is followed by a run count byte
// Unknown control bytes leave the row untouched.  For bit depths below 24
// rows are stored raw, one src_stride chunk per row into an m_stride-spaced
// output.
private void UnpackV2() { int pixel_size = m_bpp / 8; int src_stride = m_width * pixel_size; using (var zlib = new ZLibStream(m_input.AsStream, CompressionMode.Decompress, true)) using (var src = new BinaryReader(zlib)) { if (m_bpp >= 24) { for (int y = 0; y < m_height; ++y) { byte ctl = src.ReadByte(); int dst = y * m_stride; int prev_row = dst - m_stride; switch (ctl) { case 0: src.Read(m_output, dst, pixel_size); for (int x = pixel_size; x < src_stride; ++x) { m_output[dst + x] = (byte)(src.ReadByte() + m_output[dst + x - pixel_size]); } break; case 1: for (int x = 0; x < src_stride; ++x) { m_output[dst + x] = (byte)(src.ReadByte() + m_output[prev_row + x]); } break; case 2: src.Read(m_output, dst, pixel_size); for (int x = pixel_size; x < src_stride; ++x) { m_output[dst + x] = (byte)(src.ReadByte() + m_output[prev_row + x - pixel_size]); } break; case 3: for (int x = src_stride - pixel_size; x > 0; --x) { m_output[dst++] = (byte)(src.ReadByte() + m_output[prev_row++ + pixel_size]); } src.Read(m_output, dst, pixel_size); break; case 4: for (int i = 0; i < pixel_size; ++i) { int w = m_width; byte val = src.ReadByte(); while (w > 0) { m_output[dst] = val; dst += pixel_size; if (0 == --w) { break; } byte next = src.ReadByte(); if (val == next) { int count = src.ReadByte(); for (int j = 0; j < count; ++j) { m_output[dst] = val; dst += pixel_size; } w -= count; if (w > 0) { val = src.ReadByte(); } } else { val = next; } } dst -= src_stride - 1; } break; default: break; } } } else { int dst = 0; for (int y = 0; y < m_height; ++y) { src.Read(m_output, dst, src_stride); dst += m_stride; } } } }
// Decodes an EncodedFile INI section into its raw body stream.  This is the
// throwing counterpart of EncodedFileInfo: the same pipeline (base64 concat
// and re-pad, final-footer CRC/length validation, zlib-compressed first
// footer, Type 1 zlib / Type 2 raw body) but any inconsistency raises
// FileDecodeFailException instead of setting flags.  The returned
// MemoryStream is positioned at 0 and intentionally left open for the caller.
// NOTE(review): as in EncodedFileInfo, the footer value at 0x220 (commented
// "CRC of the compressed file") is checked against the CRC of the
// *decompressed* body — confirm against the on-disk format.
private static MemoryStream Decode(List <string> encodedList) { if (Ini.GetKeyValueFromLine(encodedList[0], out string key, out string value)) { throw new FileDecodeFailException("Encoded lines are malformed"); } // [Stage 1] Concat sliced base64-encoded lines into one string byte[] decoded; { int.TryParse(value, out int blockCount); encodedList.RemoveAt(0); // Remove "lines=n" // Each line is 64KB block if (Ini.GetKeyValueFromLines(encodedList, out List <string> keys, out List <string> base64Blocks)) { throw new FileDecodeFailException("Encoded lines are malformed"); } StringBuilder b = new StringBuilder(); foreach (string block in base64Blocks) { b.Append(block); } switch (b.Length % 4) { case 0: break; case 1: throw new FileDecodeFailException("Encoded lines are malformed"); case 2: b.Append("=="); break; case 3: b.Append("="); break; } decoded = Convert.FromBase64String(b.ToString()); } // [Stage 2] Read final footer const int finalFooterLen = 0x24; int finalFooterIdx = decoded.Length - finalFooterLen; // 0x00 - 0x04 : 4B -> CRC32 uint full_crc32 = BitConverter.ToUInt32(decoded, finalFooterIdx + 0x00); // 0x0C - 0x0F : 4B -> Zlib Compressed Footer Length int compressedFooterLen = (int)BitConverter.ToUInt32(decoded, finalFooterIdx + 0x0C); int compressedFooterIdx = decoded.Length - (finalFooterLen + compressedFooterLen); // 0x10 - 0x17 : 8B -> Zlib Compressed File Length int compressedBodyLen = (int)BitConverter.ToUInt64(decoded, finalFooterIdx + 0x10); // [Stage 3] Validate final footer if (compressedBodyLen != compressedFooterIdx) { throw new FileDecodeFailException($"Encoded file is corrupted"); } uint calcFull_crc32 = Crc32Checksum.Crc32(decoded, 0, finalFooterIdx); if (full_crc32 != calcFull_crc32) { throw new FileDecodeFailException($"Encoded file is corrupted"); } // [Stage 4] Decompress first footer byte[] rawFooter; using (MemoryStream rawFooterStream = new MemoryStream()) { using (MemoryStream ms = new MemoryStream(decoded, compressedFooterIdx, 
compressedFooterLen)) using (ZLibStream zs = new ZLibStream(ms, CompressionMode.Decompress, CompressionLevel.Default)) { zs.CopyTo(rawFooterStream); } rawFooter = rawFooterStream.ToArray(); } // [Stage 5] Read first footer // 0x200 - 0x207 : 8B -> Length of raw file, in little endian int rawBodyLen = (int)BitConverter.ToUInt32(rawFooter, 0x200); // 0x208 - 0x20F : 8B -> Length of zlib-compressed file, in little endian // Note: In Type 2, 0x208 entry is null - padded int compressedBodyLen2 = (int)BitConverter.ToUInt32(rawFooter, 0x208); // 0x220 - 0x223 : 4B -> CRC32C Checksum of zlib-compressed file uint compressedBody_crc32 = BitConverter.ToUInt32(rawFooter, 0x220); // 0x224 : 1B -> Compress Mode (Type 1 : 00, Type 2 : 01) byte compMode = rawFooter[0x224]; // 0x225 : 1B -> ZLib Compress Level (Type 1 : 01~09, Type 2 : 00) byte compLevel = rawFooter[0x225]; // [Stage 6] Validate first footer if (compMode == 0) // Type 1, zlib { if (compressedBodyLen2 == 0 || (compressedBodyLen2 != compressedBodyLen)) { throw new FileDecodeFailException($"Encoded file is corrupted: compMode"); } if (compLevel < 1 || 9 < compLevel) { throw new FileDecodeFailException($"Encoded file is corrupted: compLevel"); } } else if (compMode == 1) // Type 2, Raw { if (compressedBodyLen2 != 0) { throw new FileDecodeFailException($"Encoded file is corrupted: compMode"); } if (compLevel != 0) { throw new FileDecodeFailException($"Encoded file is corrupted: compLevel"); } } else // Wrong compMode { throw new FileDecodeFailException($"Encoded file is corrupted: compMode"); } // [Stage 7] Decompress body MemoryStream rawBodyStream; // This stream should be alive even after this method returns if (compMode == 0) // Type 1, zlib { rawBodyStream = new MemoryStream(); using (MemoryStream ms = new MemoryStream(decoded, 0, compressedBodyLen)) using (ZLibStream zs = new ZLibStream(ms, CompressionMode.Decompress, false)) { zs.CopyTo(rawBodyStream); } rawBodyStream.Position = 0; } else if (compMode == 1) // 
Type 2, raw { rawBodyStream = new MemoryStream(decoded, 0, rawBodyLen); } else { throw new FileDecodeFailException($"Encoded file is corrupted"); } // [Stage 8] Validate decompressed body uint calcCompBody_crc32 = Crc32Checksum.Crc32(rawBodyStream.ToArray()); if (compressedBody_crc32 != calcCompBody_crc32) { throw new FileDecodeFailException($"Encoded file is corrupted"); } // [Stage 9] Return decompressed body stream rawBodyStream.Position = 0; return(rawBodyStream); }
// Shared body for ZLibStream compression tests: compresses a sample file at
// the given level (through either the Span overloads or Stream.CopyTo,
// depending on useSpan and the target framework), checks that the stream's
// TotalIn/TotalOut counters match the actual lengths, then round-trips the
// archive through an external pigz process and compares SHA-256 digests of
// the original and decompressed files.  Temp files are deleted in the
// finally block regardless of outcome.
private static void CompressTemplate(string sampleFileName, ZLibCompLevel level, bool useSpan) { string tempDecompFile = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); string tempArchiveFile = tempDecompFile + ".zz"; try { ZLibCompressOptions compOpts = new ZLibCompressOptions() { Level = level, LeaveOpen = true, }; string sampleFile = Path.Combine(TestSetup.SampleDir, sampleFileName); using (FileStream sampleFs = new FileStream(sampleFile, FileMode.Open, FileAccess.Read, FileShare.Read)) using (FileStream archiveFs = new FileStream(tempArchiveFile, FileMode.Create, FileAccess.Write, FileShare.None)) using (ZLibStream zs = new ZLibStream(archiveFs, compOpts)) { #if !NETFRAMEWORK if (useSpan) { byte[] buffer = new byte[64 * 1024]; int bytesRead; do { bytesRead = sampleFs.Read(buffer.AsSpan()); zs.Write(buffer.AsSpan(0, bytesRead)); } while (0 < bytesRead); } else #endif { sampleFs.CopyTo(zs); } zs.Flush(); Assert.AreEqual(sampleFs.Length, zs.TotalIn); Assert.AreEqual(archiveFs.Length, zs.TotalOut); } int ret = TestHelper.RunPigz(tempArchiveFile); Assert.IsTrue(ret == 0); byte[] decompDigest; byte[] originDigest; using (FileStream fs = new FileStream(sampleFile, FileMode.Open, FileAccess.Read, FileShare.Read)) { originDigest = TestHelper.SHA256Digest(fs); } using (FileStream fs = new FileStream(tempDecompFile, FileMode.Open, FileAccess.Read, FileShare.Read)) { decompDigest = TestHelper.SHA256Digest(fs); } Assert.IsTrue(originDigest.SequenceEqual(decompDigest)); } finally { if (File.Exists(tempArchiveFile)) { File.Delete(tempArchiveFile); } if (File.Exists(tempDecompFile)) { File.Delete(tempDecompFile); } } }
/// <summary>
/// Inflates a zlib-packed buffer into <paramref name="output"/>.  The first
/// four bytes of <paramref name="input"/> (a size prefix) are skipped.
/// </summary>
/// <param name="input">Buffer holding a 4-byte header followed by zlib data.</param>
/// <param name="length">Number of valid bytes in <paramref name="input"/>.</param>
/// <param name="output">Destination buffer, filled up to its full length.</param>
void UnpackZLib(byte[] input, int length, byte[] output)
{
    using (var zinput = new MemoryStream(input, 4, length - 4))
    using (var z = new ZLibStream(zinput, CompressionMode.Decompress))
    {
        // Stream.Read may legally return fewer bytes than requested (the
        // original single call could leave the tail of `output` unfilled),
        // so loop until the buffer is full or the stream ends.
        int total = 0;
        while (total < output.Length)
        {
            int read = z.Read(output, total, output.Length - total);
            if (0 == read)
                break;
            total += read;
        }
    }
}
// Writes an RPA-3.0 archive.  Layout: 0x22 bytes reserved for the signature
// line, then each file's content (the first up-to-16 bytes of every file are
// kept in the index entry's Header; Offset and UnpackedSize are XORed with
// the archive key), then a zlib-compressed Python pickle of the name ->
// entries table, and finally the "RPA-3.0 <index_offset> <key>\n" signature
// written back at offset 0.  Throws FileSizeException for files over 4 GiB
// and ApplicationException if the signature or pickle cannot be serialized.
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback) { var rpa_options = GetOptions <RpaOptions> (options); int callback_count = 0; var file_table = new Dictionary <PyString, ArrayList>(); long data_offset = 0x22; output.Position = data_offset; foreach (var entry in list) { if (null != callback) { callback(callback_count++, entry, arcStrings.MsgAddingFile); } string name = entry.Name.Replace(@"\", "/"); var rpa_entry = new RpaEntry { Name = name }; using (var file = File.OpenRead(entry.Name)) { var size = file.Length; if (size > uint.MaxValue) { throw new FileSizeException(); } int header_size = (int)Math.Min(size, 0x10); rpa_entry.Offset = output.Position ^ rpa_options.Key; rpa_entry.Header = new byte[header_size]; rpa_entry.UnpackedSize = (uint)size ^ rpa_options.Key; rpa_entry.Size = (uint)(size - header_size); file.Read(rpa_entry.Header, 0, header_size); file.CopyTo(output); } var py_name = new PyString(name); if (file_table.ContainsKey(py_name)) { file_table[py_name].Add(rpa_entry); } else { file_table[py_name] = new ArrayList { rpa_entry } }; } long index_pos = output.Position; string signature = string.Format(CultureInfo.InvariantCulture, "RPA-3.0 {0:x16} {1:x8}\n", index_pos, rpa_options.Key); var header = Encoding.ASCII.GetBytes(signature); if (header.Length > data_offset) { throw new ApplicationException("Signature serialization failed."); } if (null != callback) { callback(callback_count++, null, arcStrings.MsgWritingIndex); } using (var index = new ZLibStream(output, CompressionMode.Compress, CompressionLevel.Level9, true)) { var pickle = new Pickle(index); if (!pickle.Dump(file_table)) { throw new ApplicationException("Archive index serialization failed."); } } output.Position = 0; output.Write(header, 0, header.Length); } }
// v2 image writer, the mirror of UnpackV2: rows are emitted into a zlib
// stream, each prefixed with a control byte choosing the delta predictor.
// NOTE(review): ctl is hard-coded to 0, so only the "delta from previous
// pixel in row" branch ever runs — cases 1-4 are dead code here; and the
// case-4 RLE path always writes a fixed run count of 255, which does not
// mirror the decoder's variable count.  This looks unfinished — confirm
// intent before enabling other predictors.  For bit depths below 24, rows
// are written raw and bypass the zlib stream entirely (straight to m_output),
// which also deserves confirmation against the reader.
private void WriteV2() { int pixel_size = m_bpp / 8; int src_stride = m_width * pixel_size; m_output.Flush(); using (var zlib = new ZLibStream(m_output.BaseStream, CompressionMode.Compress, true)) using (var output = new BinaryWriter(zlib)) { if (m_bpp >= 24) { for (int y = 0; y < m_height; ++y) { byte ctl = 0; output.Write(ctl); int dst = y * m_stride; int prev_row = dst - m_stride; switch (ctl) { case 0: output.Write(m_input, dst, pixel_size); for (int x = pixel_size; x < src_stride; ++x) { output.Write((byte)(m_input[dst + x] - m_input[dst + x - pixel_size])); } break; case 1: for (int x = 0; x < src_stride; ++x) { output.Write((byte)(m_input[dst + x] - m_input[prev_row + x])); } break; case 2: output.Write(m_input, dst, pixel_size); for (int x = pixel_size; x < src_stride; ++x) { output.Write((byte)(m_input[dst + x] - m_input[prev_row + x - pixel_size])); } break; case 3: for (int x = src_stride - pixel_size; x > 0; --x) { output.Write((byte)(m_input[dst++] - m_input[prev_row++ + pixel_size])); } output.Write(m_input, dst, pixel_size); break; case 4: for (int i = 0; i < pixel_size; ++i) { int w = m_width; byte val = m_input[dst]; output.Write(val); while (w > 0) { dst += pixel_size; if (0 == --w) { break; } byte next = m_input[dst]; output.Write(next); if (val == next) { var count = 255; output.Write(count); dst += pixel_size * count; w -= count; if (w > 0) { val = m_input[dst]; output.Write(val); } } else { val = next; } } dst -= src_stride - 1; } break; default: break; } } } else { int dst = 0; for (int y = 0; y < m_height; ++y) { m_output.Write(m_input, dst, src_stride); dst += m_stride; } } } }
// Opens a Ren'Py RPA-3.0 archive.  The ASCII header contains "3.0 " at
// offset 4 (0x20302e33), a 16-hex-digit index offset at 8, and an 8-hex-digit
// XOR key at 0x19.  The index is a zlib-compressed Python pickle: a dict
// mapping UTF-8 names to lists of (offset, size[, header-prefix]) tuples;
// offset and size are XORed with the key, and a non-empty header prefix marks
// the entry as packed (its bytes precede the stored data).  Returns null on
// any parse failure; the Trace calls are diagnostics only.
public override ArcFile TryOpen(ArcView file) { if (0x20302e33 != file.View.ReadUInt32(4)) { return(null); } string index_offset_str = file.View.ReadString(8, 16, Encoding.ASCII); long index_offset; if (!long.TryParse(index_offset_str, NumberStyles.HexNumber, CultureInfo.InvariantCulture, out index_offset)) { return(null); } if (index_offset >= file.MaxOffset) { return(null); } uint key; string key_str = file.View.ReadString(0x19, 8, Encoding.ASCII); if (!uint.TryParse(key_str, NumberStyles.HexNumber, CultureInfo.InvariantCulture, out key)) { return(null); } IDictionary dict = null; using (var index = new ZLibStream(file.CreateStream(index_offset), CompressionMode.Decompress)) { var pickle = new Pickle(index); dict = pickle.Load() as IDictionary; } if (null == dict) { return(null); } var dir = new List <Entry> (dict.Count); foreach (DictionaryEntry item in dict) { var name_raw = item.Key as byte[]; var values = item.Value as IList; if (null == name_raw || null == values || values.Count < 1) { Trace.WriteLine("invalid index entry", "RpaOpener.TryOpen"); return(null); } string name = Encoding.UTF8.GetString(name_raw); if (string.IsNullOrEmpty(name)) { return(null); } var tuple = values[0] as IList; if (null == tuple || tuple.Count < 2) { Trace.WriteLine("invalid index tuple", "RpaOpener.TryOpen"); return(null); } var entry = FormatCatalog.Instance.Create <RpaEntry> (name); entry.Offset = (long)(Convert.ToInt64(tuple[0]) ^ key); entry.UnpackedSize = (uint)(Convert.ToInt32(tuple[1]) ^ key); entry.Size = entry.UnpackedSize; if (tuple.Count > 2) { entry.Header = tuple[2] as byte[]; if (null != entry.Header && entry.Header.Length > 0) { entry.Size -= (uint)entry.Header.Length; entry.IsPacked = true; } } dir.Add(entry); } if (dir.Count > 0) { Trace.TraceInformation("[{0}] [{1:X8}] [{2}]", dir[0].Name, dir[0].Offset, dir[0].Size); } return(new ArcFile(file, this, dir)); }
// Inflates a zlib-packed stream and hands the decompressed data to
// ReadBlocks for frame assembly.
byte[] ReadZlib(Frame frame, Stream packed, bool is_alpha)
{
    using (var inflated = new ZLibStream(packed, CompressionMode.Decompress))
    {
        return ReadBlocks(frame, inflated, is_alpha);
    }
}
// Loads a WOFF (version 1) font from the stream: reads the fixed-size header
// and the table directory, then each table's compressed bytes (tables whose
// CompLength equals OrigLength are stored raw; otherwise they are inflated
// with zlib up to OrigLength), followed by the optional metadata block
// (inflated when MetaLength != MetaOrigLength) and the optional private data
// block.  Returns false on a bad header or any short read; on success sets
// _woffVersion to 1.
// NOTE(review): the decompression loops stop when Read returns 0, so a
// truncated zlib stream silently leaves the tail of origBuffer zeroed instead
// of failing — confirm that tolerance is intended.
private bool ImportWoff1(Stream stream) { int sizeRead = 0; _woffHeader = new SvgWoffHeader(); _woffTables = null; var buffer = new byte[SvgWoffHeader.Woff1Size]; sizeRead = stream.Read(buffer, 0, (int)SvgWoffHeader.Woff1Size); Debug.Assert(sizeRead == SvgWoffHeader.Woff1Size); if (!_woffHeader.SetHeader(buffer)) { return(false); } _woffTables = new List <SvgWoffTableDirectory>(_woffHeader.NumTables); for (int i = 0; i < _woffHeader.NumTables; i++) { buffer = new byte[SvgWoffTableDirectory.Woff1Size]; sizeRead = stream.Read(buffer, 0, (int)SvgWoffTableDirectory.Woff1Size); Debug.Assert(sizeRead == SvgWoffTableDirectory.Woff1Size); var woffTable = new SvgWoffTableDirectory(); if (woffTable.SetHeader(buffer)) { _woffTables.Add(woffTable); } } for (int i = 0; i < _woffHeader.NumTables; i++) { var woffTable = _woffTables[i]; stream.Seek(woffTable.Offset, SeekOrigin.Begin); int bytesRead = 0; int bytesCount = (int)woffTable.CompLength; if (bytesCount == 0) { continue; } var tableBuffer = SvgWoffObject.ReadBytes(stream, bytesCount, out bytesRead); Debug.Assert(bytesRead == bytesCount); if (bytesRead != bytesCount) { return(false); } woffTable.CompTable = tableBuffer; if (woffTable.CompLength == woffTable.OrigLength) { // table data is not compressed woffTable.OrigTable = tableBuffer; } else { bytesCount = (int)woffTable.OrigLength; var origBuffer = new byte[bytesCount]; using (var zlibStream = new ZLibStream(new MemoryStream(tableBuffer), CompressionMode.Decompress, false)) { int bytesStart = 0; do { bytesRead = zlibStream.Read(origBuffer, bytesStart, bytesCount); if (bytesRead == 0) { break; } bytesStart += bytesRead; bytesCount -= bytesRead; } while (bytesCount > 0); } woffTable.OrigTable = origBuffer; } } _metadata = new SvgWoffMetadata(_woffHeader.MetaOffset, _woffHeader.MetaLength, _woffHeader.MetaOrigLength); if (_woffHeader.HasMetadata) { stream.Seek(_woffHeader.MetaOffset, SeekOrigin.Begin); int bytesRead = 0; int bytesCount = (int)_woffHeader.MetaLength; var 
metaBuffer = SvgWoffObject.ReadBytes(stream, bytesCount, out bytesRead); Debug.Assert(bytesRead == bytesCount); if (bytesRead != bytesCount) { return(false); } _metadata.Data = metaBuffer; _metadata.OrigData = metaBuffer; if (_woffHeader.MetaLength != _woffHeader.MetaOrigLength) { bytesCount = (int)_woffHeader.MetaOrigLength; var origBuffer = new byte[bytesCount]; using (var zlibStream = new ZLibStream(new MemoryStream(metaBuffer), CompressionMode.Decompress, false)) { int bytesStart = 0; do { bytesRead = zlibStream.Read(origBuffer, bytesStart, bytesCount); if (bytesRead == 0) { break; } bytesStart += bytesRead; bytesCount -= bytesRead; } while (bytesCount > 0); } _metadata.OrigData = origBuffer; } } _privateData = new SvgWoffPrivateData(_woffHeader.PrivateOffset, _woffHeader.PrivateLength); if (_woffHeader.HasPrivateData) { stream.Seek(_woffHeader.PrivateOffset, SeekOrigin.Begin); int bytesRead = 0; int bytesCount = (int)_woffHeader.PrivateLength; var privateBuffer = SvgWoffObject.ReadBytes(stream, bytesCount, out bytesRead); Debug.Assert(bytesRead == bytesCount); if (bytesRead != bytesCount) { return(false); } _privateData.Data = privateBuffer; } _woffVersion = 1; return(true); }