/// <summary>
/// Serializes this entry back to its on-disk form. Data that was originally
/// stored LZSS-compressed (per <c>WasCompressed</c>, set by <c>Read</c>) is
/// re-compressed before being returned; otherwise the raw bytes are returned
/// unchanged.
/// </summary>
/// <returns>
/// The (possibly re-compressed) payload, or an empty array when compression fails.
/// </returns>
public async Task <byte[]> Save()
{
    // Data that was never compressed on read is written back untouched.
    if (!this.WasCompressed)
    {
        return this.Data;
    }

    byte[] data;
    try
    {
        using (MemoryStream inputStream = new MemoryStream(this.Data))
        using (MemoryStream outputStream = new MemoryStream())
        {
            await Lzss.Compress(inputStream, this.Data.Length, outputStream, original: true);
            data = outputStream.ToArray();
        }
    }
    catch
    {
        // NOTE(review): a failed compression silently yields an empty payload,
        // which callers cannot distinguish from a legitimately empty file —
        // consider propagating or logging the failure instead of swallowing it.
        data = Array.Empty<byte>();
    }

    return data;
}
/// <summary>
/// Loads raw entry bytes and, when the payload begins with the LZSS marker
/// byte (0x11), transparently replaces the stored data with the decompressed
/// form and records that fact in <c>WasCompressed</c>.
/// </summary>
/// <param name="data">Raw entry payload as read from the container.</param>
public async Task Read(byte[] data)
{
    this.Data = data;
    this.Accessed = true;

    // Only non-empty payloads carrying the LZSS marker are decompression candidates.
    if (data.Length == 0 || data[0] != 0x11)
    {
        return;
    }

    try
    {
        using (var source = new MemoryStream(data))
        using (var target = new MemoryStream())
        {
            await Lzss.Decompress(source, data.Length, target);
            this.Data = target.ToArray();
            this.WasCompressed = true;
        }
    }
    catch
    {
        // Decompression failed; keep the raw bytes already assigned above.
    }
}
/// <summary>
/// Looks up <paramref name="path"/> in the archive index and reads its payload
/// from <paramref name="stream"/>, decompressing according to the entry's
/// compression flag (1 = LZSS, 2 = LZ4, anything else = stored raw).
/// </summary>
/// <param name="stream">Seekable stream positioned over the archive contents.</param>
/// <param name="path">Archive-relative path of the entry to read.</param>
/// <returns>The decompressed (or raw) entry bytes.</returns>
/// <exception cref="FileNotFoundException">The path is not present in the archive index.</exception>
/// <exception cref="NotImplementedException">The entry is larger than <see cref="int.MaxValue"/>.</exception>
private byte[] ReadStream(Stream stream, string path)
{
    var key = FileList.GetIndex(path);
    if (key == -1)
    {
        throw new FileNotFoundException("Not found in archive (" + ArchivePath + "): " + path);
    }

    var entry = FileIndex.Entries[key];
    if (entry.Length > int.MaxValue)
    {
        throw new NotImplementedException("Unable to read large file: " + path);
    }

    using (var reader = new BinaryReader(stream))
    {
        stream.Seek(entry.Location, SeekOrigin.Begin);

        switch (entry.Compression)
        {
            case 1:
            {
                // Compressed entries are prefixed with a uint32 stored length.
                var storedLength = reader.ReadUInt32();
                return Lzss.Decompress(reader.ReadBytes((int)storedLength));
            }
            case 2:
            {
                var storedLength = reader.ReadUInt32();
                return Lz4.Decompress(reader.ReadBytes((int)storedLength));
            }
            default:
                // Uncompressed: the index length is the on-disk length.
                return reader.ReadBytes((int)entry.Length);
        }
    }
}
/// <summary>
/// Decompresses an LZSS-compressed binary from <paramref name="inPath"/> to
/// <paramref name="outPath"/>. The previously hard-coded paths are now
/// parameters whose defaults are the original constants, so existing
/// parameterless callers are unaffected.
/// </summary>
/// <param name="inPath">Compressed input file.</param>
/// <param name="outPath">Destination file (overwritten if present).</param>
public async Task Decompress(
    string inPath = @"D:\Users\Arcanox\Documents\3DS\Pokemon X\Unpacked\Vanilla\ExeFS\.code.bin",
    string outPath = @"D:\Users\Arcanox\Documents\3DS\Pokemon X\Unpacked\Vanilla\ExeFS\code.decomp.bin")
{
    using (var inStream = new FileStream(inPath, FileMode.Open, FileAccess.Read))
    using (var tempStream = new MemoryStream())
    using (var outStream = new FileStream(outPath, FileMode.Create, FileAccess.Write))
    {
        // Buffer the whole input so it can be reversed in memory.
        await inStream.CopyToAsync(tempStream);

        // NOTE(review): the entire input is byte-reversed before decompression —
        // confirm Lzss.Decompress really expects reversed data for this container.
        var rData = tempStream.ToArray().Reverse().ToArray();
        using (var temp2Stream = new MemoryStream(rData))
        {
            await Lzss.Decompress(temp2Stream, temp2Stream.Length, outStream);
        }
    }
}
/// <summary>
/// Handles one raw incoming datagram: reads the leading uint32 tag and either
/// decompresses the payload, reassembles a multi-part (split) packet, queues an
/// out-of-band packet, or forwards the datagram directly to ProcessPacket.
/// </summary>
/// <param name="bytes">Raw datagram buffer.</param>
/// <param name="length">Number of valid bytes in <paramref name="bytes"/>.</param>
private void ReceivePacket(byte[] bytes, int length)
{
    ++receivedTotal;
    using (var stream = Bitstream.CreateWith(bytes, length))
    {
        var type = stream.ReadUInt32();
        if (type == COMPRESSED_PACKET)
        {
            // Header is two uint32s (type + method) = 8 bytes; the rest is payload.
            // NOTE(review): 'method' is read but never checked — presumably LZSS
            // is the only method in use; confirm against the protocol spec.
            var method = stream.ReadUInt32();
            var compressed = new byte[length - 8];
            stream.Read(compressed, 0, compressed.Length);
            var decompressed = Lzss.Decompress(compressed);
            ProcessPacket(decompressed, decompressed.Length);
        }
        else if (type == SPLIT_PACKET)
        {
            var request = stream.ReadUInt32();
            var total = stream.ReadByte();
            var index = stream.ReadByte();
            var size = stream.ReadUInt16();

            // Look up (or create) the reassembly state for this request id.
            SplitPacket split;
            if (!splitPackets.ContainsKey(request))
            {
                split = new SplitPacket
                {
                    Request = request,
                    Total = total,
                    Received = 0,
                    Data = new byte[total][],
                    Present = new bool[total]
                };
                splitPackets[request] = split;
            }
            else
            {
                split = splitPackets[request];
            }

            // Clamp the read to what is actually left in the stream.
            // NOTE(review): (Remain + 7) / 8 looks like a bits→bytes rounding,
            // i.e. Bitstream.Remain is presumably a bit count — confirm.
            var buffer = new byte[Math.Min(size, (stream.Remain + 7) / 8)];
            stream.Read(buffer, 0, buffer.Length);
            split.Data[index] = buffer;

            // Count each fragment only once, even if it is retransmitted.
            if (!split.Present[index])
            {
                ++split.Received;
                split.Present[index] = true;
            }

            // Once every fragment is present, concatenate them in index order
            // and re-enter this handler with the reassembled datagram.
            if (split.Received == split.Total)
            {
                var full = split.Data.SelectMany(b => b).ToArray();
                ReceivePacket(full, full.Length);
                splitPackets.Remove(request);
            }
        }
        else if (type == OOB_PACKET)
        {
            // Out-of-band payload: everything after the 4-byte tag gets queued.
            var data = new byte[stream.Length - 4];
            stream.Read(data, 0, data.Length);
            receivedOutOfBand.Enqueue(data);
        }
        else
        {
            // Unrecognized tag: treat the whole datagram as an ordinary packet.
            ProcessPacket(bytes, length);
        }
    }
}
/// <summary>
/// Unpacks every file in a GARC archive to <paramref name="outPath"/>, one
/// numbered file per entry (sub-entries of folder entries go into a numbered
/// sub-directory). Entries whose first byte is 0x11 are additionally
/// LZSS-decompressed to a "dec_"-prefixed file, replacing the raw copy.
/// </summary>
/// <param name="garcPath">Path to the source GARC archive.</param>
/// <param name="outPath">Destination directory (created if missing).</param>
/// <param name="skipDecompression">When true, entries are written raw without the LZSS check.</param>
/// <param name="supress">NOTE(review): when true and the archive is missing, execution
/// continues and UnpackGarc is called on the absent file — confirm the intended
/// semantics of this flag (likely "suppress the missing-file failure").</param>
/// <returns>false if the archive does not exist (and supress is false); otherwise true.</returns>
public static async Task <bool> GarcUnpack(string garcPath, string outPath, bool skipDecompression, bool supress = false)
{
    if (!File.Exists(garcPath) && !supress)
    {
        return(false);
    }

    // Unpack the GARC
    GarcDef garc = await GarcUtil.UnpackGarc(garcPath);
    const string ext = "bin"; // Default Extension Name
    int fileCount = garc.Fatb.FileCount;

    // Zero-pad entry file names to enough digits for fileCount entries;
    // "gametext" output is pinned to 3 digits.
    string format = "D" + Math.Ceiling(Math.Log10(fileCount));
    if (outPath == "gametext")
    {
        format = "D3";
    }

    using (BinaryReader br = new BinaryReader(File.OpenRead(garcPath)))
    {
        // Create Extraction folder if it does not exist.
        if (!Directory.Exists(outPath))
        {
            Directory.CreateDirectory(outPath);
        }

        // Pull out all the files
        for (int o = 0; o < garc.Fato.EntryCount; o++)
        {
            var entry = garc.Fatb.Entries[o];
            // Set Entry File Name
            string fileName = o.ToString(format);

            #region OutDirectory Determination
            // Folder entries get their own numbered sub-directory; plain file
            // entries are written directly into outPath.
            string parentFolder = entry.IsFolder ? Path.Combine(outPath, fileName) : outPath;
            if (entry.IsFolder) // Process Folder
            {
                Directory.CreateDirectory(parentFolder);
            }
            #endregion

            uint vector = entry.Vector;
            for (int i = 0; i < 32; i++) // For each bit in vector
            {
                var subEntry = entry.SubEntries[i];
                if (!subEntry.Exists)
                {
                    continue;
                }

                // Seek to Offset
                br.BaseStream.Position = subEntry.Start + garc.DataOffset;

                // Check if Compressed: LZSS payloads start with the 0x11 marker.
                bool compressed = false;
                if (!skipDecompression)
                {
                    try
                    {
                        compressed = (byte)br.PeekChar() == 0x11;
                    }
                    catch
                    {
                        // ignored — PeekChar can throw on bytes that are not a
                        // valid char in the reader's encoding; treat as "not compressed".
                    }
                }

                // Write File (folder sub-entries use the bit index as the name).
                string fileOut = Path.Combine(parentFolder, (entry.IsFolder ? i.ToString("00") : fileName) + "." + ext);
                using (BinaryWriter bw = new BinaryWriter(File.OpenWrite(fileOut)))
                {
                    // Write out the data for the file (position is re-set
                    // defensively before the bulk copy).
                    br.BaseStream.Position = subEntry.Start + garc.DataOffset;
                    bw.Write(br.ReadBytes(subEntry.Length));
                }

                if (compressed)
                #region Decompression
                {
                    string decout = Path.Combine(Path.GetDirectoryName(fileOut), "dec_" + Path.GetFileName(fileOut));
                    try
                    {
                        await Lzss.Decompress(fileOut, decout);
                        // Decompression succeeded: the raw copy is redundant.
                        try
                        {
                            File.Delete(fileOut);
                        }
                        catch (Exception)
                        {
                            // ignored
                        }
                    }
                    catch
                    {
                        // File is really not encrypted.
                        try
                        {
                            File.Delete(decout);
                        }
                        catch (Exception)
                        {
                            // ignored
                        }
                    }
                }
                #endregion

                // Stop once no higher bits remain set in the existence vector.
                if ((vector >>= 1) == 0)
                {
                    break;
                }
            }
        }
    }
    return(true);
}
/// <summary>
/// Packs the contents of <paramref name="folderPath"/> into a new GARC archive
/// at <paramref name="garcPath"/>. Files are expected to be named by index;
/// a "dec_" prefix marks files that must be LZSS-compressed back first.
/// Sub-directories become folder entries whose files are indexed the same way.
/// </summary>
/// <param name="folderPath">Directory whose numbered files/folders are packed.</param>
/// <param name="garcPath">Output archive path (deleted and recreated).</param>
/// <param name="version">GARC version; must be Version4 or Version6.</param>
/// <param name="bytesPadding">Alignment boundary used when padding file data.</param>
/// <returns>false if the input folder is missing or a name fails to parse; otherwise true.</returns>
/// <exception cref="FormatException">Thrown for an unsupported version value.</exception>
public static async Task <bool> GarcPackMS(string folderPath, string garcPath, int version, int bytesPadding)
{
    // Check to see if our input folder exists.
    if (!new DirectoryInfo(folderPath).Exists)
    {
        return(false);
    }

    if (version != Version4 && version != Version6)
    {
        throw new FormatException("Invalid GARC Version: 0x" + version.ToString("X4"));
    }

    // Okay some basic proofing is done. Proceed.
    int filectr = 0;

    // Get the paths of the files to pack up.
    string[] files = Directory.GetFiles(folderPath);
    string[] folders = Directory.GetDirectories(folderPath, "*.*", SearchOption.TopDirectoryOnly);
    string[] packOrder = new string[files.Length + folders.Length];

    #region Reassemble a list of filenames.
    try
    {
        // Files are named by index; "dec_NN" names carry the index after the
        // prefix. Any parse failure (gaps, stray files) aborts the whole pack.
        foreach (string f in files)
        {
            string fn = Path.GetFileNameWithoutExtension(f);
            if (fn == null)
            {
                continue;
            }
            int compressed = fn.IndexOf("dec_", StringComparison.Ordinal);
            int fileNumber = compressed < 0 ? int.Parse(fn) : int.Parse(fn.Substring(compressed + 4));
            packOrder[fileNumber] = f;
            filectr++;
        }
        foreach (string f in folders)
        {
            packOrder[int.Parse(new DirectoryInfo(f).Name)] = f;
            filectr += Directory.GetFiles(f).Length;
        }
    }
    catch (Exception)
    {
        return(false);
    }
    #endregion

    // Set Up the GARC template.
    GarcDef garc = new GarcDef
    {
        ContentPadToNearest = 4,
        Fato =
        {
            // Magic = new[] { 'O', 'T', 'A', 'F' },
            Entries = new GarcDef.FatoEntry[packOrder.Length],
            EntryCount = (ushort)packOrder.Length,
            HeaderSize = 0xC + packOrder.Length * 4,
            Padding = 0xFFFF
        },
        Fatb =
        {
            // Magic = new[] { 'B', 'T', 'A', 'F' },
            Entries = new GarcDef.FatbEntry[packOrder.Length],
            FileCount = filectr
        }
    };

    if (version == Version6)
    {
        // Some files have larger bytes-to-pad values (ex/ 0x80 for a109)
        // Hopefully there's no problems defining this with a constant number.
        garc.ContentPadToNearest = 4;
    }

    #region Start Reassembling the FAT* tables.
    {
        int op = 0; // running FATO/FATB offset: 4 bytes per vector + 12 per sub-entry
        int od = 0; // running data offset within the FIMB payload
        // NOTE(review): 'v' is not reset at the top of each iteration; a file
        // entry overwrites it (v = 1) but a folder entry OR-accumulates on top
        // of the previous entry's vector — confirm whether that is intended.
        int v = 0;
        for (int i = 0; i < garc.Fatb.Entries.Length; i++)
        {
            garc.Fato.Entries[i].Offset = op; // FATO offset
            garc.Fatb.Entries[i].SubEntries = new GarcDef.FatbSubEntry[32];
            op += 4; // Vector

            if (!Directory.Exists(packOrder[i])) // is not folder
            {
                garc.Fatb.Entries[i].IsFolder = false;
                garc.Fatb.Entries[i].SubEntries[0].Exists = true;

                string fn = Path.GetFileNameWithoutExtension(packOrder[i]);
                if (fn == null)
                {
                    continue;
                }
                int compressed = fn.IndexOf("dec_", StringComparison.Ordinal);
                int fileNumber = compressed < 0 ? int.Parse(fn) : int.Parse(fn.Substring(compressed + 4));
                if (compressed >= 0)
                {
                    // Re-compress "dec_" files to their numbered name and drop
                    // the decompressed copy (packOrder is updated in place by
                    // the embedded assignment).
                    string old = packOrder[i];
                    await Lzss.Compress(packOrder[i], packOrder[i] = Path.Combine(Path.GetDirectoryName(packOrder[i]), fileNumber.ToString()));
                    File.Delete(old);
                }

                // Assemble Vector
                v = 1;

                // Assemble Entry — stored End is padded up to the next multiple of 4.
                FileInfo fi = new FileInfo(packOrder[i]);
                int actualLength = (int)(fi.Length % 4 == 0 ? fi.Length : fi.Length + 4 - fi.Length % 4);
                garc.Fatb.Entries[i].SubEntries[0].Start = od;
                garc.Fatb.Entries[i].SubEntries[0].End = actualLength + garc.Fatb.Entries[i].SubEntries[0].Start;
                garc.Fatb.Entries[i].SubEntries[0].Length = (int)fi.Length;
                od += actualLength;
                op += 12;
            }
            else
            {
                garc.Fatb.Entries[i].IsFolder = true;
                string[] subFiles = Directory.GetFiles(packOrder[i]);
                foreach (string f in subFiles)
                {
                    string s = f;
                    string fn = Path.GetFileNameWithoutExtension(f);
                    if (fn == null)
                    {
                        continue;
                    }
                    int compressed = fn.IndexOf("dec_", StringComparison.Ordinal);
                    int fileNumber = compressed < 0 ? int.Parse(fn) : int.Parse(fn.Substring(compressed + 4));
                    garc.Fatb.Entries[i].SubEntries[fileNumber].Exists = true;
                    if (compressed >= 0)
                    {
                        // Re-compress and point 's' at the compressed output.
                        await Lzss.Compress(f, s = Path.Combine(Path.GetDirectoryName(f), fileNumber.ToString()));
                        File.Delete(f);
                    }

                    // Assemble Vector
                    v |= 1 << fileNumber;

                    // Assemble Entry
                    FileInfo fi = new FileInfo(s);
                    int actualLength = (int)(fi.Length % 4 == 0 ? fi.Length : fi.Length + 4 - fi.Length % 4);
                    garc.Fatb.Entries[i].SubEntries[fileNumber].Start = od;
                    garc.Fatb.Entries[i].SubEntries[fileNumber].End = actualLength + garc.Fatb.Entries[i].SubEntries[fileNumber].Start;
                    garc.Fatb.Entries[i].SubEntries[fileNumber].Length = (int)fi.Length;
                    od += actualLength;
                    op += 12;
                }
            }
            garc.Fatb.Entries[i].Vector = (uint)v;
        }
        garc.Fatb.HeaderSize = 0xC + op;
    }
    #endregion

    // Delete the old garc if it exists, then begin writing our new one
    try
    {
        File.Delete(garcPath);
    }
    catch
    {
        // ignored
    }

    // Set up the Header Info
    using (var newGarc = new FileStream(garcPath, FileMode.Create))
    using (var ms = new MemoryStream())
    using (BinaryWriter gw = new BinaryWriter(ms))
    {
        #region Write GARC Headers
        // Write GARC
        gw.Write((uint)0x47415243); // GARC
        gw.Write((uint)(version == Version6 ? 0x24 : 0x1C)); // Header Length
        gw.Write((ushort)0xFEFF); // Endianness BOM
        gw.Write((ushort)version); // Version
        gw.Write((uint)0x00000004); // Section Count (4)
        gw.Write((uint)0x00000000); // Data Offset (temp)
        gw.Write((uint)0x00000000); // File Length (temp)
        gw.Write((uint)0x00000000); // Largest File Size (temp)
        if (version == Version6)
        {
            gw.Write((uint)0x0);
            gw.Write((uint)0x0);
        }

        // Write FATO
        gw.Write((uint)0x4641544F); // FATO
        gw.Write(garc.Fato.HeaderSize); // Header Size
        gw.Write(garc.Fato.EntryCount); // Entry Count
        gw.Write(garc.Fato.Padding); // Padding
        for (int i = 0; i < garc.Fato.Entries.Length; i++)
        {
            gw.Write((uint)garc.Fato.Entries[i].Offset);
        }

        // Write FATB
        gw.Write((uint)0x46415442); // FATB
        gw.Write(garc.Fatb.HeaderSize); // Header Size
        gw.Write(garc.Fatb.FileCount); // File Count
        foreach (var e in garc.Fatb.Entries)
        {
            gw.Write(e.Vector);
            foreach (var s in e.SubEntries.Where(s => s.Exists))
            {
                gw.Write((uint)s.Start);
                gw.Write((uint)s.End);
                gw.Write((uint)s.Length);
            }
        }

        // Write FIMB
        gw.Write((uint)0x46494D42); // FIMB
        gw.Write((uint)0x0000000C); // Header Length
        var dataLen = gw.BaseStream.Position; // remember where to back-patch Data Length
        gw.Write((uint)0); // Data Length - TEMP

        gw.Seek(0x10, SeekOrigin.Begin); // Goto the start of the un-set 0 data we set earlier and set it.
        var hdrLen = gw.BaseStream.Position; // remember where to back-patch Data Offset / total length
        gw.Write((uint)0); // Write Data Offset - TEMP
        gw.Write((uint)0); // Write total GARC Length - TEMP

        // Write Handling information
        if (version == Version4)
        {
            gw.Write(garc.ContentLargestUnpadded); // Write Largest File stat
        }
        else if (version == Version6)
        {
            gw.Write(garc.ContentLargestPadded); // Write Largest With Padding
            gw.Write(garc.ContentLargestUnpadded); // Write Largest Without Padding
            gw.Write(garc.ContentPadToNearest);
        }

        newGarc.Seek(0, SeekOrigin.End); // Goto the end so we can copy the filedata after the GARC headers.
        #endregion

        #region Write Files
        var ghLength = gw.BaseStream.Length;

        long largestSize = 0; // Required memory to allocate to handle the largest file
        long largestPadded = 0; // Required memory to allocate to handle the largest PADDED file (Ver6 only)
        foreach (string e in packOrder)
        {
            string[] fa = Directory.Exists(e) ? Directory.GetFiles(e) : new[] { e };
            foreach (string f in fa)
            {
                // Update largest file length if necessary
                long len = new FileInfo(f).Length;
                int padding = (int)(len % bytesPadding);
                if (padding != 0)
                {
                    padding = bytesPadding - padding;
                }

                bool largest = len > largestSize;
                if (largest)
                {
                    largestSize = len;
                    largestPadded = len + padding;
                }

                // Write to FIMB
                using (var x = File.OpenRead(f)) x.CopyTo(newGarc);

                // While length is not divisible by 4, pad with FF (unused byte)
                // NOTE(review): the pad bytes go into 'gw' (the header
                // MemoryStream), not into 'newGarc' where the file data was
                // just copied — confirm whether the padding should target newGarc.
                while (padding-- > 0)
                {
                    gw.Write((byte)0xFF);
                }
            }
        }
        garc.ContentLargestUnpadded = (uint)largestSize;
        garc.ContentLargestPadded = (uint)largestPadded;
        var gdLength = gw.BaseStream.Length - ghLength;
        #endregion

        // Back-patch the temporary header fields now that sizes are known.
        gw.Seek((int)dataLen, SeekOrigin.Begin);
        gw.Write((uint)gdLength); // Data Length
        gw.Seek((int)hdrLen, SeekOrigin.Begin);
        gw.Write((uint)ghLength); // Write Data Offset
        gw.Write((uint)gw.BaseStream.Length); // Write total GARC Length

        // Write Handling information
        switch (version)
        {
            case Version4:
                gw.Write(garc.ContentLargestUnpadded); // Write Largest File stat
                break;
            case Version6:
                gw.Write(garc.ContentLargestPadded); // Write Largest With Padding
                gw.Write(garc.ContentLargestUnpadded); // Write Largest Without Padding
                gw.Write(garc.ContentPadToNearest);
                break;
        }

        // NOTE(review): CopyToAsync copies 'ms' from its CURRENT position (just
        // past the back-patched header fields), and 'newGarc' is positioned at
        // its end — so the header bytes before that position never reach the
        // output, and what is copied lands after the file data. A Seek(0) on
        // 'ms' and a rewind of 'newGarc' look intended; verify against a
        // known-good GARC.
        await ms.CopyToAsync(newGarc);
        return(true);
    }
}