/// <summary>
/// Wraps a raw GARC entry payload. When the payload begins with the LZSS
/// marker byte (0x11), a best-effort decompression replaces <c>Data</c>.
/// </summary>
/// <param name="data">Raw bytes of the entry as stored in the archive.</param>
public GARCEntry(byte[] data)
{
    Data = data;
    Accessed = true;

    // Only payloads starting with 0x11 are candidates for LZSS decompression.
    if (data.Length == 0 || data[0] != 0x11)
        return;

    try
    {
        using var decompressed = new MemoryStream();
        LZSS.Decompress(new MemoryStream(data), data.Length, decompressed);
        Data = decompressed.ToArray();
        WasCompressed = true;
    }
    catch
    {
        // Marker byte was a false positive; keep the raw bytes (best-effort).
    }
}
/// <summary>
/// Produces the bytes to write back into the GARC, re-applying LZSS
/// compression when the entry was originally stored compressed.
/// </summary>
/// <returns>
/// The entry bytes ready for storage; an empty array if re-compression fails.
/// </returns>
public byte[] Save()
{
    if (!WasCompressed)
        return Data;

    try
    {
        using var compressed = new MemoryStream();
        LZSS.Compress(new MemoryStream(Data), Data.Length, compressed, original: true);
        return compressed.ToArray();
    }
    catch
    {
        // Compression failure is signalled with an empty payload.
        return Array.Empty<byte>();
    }
}
/// <summary>
/// Packs the contents of <paramref name="folderPath"/> into a new GARC archive at
/// <paramref name="garcPath"/>. Top-level files named "N" become single-file entries;
/// numbered subfolders become multi-file (bit-vector) entries. Files named "dec_N"
/// are LZSS-compressed back to "N" during packing (the "dec_" copy is deleted).
/// </summary>
/// <param name="folderPath">Folder of numbered files/folders to pack.</param>
/// <param name="garcPath">Output archive path; any existing file is replaced.</param>
/// <param name="version">GARC version; must be VER_4 or VER_6.</param>
/// <param name="bytesPadding">Alignment applied to each file's data in the FIMB section.</param>
/// <returns>The number of files packed.</returns>
/// <exception cref="DirectoryNotFoundException">Input folder does not exist.</exception>
/// <exception cref="FormatException">Unsupported GARC version.</exception>
/// <exception cref="Exception">File/folder names are not valid pack indices.</exception>
public static int garcPackMS(string folderPath, string garcPath, int version, int bytesPadding)
{
    // Check to see if our input folder exists.
    if (!new DirectoryInfo(folderPath).Exists) { throw new DirectoryNotFoundException("Folder does not exist"); }
    if (version != VER_4 && version != VER_6) { throw new FormatException("Invalid GARC Version: 0x" + version.ToString("X4")); }

    // Okay some basic proofing is done. Proceed.
    int filectr = 0;

    // Get the paths of the files to pack up.
    string[] files = Directory.GetFiles(folderPath);
    string[] folders = Directory.GetDirectories(folderPath, "*.*", SearchOption.TopDirectoryOnly);
    string[] packOrder = new string[files.Length + folders.Length];

    #region Reassemble a list of filenames.
    // The entry index comes from the numeric file/folder name; "dec_N" marks a file
    // that was decompressed on extraction and must be re-compressed as entry N.
    try
    {
        foreach (string f in files)
        {
            string fn = Path.GetFileNameWithoutExtension(f);
            int compressed = fn.IndexOf("dec_", StringComparison.Ordinal);
            int fileNumber = compressed < 0 ? int.Parse(fn) : int.Parse(fn.Substring(compressed + 4));
            packOrder[fileNumber] = f;
            filectr++;
        }
        foreach (string f in folders)
        {
            packOrder[int.Parse(new DirectoryInfo(f).Name)] = f;
            filectr += Directory.GetFiles(f).Length; // count the sub-files, not the folder itself
        }
    }
    catch (Exception e) { throw new Exception("Invalid packing filenames", e); }
    FileCountDetermined?.Invoke(null, new FileCountDeterminedEventArgs { Total = filectr });
    #endregion

    // Set Up the GARC template.
    GARCFile garc = new GARCFile
    {
        ContentPadToNearest = 4,
        fato =
        {
            // Magic = new[] { 'O', 'T', 'A', 'F' },
            Entries = new FATO_Entry[packOrder.Length],
            EntryCount = (ushort)packOrder.Length,
            HeaderSize = 0xC + (packOrder.Length * 4), // 12-byte header + one 4-byte offset per entry
            Padding = 0xFFFF
        },
        fatb =
        {
            // Magic = new[] { 'B', 'T', 'A', 'F' },
            Entries = new FATB_Entry[packOrder.Length],
            FileCount = filectr
        }
    };

    if (version == VER_6)
    {
        // Some files have larger bytes-to-pad values (ex/ 0x80 for a109)
        // Hopefully there's no problems defining this with a constant number.
        garc.ContentPadToNearest = 4;
    }

    #region Start Reassembling the FAT* tables.
    {
        int op = 0;    // running size of the FATB entry table (becomes FATB HeaderSize - 0xC)
        int od = 0;    // running data offset within the FIMB section
        int v = 0;     // existence bit-vector for the current entry
        // NOTE(review): v is only reset (v = 1) in the single-file branch; a folder
        // entry that follows another entry may inherit stale bits — confirm intended.
        int index = 0; // progress counter across all packed files
        for (int i = 0; i < garc.fatb.Entries.Length; i++)
        {
            garc.fato.Entries[i].Offset = op; // FATO offset
            garc.fatb.Entries[i].SubEntries = new FATB_SubEntry[32];
            op += 4; // Vector

            if (!Directory.Exists(packOrder[i])) // is not folder
            {
                garc.fatb.Entries[i].IsFolder = false;
                garc.fatb.Entries[i].SubEntries[0].Exists = true;

                string fn = Path.GetFileNameWithoutExtension(packOrder[i]);
                int compressed = fn.IndexOf("dec_", StringComparison.Ordinal);
                int fileNumber = compressed < 0 ? int.Parse(fn) : int.Parse(fn.Substring(compressed + 4));

                if (compressed >= 0)
                {
                    // Re-compress "dec_N" back to "N". Arguments evaluate left-to-right:
                    // the first is the old path; the second assigns packOrder[i] to the new path.
                    string old = packOrder[i];
                    LZSS.Compress(packOrder[i], packOrder[i] = Path.Combine(Path.GetDirectoryName(packOrder[i]), fileNumber.ToString()));
                    File.Delete(old);
                }

                // Assemble Vector
                v = 1;

                // Assemble Entry
                FileInfo fi = new FileInfo(packOrder[i]);
                // Stored length is padded up to the next multiple of 4.
                int actualLength = (int)(fi.Length % 4 == 0 ? fi.Length : fi.Length + 4 - (fi.Length % 4));
                garc.fatb.Entries[i].SubEntries[0].Start = od;
                garc.fatb.Entries[i].SubEntries[0].End = actualLength + garc.fatb.Entries[i].SubEntries[0].Start;
                garc.fatb.Entries[i].SubEntries[0].Length = (int)fi.Length;
                od += actualLength;
                op += 12; // Start/End/Length words for this sub-entry

                // Step
                PackProgressed?.Invoke(null, new PackProgressedEventArgs { Current = index++, Total = filectr, CurrentFile = packOrder[i] });
            }
            else
            {
                garc.fatb.Entries[i].IsFolder = true;
                string[] subFiles = Directory.GetFiles(packOrder[i]);
                foreach (string f in subFiles)
                {
                    string s = f;
                    string fn = Path.GetFileNameWithoutExtension(f);
                    int compressed = fn.IndexOf("dec_", StringComparison.Ordinal);
                    int fileNumber = compressed < 0 ? int.Parse(fn) : int.Parse(fn.Substring(compressed + 4));
                    garc.fatb.Entries[i].SubEntries[fileNumber].Exists = true;

                    if (compressed >= 0)
                    {
                        // Re-compress to the numeric name; s becomes the compressed path.
                        LZSS.Compress(f, s = Path.Combine(Path.GetDirectoryName(f), fileNumber.ToString()));
                        File.Delete(f);
                    }

                    // Assemble Vector
                    v |= 1 << fileNumber;

                    // Assemble Entry
                    FileInfo fi = new FileInfo(s);
                    int actualLength = (int)(fi.Length % 4 == 0 ? fi.Length : fi.Length + 4 - (fi.Length % 4));
                    garc.fatb.Entries[i].SubEntries[fileNumber].Start = od;
                    garc.fatb.Entries[i].SubEntries[fileNumber].End = actualLength + garc.fatb.Entries[i].SubEntries[fileNumber].Start;
                    garc.fatb.Entries[i].SubEntries[fileNumber].Length = (int)fi.Length;
                    od += actualLength;
                    op += 12;

                    // Step
                    PackProgressed?.Invoke(null, new PackProgressedEventArgs { Current = index++, Total = filectr, CurrentFile = packOrder[i] });
                }
            }
            garc.fatb.Entries[i].Vector = (uint)v;
        }
        garc.fatb.HeaderSize = 0xC + op;
    }
    #endregion

    // Delete the old garc if it exists, then begin writing our new one
    try { File.Delete(garcPath); } catch { }

    // Set up the Header Info
    using (var newGARC = new FileStream(garcPath, FileMode.Create))
    using (BinaryWriter gw = new BinaryWriter(newGARC))
    {
        #region Write GARC Headers
        // Write GARC
        gw.Write((uint)0x47415243); // GARC
        gw.Write((uint)(version == VER_6 ? 0x24 : 0x1C)); // Header Length
        gw.Write((ushort)0xFEFF); // Endianness BOM
        gw.Write((ushort)version); // Version
        gw.Write((uint)0x00000004); // Section Count (4)
        gw.Write((uint)0x00000000); // Data Offset (temp)
        gw.Write((uint)0x00000000); // File Length (temp)
        gw.Write((uint)0x00000000); // Largest File Size (temp)
        if (version == VER_6) { gw.Write((uint)0x0); gw.Write((uint)0x0); }

        // Write FATO
        gw.Write((uint)0x4641544F); // FATO
        gw.Write(garc.fato.HeaderSize); // Header Size
        gw.Write(garc.fato.EntryCount); // Entry Count
        gw.Write(garc.fato.Padding); // Padding
        for (int i = 0; i < garc.fato.Entries.Length; i++)
        {
            gw.Write((uint)garc.fato.Entries[i].Offset);
        }

        // Write FATB
        gw.Write((uint)0x46415442); // FATB
        gw.Write(garc.fatb.HeaderSize); // Header Size
        gw.Write(garc.fatb.FileCount); // File Count
        foreach (var e in garc.fatb.Entries)
        {
            gw.Write(e.Vector);
            // Sub-entries are stored packed: only slots flagged Exists are written.
            foreach (var s in e.SubEntries.Where(s => s.Exists))
            {
                gw.Write((uint)s.Start);
                gw.Write((uint)s.End);
                gw.Write((uint)s.Length);
            }
        }

        // Write FIMB
        gw.Write((uint)0x46494D42); // FIMB
        gw.Write((uint)0x0000000C); // Header Length
        var dataLen = gw.BaseStream.Position; // position of the FIMB data-length word, backfilled later
        gw.Write((uint)0); // Data Length - TEMP

        gw.Seek(0x10, SeekOrigin.Begin); // Goto the start of the un-set 0 data we set earlier and set it.
        var hdrLen = gw.BaseStream.Position; // position of data-offset/length/largest-file words
        gw.Write((uint)0); // Write Data Offset - TEMP
        gw.Write((uint)0); // Write total GARC Length - TEMP

        // Write Handling information
        // (placeholders here — the Largest* stats are still default until the data pass below)
        if (version == VER_4)
        {
            gw.Write(garc.ContentLargestUnpadded); // Write Largest File stat
        }
        else if (version == VER_6)
        {
            gw.Write(garc.ContentLargestPadded); // Write Largest With Padding
            gw.Write(garc.ContentLargestUnpadded); // Write Largest Without Padding
            gw.Write(garc.ContentPadToNearest);
        }

        newGARC.Seek(0, SeekOrigin.End); // Goto the end so we can copy the filedata after the GARC headers.
        #endregion

        #region Write Files
        var ghLength = gw.BaseStream.Length; // total header size == start offset of the FIMB data
        long largestSize = 0; // Required memory to allocate to handle the largest file
        long largestPadded = 0; // Required memory to allocate to handle the largest PADDED file (Ver6 only)
        // NOTE(review): the FAT offsets above assume 4-byte alignment, but this loop
        // pads to bytesPadding — confirm callers always pass 4 (or a value consistent with it).
        foreach (string e in packOrder)
        {
            string[] fa = Directory.Exists(e) ? Directory.GetFiles(e) : new[] { e };
            foreach (string f in fa)
            {
                // Update largest file length if necessary
                long len = new FileInfo(f).Length;
                int padding = (int)(len % bytesPadding);
                if (padding != 0) { padding = bytesPadding - padding; }
                bool largest = len > largestSize;
                if (largest)
                {
                    largestSize = len;
                    largestPadded = len + padding;
                }

                // Write to FIMB
                using (var x = File.OpenRead(f)) x.CopyTo(newGARC);

                // While length is not divisible by 4, pad with FF (unused byte)
                while (padding-- > 0) { gw.Write((byte)0xFF); }
            }
        }
        garc.ContentLargestUnpadded = (uint)largestSize;
        garc.ContentLargestPadded = (uint)largestPadded;
        var gdLength = gw.BaseStream.Length - ghLength;
        #endregion

        // Backfill the temporary words now that the real sizes are known.
        gw.Seek((int)dataLen, SeekOrigin.Begin);
        gw.Write((uint)gdLength); // Data Length
        gw.Seek((int)hdrLen, SeekOrigin.Begin);
        gw.Write((uint)ghLength); // Write Data Offset
        gw.Write((uint)gw.BaseStream.Length); // Write total GARC Length

        // Write Handling information
        if (version == VER_4)
        {
            gw.Write(garc.ContentLargestUnpadded); // Write Largest File stat
        }
        else if (version == VER_6)
        {
            gw.Write(garc.ContentLargestPadded); // Write Largest With Padding
            gw.Write(garc.ContentLargestUnpadded); // Write Largest Without Padding
            gw.Write(garc.ContentPadToNearest);
        }
        return(filectr);
    }
}
/// <summary>
/// Unpacks a GARC archive into a folder of numbered files; entries flagged as
/// folders are extracted into numbered subfolders of two-digit-named sub-files.
/// Payloads starting with the LZSS marker (0x11) are decompressed to "dec_"-prefixed
/// files unless <paramref name="skipDecompression"/> is set.
/// </summary>
/// <param name="garcPath">Path of the GARC archive to read.</param>
/// <param name="outPath">Destination folder; created if missing.</param>
/// <param name="skipDecompression">When true, compressed payloads are written as-is.</param>
/// <returns>The file count reported by the archive's FATB header.</returns>
/// <exception cref="FileNotFoundException">The archive does not exist.</exception>
public static int garcUnpack(string garcPath, string outPath, bool skipDecompression)
{
    if (!File.Exists(garcPath))
    {
        throw new FileNotFoundException("File does not exist");
    }

    // Unpack the GARC
    GARCFile garc = unpackGARC(garcPath);
    const string ext = "bin"; // Default Extension Name
    int fileCount = garc.fatb.FileCount;
    // Zero-pad output names to the digit count of the archive size.
    // NOTE(review): fileCount <= 1 yields a "D0"/non-numeric width — confirm archives are never that small.
    string format = "D" + Math.Ceiling(Math.Log10(fileCount));
    if (outPath == "gametext")
    {
        format = "D3";
    }
    FileCountDetermined?.Invoke(null, new FileCountDeterminedEventArgs { Total = fileCount });

    using (BinaryReader br = new BinaryReader(File.OpenRead(garcPath)))
    {
        // Create Extraction folder if it does not exist.
        if (!Directory.Exists(outPath))
        {
            Directory.CreateDirectory(outPath);
        }

        int filectr = 0;
        // Pull out all the files
        for (int o = 0; o < garc.fato.EntryCount; o++)
        {
            var Entry = garc.fatb.Entries[o];
            // Set Entry File Name
            string fileName = o.ToString(format);

            #region OutDirectory Determination
            string parentFolder = Entry.IsFolder ? Path.Combine(outPath, fileName) : outPath;
            if (Entry.IsFolder) // Process Folder
            {
                Directory.CreateDirectory(parentFolder);
            }
            #endregion

            uint vector = Entry.Vector;
            for (int i = 0; i < 32; i++) // For each bit in vector
            {
                var SubEntry = Entry.SubEntries[i];
                if (!SubEntry.Exists)
                {
                    continue;
                }

                // Seek to Offset
                br.BaseStream.Position = SubEntry.Start + garc.DataOffset;

                // Check if Compressed (LZSS payloads start with 0x11)
                bool compressed = false;
                if (!skipDecompression)
                {
                    try
                    {
                        compressed = (byte)br.PeekChar() == 0x11;
                    }
                    catch
                    {
                        // PeekChar decodes via the reader's encoding and can throw on
                        // arbitrary binary data; treat such payloads as uncompressed.
                    }
                }

                // Write File
                string fileOut = Path.Combine(parentFolder, (Entry.IsFolder ? i.ToString("00") : fileName) + "." + ext);
                // BUGFIX: File.Create truncates a pre-existing file. File.OpenWrite
                // (OpenOrCreate) does not, which left stale trailing bytes when
                // re-extracting over a previously extracted, larger file.
                using (BinaryWriter bw = new BinaryWriter(File.Create(fileOut)))
                {
                    // Write out the data for the file
                    br.BaseStream.Position = SubEntry.Start + garc.DataOffset;
                    bw.Write(br.ReadBytes(SubEntry.Length));
                    filectr++;
                }

                if (compressed)
                #region Decompression
                {
                    // Decompress alongside the original, then delete the compressed copy.
                    string decout = Path.Combine(Path.GetDirectoryName(fileOut), "dec_" + Path.GetFileName(fileOut));
                    try
                    {
                        LZSS.Decompress(fileOut, decout);
                        try
                        {
                            File.Delete(fileOut);
                        }
                        catch (Exception e)
                        {
                            throw new Exception("A compressed file could not be deleted: " + fileOut, e);
                        }
                    }
                    catch
                    {
                        // File is really not encrypted.
                        File.Delete(decout);
                    }
                }
                #endregion

                UnpackProgressed?.Invoke(null, new UnpackProgressedEventArgs { Current = filectr, Total = fileCount });

                // Stop once the vector has no more set bits.
                if ((vector >>= 1) == 0)
                {
                    break;
                }
            }
        }
    }
    return fileCount;
}