// Rebuilding attempts. Almost works, but needs to handle embedded LS entries.
private static unsafe void RebuildArchive(string patchFolder)
{
    LSEntryObject _resMain = lsFile.Entries[calc_crc("resource")];
    byte[] resource = GetFileDataDecompressed(_resMain.DTOffset + (uint)_resMain.PaddingLength, _resMain.Size, _resMain.DTIndex);
    File.WriteAllBytes("resource", resource);

    Console.WriteLine("Rebuilding archive...");
    RFFile rfFile = new RFFile("resource");

    var pathParts = new string[20];
    var offsetParts = new LSEntryObject[20];

    using (FileStream strm = File.Create("dt_rebuild"))
    {
        int lsSize = 0;
        foreach (ResourceEntryObject rsobj in rfFile.ResourceEntries.Where(rsobj => rsobj != null))
        {
            pathParts[rsobj.FolderDepth - 1] = rsobj.EntryString;
            Array.Clear(pathParts, rsobj.FolderDepth, pathParts.Length - (rsobj.FolderDepth + 1));
            var path = string.Join("", pathParts);

            LSEntryObject fileEntry;
            if (rsobj.HasPack)
            {
                // If this is the end of a pack, update its info and write the end padding.
                if (offsetParts.Any(x => x != null))
                {
                    LSEntryObject lsentry = offsetParts.Last(x => x != null);
                    int align = lsSize.RoundUp(0x10) - lsSize;
                    if (lsentry.DTOffset != strm.Position + 0x80)
                        for (int i = 0; i < 0x60 + align; i++)
                            strm.WriteByte(0xbb);
                    lsentry.Size = lsSize;
                }

                // Grab a new pack using the file path we've built, then write the start padding.
                lsSize = 0;
                var crcPath = $"data/{path.TrimEnd('/') + (rsobj.Compressed ? "/packed" : "")}";
                var crc = calc_crc(crcPath);
                lsFile.Entries.TryGetValue(crc, out fileEntry);
                fileEntry.DTOffset = (uint)strm.Position;
                for (int i = 0; i < 0x80; i++, lsSize++)
                    strm.WriteByte(0xcc);
            }
            else
            {
                fileEntry = null;
            }

            offsetParts[rsobj.FolderDepth - 1] = fileEntry;
            Array.Clear(offsetParts, rsobj.FolderDepth, offsetParts.Length - rsobj.FolderDepth);

            if (!path.EndsWith("/") && File.Exists($"{patchFolder}/{path}"))
            {
                LSEntryObject lsentry = offsetParts.Last(x => x != null);
                Console.WriteLine($"{patchFolder}/{path}");
                Logstream.WriteLine($"{patchFolder}/{path}");

                byte[] raw = File.ReadAllBytes($"{patchFolder}/{path}");
                byte[] compressed = Util.Compress(raw);
                int align = compressed.Length.RoundUp(0x10) - compressed.Length;
                long off = strm.Position - lsentry.DTOffset;

                strm.Write(compressed, 0, compressed.Length);

                // Write the file border padding.
                for (int i = 0; i < 0x20 + align; i++)
                    strm.WriteByte(0xcc);

                lsSize += compressed.Length + 0x20 + align;
                rsobj.CmpSize = compressed.Length;
                rsobj.DecSize = raw.Length;
                rsobj.OffInPack = (uint)off;
            }
        }

        // Close out the final pack.
        var entry = offsetParts.Last(x => x != null);
        entry.DTOffset = (uint)strm.Position - (uint)lsSize;
        entry.Size = lsSize;

        // Update resource and LS files.
        rfFile.UpdateEntries();
        byte[] dec = rfFile._workingSource.Slice((int)rfFile.Header.HeaderLen1,
            (int)(rfFile._workingSource.Length - rfFile.Header.HeaderLen1));
        byte[] cmp = Util.Compress(dec);
        rfFile.Header.CompressedLen = (uint)cmp.Length;
        rfFile.Header.DecompressedLen = (uint)dec.Length;
        byte[] header = rfFile.Header.ToArray();
        byte[] full = header.Concat(cmp).ToArray();

        // Append the rebuilt resource file to the new DT file and point every regional
        // resource entry at it.
        long rOff = strm.Position;
        strm.Write(full, 0, full.Length);
        rfFile._workingSource.Close();

        lsFile.Entries[calc_crc("resource")].Size = full.Length;
        lsFile.Entries[calc_crc("resource")].DTOffset = (uint)rOff;
        lsFile.Entries[calc_crc("resource(us_en)")].Size = full.Length;
        lsFile.Entries[calc_crc("resource(us_en)")].DTOffset = (uint)rOff;
        lsFile.Entries[calc_crc("resource(us_fr)")].Size = full.Length;
        lsFile.Entries[calc_crc("resource(us_fr)")].DTOffset = (uint)rOff;
        lsFile.Entries[calc_crc("resource(us_sp)")].Size = full.Length;
        lsFile.Entries[calc_crc("resource(us_sp)")].DTOffset = (uint)rOff;

        lsFile.UpdateEntries();
    }

    if (File.Exists("resource"))
        File.Delete("resource");
    if (File.Exists("resource.dec"))
        File.Delete("resource.dec");
}
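// RoundUp is an extension method that isn't part of this listing. Based on how the alignment
// padding above is computed (lsSize.RoundUp(0x10) - lsSize), it presumably rounds an int up to
// the next multiple of the given alignment. A minimal sketch, assuming power-of-two alignments
// such as 0x10; it would live in a top-level static class outside the program class:
internal static class AlignmentExtensions
{
    public static int RoundUp(this int value, int align)
    {
        // e.g. 0x27.RoundUp(0x10) == 0x30; already-aligned values are returned unchanged.
        return (value + (align - 1)) & ~(align - 1);
    }
}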
/// <summary>
/// Unpacks data from the game archive using the default resource file.
/// </summary>
/// <param name="resourceStr">The resource file to use in extraction</param>
private static void Unpack_Resource(string resourceStr)
{
    string region = "";
    if (resourceStr.Contains("("))
        region = resourceStr.Substring(resourceStr.IndexOf("(", StringComparison.Ordinal), 7);

    LSEntryObject _resource = lsFile.Entries[calc_crc(resourceStr)];
    File.WriteAllBytes(resourceStr,
        GetFileDataDecompressed(_resource.DTOffset + (uint)_resource.PaddingLength, _resource.Size, _resource.DTIndex));

    Console.WriteLine($"Parsing {resourceStr} file..");
    RFFile rfFile = new RFFile(resourceStr);

    var pathParts = new string[20];
    var offsetParts = new LSEntryObject[20];

    foreach (ResourceEntryObject rsobj in rfFile.ResourceEntries)
    {
        if (rsobj == null)
            continue;
        if (region != "" && rsobj.inPatch)
            continue;

        pathParts[rsobj.FolderDepth - 1] = rsobj.EntryString;
        Array.Clear(pathParts, rsobj.FolderDepth, pathParts.Length - rsobj.FolderDepth);
        var path = string.Join("", pathParts);

        LSEntryObject fileEntry;
        if (rsobj.HasPack)
        {
            var crcPath = $"data/{path.TrimEnd('/') + (rsobj.Compressed ? "/packed" : "")}";
            Console.WriteLine(crcPath);
            Logstream.WriteLine(crcPath);
            var crc = calc_crc(crcPath);
            lsFile.Entries.TryGetValue(crc, out fileEntry);
            lsFile.Entries.Remove(crc);
        }
        else
        {
            fileEntry = null;
        }

        offsetParts[rsobj.FolderDepth - 1] = fileEntry;
        Array.Clear(offsetParts, rsobj.FolderDepth, offsetParts.Length - rsobj.FolderDepth);

        if (!path.EndsWith("/"))
        {
            LSEntryObject lsentry = offsetParts.LastOrDefault(x => x != null);
            if (lsentry == null)
                continue;

            var fileData = new byte[0];
            if (rsobj.CmpSize > 0)
                fileData = GetFileDataDecompressed(lsentry.DTOffset + rsobj.OffInPack, rsobj.CmpSize, lsentry.DTIndex);

            if (fileData.Length != rsobj.DecSize)
            {
                Console.WriteLine("Error: File length doesn't match specified decompressed length, skipping");
                Logstream.WriteLine("Error: File length doesn't match specified decompressed length, skipping");
                continue;
            }

            var folder = Path.GetDirectoryName($"data{region}/{path}");
            if (!Directory.Exists(folder))
                Directory.CreateDirectory(folder);
            File.WriteAllBytes($"data{region}/{path}", fileData);
        }
    }
    //totalsize += _resource.Size;

    // Clean up.
    rfFile._workingSource.Close();
    if (File.Exists($"resource{region}.dec"))
        File.Delete($"resource{region}.dec");
    if (File.Exists($"resource{region}"))
        File.Delete($"resource{region}");

    // List any LS entries whose hashes never matched a resource path.
    if (lsFile.Entries.Count > 0)
    {
        Console.Clear();
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("Missing hashes:\n");
        for (int i = 0; i < lsFile.Entries.Count; i++)
            Console.WriteLine(lsFile.Entries.Keys[i].ToString("X8"));
    }
    Console.ReadLine();
    Logstream.Close();
}
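// calc_crc is not included in this listing. The LS table and the resource entries are keyed by
// a CRC-32 of the path string, so a stand-in implementation could look like the sketch below;
// the lower-casing and exact parameters are assumptions, not the tool's confirmed behaviour.
private static readonly uint[] CrcTable = BuildCrcTable();

private static uint[] BuildCrcTable()
{
    // Standard reflected CRC-32 table, polynomial 0xEDB88320.
    var table = new uint[256];
    for (uint i = 0; i < 256; i++)
    {
        uint c = i;
        for (int k = 0; k < 8; k++)
            c = (c & 1) != 0 ? 0xEDB88320u ^ (c >> 1) : c >> 1;
        table[i] = c;
    }
    return table;
}

private static uint calc_crc(string path)
{
    uint crc = 0xFFFFFFFF;
    foreach (byte b in System.Text.Encoding.ASCII.GetBytes(path.ToLowerInvariant()))
        crc = CrcTable[(crc ^ b) & 0xFF] ^ (crc >> 8);
    return crc ^ 0xFFFFFFFF;
}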
private static void Unpack_update(string resFile)
{
    Console.WriteLine("Parsing resource file..");
    RFFile rfFile = new RFFile(resFile);

    var pathParts = new string[20];
    DataSource _curPacked = new DataSource();
    string mainfolder = "";
    string region = "";
    if (resFile.Contains("("))
        region = resFile.Substring(resFile.IndexOf("(", StringComparison.Ordinal), 7);

    foreach (ResourceEntryObject rsobj in rfFile.ResourceEntries)
    {
        if (rsobj == null)
            continue;

        pathParts[rsobj.FolderDepth - 1] = rsobj.EntryString;
        Array.Clear(pathParts, rsobj.FolderDepth, pathParts.Length - (rsobj.FolderDepth + 1));
        var path = $"data{region}/{string.Join("", pathParts)}";

        if (rsobj.HasPack)
        {
            path += (rsobj.Compressed ? "packed" : "");
            if (File.Exists(path))
            {
                _curPacked = new DataSource(FileMap.FromFile(path));
                mainfolder = path.Remove(path.Length - 6);
            }
            continue;
        }

        if (!(rsobj.inPatch && path.Contains(mainfolder) && !string.IsNullOrEmpty(mainfolder)))
            continue;

        if (path.EndsWith("/"))
        {
            if (!Directory.Exists(path))
                Directory.CreateDirectory(path);
        }
        else
        {
            var fileData = new byte[0];
            if (rsobj.CmpSize > 0)
            {
                // Entries starting with the zlib magic (0x78 0x9C) are compressed; copy the rest verbatim.
                byte[] tmp = _curPacked.Slice((int)rsobj.OffInPack, 4);
                if (tmp[0] == 0x78 && tmp[1] == 0x9c)
                    fileData = Util.DeCompress(_curPacked.Slice((int)rsobj.OffInPack, (int)rsobj.CmpSize));
                else
                    fileData = _curPacked.Slice((int)rsobj.OffInPack, (int)rsobj.DecSize);
            }

            Console.WriteLine(path);
            Logstream.WriteLine($"{path} : size: {rsobj.DecSize:X8}");

            if (fileData.Length != rsobj.DecSize)
            {
                Console.WriteLine("Error: File length doesn't match specified decompressed length, quitting");
                Logstream.WriteLine("Error: File length doesn't match specified decompressed length, quitting");
                return;
            }
            File.WriteAllBytes(path, fileData);
        }
    }

    Logstream.Close();
    rfFile._workingSource.Close();
    Console.WriteLine("Extraction finished.");
    if (File.Exists($"resource{region}.dec"))
        File.Delete($"resource{region}.dec");
}
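// Util.Compress and Util.DeCompress are not shown in this listing. Given the zlib magic check
// above, they are assumed to round-trip zlib-wrapped deflate data. A minimal sketch using
// System.IO.Compression.ZLibStream from modern .NET (assumes a using for System.IO.Compression;
// the original tool presumably uses an equivalent zlib binding):
internal static class ZlibUtil
{
    public static byte[] Compress(byte[] data)
    {
        using (var output = new MemoryStream())
        {
            // ZLibStream writes a zlib header and Adler-32 footer around the deflate data.
            using (var zlib = new ZLibStream(output, CompressionLevel.Optimal))
                zlib.Write(data, 0, data.Length);
            return output.ToArray();
        }
    }

    public static byte[] DeCompress(byte[] data)
    {
        using (var input = new MemoryStream(data))
        using (var zlib = new ZLibStream(input, CompressionMode.Decompress))
        using (var output = new MemoryStream())
        {
            zlib.CopyTo(output);
            return output.ToArray();
        }
    }
}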
private static unsafe void PatchArchive(string resourceString, string patchFolder)
{
    LSEntryObject _resource = lsFile.Entries[calc_crc(resourceString)];
    byte[] resource = GetFileDataDecompressed(_resource.DTOffset + (uint)_resource.PaddingLength, _resource.Size, _resource.DTIndex);
    File.WriteAllBytes(resourceString, resource);

    Console.WriteLine($"Patching {resourceString}");
    RFFile rfFile = new RFFile(resourceString);

    var pathParts = new string[20];
    var offsetParts = new LSEntryObject[20];

    foreach (ResourceEntryObject rsobj in rfFile.ResourceEntries)
    {
        if (rsobj == null)
            continue;

        pathParts[rsobj.FolderDepth - 1] = rsobj.EntryString;
        Array.Clear(pathParts, rsobj.FolderDepth, pathParts.Length - (rsobj.FolderDepth + 1));
        var path = string.Join("", pathParts);

        LSEntryObject fileEntry;
        if (rsobj.HasPack)
        {
            var crcPath = $"data/{path.TrimEnd('/') + (rsobj.Compressed ? "/packed" : "")}";
            var crc = calc_crc(crcPath);
            lsFile.Entries.TryGetValue(crc, out fileEntry);
        }
        else
        {
            fileEntry = null;
        }

        offsetParts[rsobj.FolderDepth - 1] = fileEntry;
        Array.Clear(offsetParts, rsobj.FolderDepth, offsetParts.Length - (rsobj.FolderDepth + 1));

        if (!path.EndsWith("/") && File.Exists($"{patchFolder}/{path}"))
        {
            Console.WriteLine($"Patch found: {patchFolder}/{path}");
            Logstream.WriteLine($"Patch found: {patchFolder}/{path}");

            LSEntryObject lsentry = offsetParts.Last(x => x != null);
            byte[] raw = File.ReadAllBytes($"{patchFolder}/{path}");
            byte[] compressed = Util.Compress(raw);

            if (compressed.Length > rsobj.CmpSize + 1)
            {
                Console.WriteLine("Patching files larger than original not yet supported, skipping");
                continue;
            }

            // Remember the original slot size before overwriting the entry's sizes.
            int originalCmpSize = (int)rsobj.CmpSize;
            rsobj.CmpSize = compressed.Length;
            rsobj.DecSize = raw.Length;

            uint difference;
            DataSource src = GetFileChunk(lsentry.DTOffset, lsentry.Size, lsentry.DTIndex, out difference);
            VoidPtr addr = src.Address + difference;
            addr += rsobj.OffInPack;
            for (int i = 0; i < compressed.Length; i++)
                *(byte*)(addr + i) = compressed[i];

            // Write 0xCC over the bytes of the original slot left unused by the smaller replacement.
            addr += compressed.Length;
            int truncateBytes = originalCmpSize - compressed.Length;
            for (int i = 0; i < truncateBytes; i++)
                *(byte*)(addr + i) = 0xCC;
            src.Close();
        }
    }

    // Update resource and LS files.
    rfFile.UpdateEntries();
    byte[] dec = rfFile._workingSource.Slice((int)rfFile.Header.HeaderLen1,
        (int)(rfFile._workingSource.Length - rfFile.Header.HeaderLen1));
    byte[] cmp = Util.Compress(dec);
    rfFile.Header.CompressedLen = (uint)cmp.Length;
    rfFile.Header.DecompressedLen = (uint)dec.Length;
    byte[] header = rfFile.Header.ToArray();
    byte[] full = header.Concat(cmp).ToArray();

    lsFile.Entries[calc_crc(resourceString)].Size = full.Length;
    lsFile.UpdateEntries();

    // Patch the resource data back into the DT file.
    uint diff;
    DataSource rSource = GetFileChunk(_resource.DTOffset, _resource.Size, _resource.DTIndex, out diff);
    VoidPtr curAddr = rSource.Address + diff;
    for (int i = 0; i < full.Length; i++)
        *(byte*)(curAddr + i) = full[i];
    rSource.Close();

    rfFile._workingSource.Close();
    if (File.Exists(resourceString))
        File.Delete(resourceString);
    if (File.Exists(resourceString + ".dec"))
        File.Delete(resourceString + ".dec");
}
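// PatchArchive only patches in place, so a replacement has to recompress to no more than the
// slot the original file occupied. A hypothetical pre-flight helper mirroring that check
// (Util.Compress and ResourceEntryObject as used above; not part of the original tool):
private static bool FitsInOriginalSlot(string patchedFile, ResourceEntryObject rsobj)
{
    byte[] compressed = Util.Compress(File.ReadAllBytes(patchedFile));
    // Same bound PatchArchive enforces before writing into the pack.
    return compressed.Length <= rsobj.CmpSize + 1;
}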
public ResourceCollection LoadRFFile(string rfFilePath, LSFile lsFile)
{
    string region = Utils.GetRegionFromFilename(rfFilePath);
    PatchFileItem currentPackedPatchFile = null;

    //Create new ResourceCollection
    ResourceCollection resCol = new ResourceCollection("data" + region);

    //Use temp folder instead of patch folder
    string tempRF = _TempFolder + Path.GetFileName(rfFilePath);
    File.Copy(rfFilePath, tempRF, true);

    //Load RF file
    RFFile rfFile = new RFFile(tempRF);

    string[] pathParts = new string[20];
    LSEntry[] offsetParts = new LSEntry[20];
    ResourceItem[] pathPartsRes = new ResourceItem[20];

    foreach (ResourceEntry rEntry in rfFile.ResourceEntries)
    {
        if (rEntry == null || string.IsNullOrEmpty(rEntry.EntryString))
        {
            continue;
        }
        if (IsJunkEntry(rEntry))
        {
            continue;
        }

        //Figuring out the path of the entry
        pathParts[rEntry.FolderDepth - 1] = rEntry.EntryString;
        Array.Clear(pathParts, rEntry.FolderDepth, pathParts.Length - rEntry.FolderDepth);

        //New ResourceItem object
        ResourceItem rItem = new ResourceItem(resCol, rEntry.EntryString, rEntry.OffInPack,
            (uint)rEntry.CmpSize, (uint)rEntry.DecSize, rEntry.Packed, string.Join(string.Empty, pathParts));

        //For Treeview
        pathPartsRes[rEntry.FolderDepth - 1] = rItem;
        Array.Clear(pathPartsRes, rEntry.FolderDepth, pathPartsRes.Length - rEntry.FolderDepth);
        if (rEntry.FolderDepth == 1)
        {
            resCol.Nodes.Add(rItem);
        }
        else
        {
            pathPartsRes[rEntry.FolderDepth - 2].Nodes.Add(rItem);
        }

        LSEntry fileEntry = null;
        PatchFileItem patchItem = null;
        if (rEntry.Packed)
        {
            //Check if part of the patch/mod
            patchItem = _PatchFileList.GetPatchFileItem(rItem.AbsolutePath);
            if (patchItem != null)
            {
                if (patchItem.Packed)
                {
                    currentPackedPatchFile = patchItem;
                }
            }
            //Part of LS
            else
            {
                currentPackedPatchFile = null;
                string crcPath = rItem.AbsolutePath.TrimEnd('/') + (rEntry.Directory ? "/packed" : "");
                uint crc = calc_crc(crcPath);
                lsFile.Entries.TryGetValue(crc, out fileEntry);
                if (fileEntry == null)
                {
                    rItem.Source = FileSource.NotFound;
                }
                else
                {
                    lsFile.Entries.Remove(crc);
                    rItem.Source = FileSource.LS;
                }
            }
        }

        //Check if part of the patch/mod
        if (currentPackedPatchFile != null && rItem.AbsolutePath.StartsWith(currentPackedPatchFile.AbsolutePath))
        {
            rItem.Source = FileSource.Patch;
        }
        else if (patchItem != null)
        {
            rItem.Source = FileSource.Patch;
        }
        //Part of LS
        else
        {
            currentPackedPatchFile = null;
            offsetParts[rEntry.FolderDepth - 1] = fileEntry;
            Array.Clear(offsetParts, rEntry.FolderDepth, offsetParts.Length - rEntry.FolderDepth);
            if (!rItem.AbsolutePath.EndsWith("/"))
            {
                rItem.LSEntryInfo = offsetParts.LastOrDefault(x => x != null);
                if (rItem.LSEntryInfo == null)
                {
                    rItem.Source = FileSource.NotFound;
                }
                else
                {
                    rItem.Source = FileSource.LS;
                }
            }
        }
        rItem.PatchItem = currentPackedPatchFile;

        if (rItem.Source != FileSource.NotFound)
        {
            for (int i = 0; i < pathParts.Length; i++)
            {
                if (pathParts[i] != null)
                {
                    string folder = string.Join(string.Empty, pathParts, 0, i);
                    if (!resCol.Resources.ContainsKey(folder))
                    {
                        continue;
                    }
                    if (rItem.Source == FileSource.LS && resCol.Resources[folder].Source == FileSource.Patch)
                    {
                        resCol.Resources[folder].Source = rItem.Source;
                    }
                    else if (resCol.Resources[folder].Source == FileSource.NotFound)
                    {
                        resCol.Resources[folder].Source = rItem.Source;
                    }
                }
            }
        }

        rItem.OriginalFlags = rEntry.Flags;
        resCol.Resources.Add(rItem.RelativePath, rItem);
    }

    rfFile.WorkingSource.Close();
    rfFile.CompressedSource.Close();

    if (File.Exists(tempRF))
    {
        File.Delete(tempRF);
    }
    if (File.Exists(tempRF + ".dec"))
    {
        File.Delete(tempRF + ".dec");
    }

    LogHelper.Info(string.Format("{0} entries: {1}", resCol.ResourceName, resCol.Resources.Count));

    return resCol;
}
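// Standalone illustration of the pathParts "depth stack" used by every loop above: the slot at
// (FolderDepth - 1) holds the current segment, all deeper slots are cleared, and joining the
// array yields the entry's full path. The example entries below are made up.
private static void DemoPathStack()
{
    var pathParts = new string[20];
    var entries = new (int Depth, string Segment)[]
    {
        (1, "fighter/"), (2, "mario/"), (3, "model/"), (4, "body.nud"), (3, "motion/")
    };

    foreach (var (depth, segment) in entries)
    {
        pathParts[depth - 1] = segment;
        Array.Clear(pathParts, depth, pathParts.Length - depth);
        Console.WriteLine(string.Join("", pathParts));
    }
    // Prints, in order:
    //   fighter/
    //   fighter/mario/
    //   fighter/mario/model/
    //   fighter/mario/model/body.nud
    //   fighter/mario/motion/
}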