/// <summary>
/// Removes the HKX entry matching the given name from the loaded ANIBND
/// container(s) and from the in-memory HKX byte cache.
/// </summary>
/// <param name="name">File name (or name fragment) matched by substring against binder entry names.</param>
/// <exception cref="NotImplementedException">Thrown when the container is not an ANIBND.</exception>
private void DeleteAnimHKX(string name)
{
    if (ContainerType != TaeFileContainerType.ANIBND)
    {
        throw new NotImplementedException("Not supported for anything other than ANIBND right now.");
    }

    // Match by substring, mirroring how entries are looked up elsewhere in this class.
    BinderFile existingFile = containerANIBND.Files.FirstOrDefault(ff => ff.Name.Contains(name));
    BinderFile existingFile_2010 = containerANIBND_2010?.Files.FirstOrDefault(ff => ff.Name.Contains(name));

    if (existingFile != null)
    {
        containerANIBND.Files.Remove(existingFile);
    }

    if (existingFile_2010 != null)
    {
        containerANIBND_2010.Files.Remove(existingFile_2010);
    }

    // Dictionary.Remove is a no-op when the key is absent, so the original
    // ContainsKey check was a redundant second lookup.
    hkxInBND.Remove(name);
}
/// <summary>
/// Writes one BND4/BXF4-style file header entry. Size, offset, and name-offset
/// fields are reserved here and filled in later once the data/name sections are written.
/// </summary>
/// <param name="bw">Writer positioned at the start of this header entry.</param>
/// <param name="file">Binder entry whose header is being written.</param>
/// <param name="index">Entry index, used to key the reserved fields.</param>
/// <param name="format">Binder format flags controlling which fields are present.</param>
public static void Write(BinaryWriterEx bw, BinderFile file, int index, Binder.Format format)
{
    bw.WriteByte((byte)file.Flags);
    // Three bytes of padding after the flags byte.
    bw.WriteByte(0);
    bw.WriteByte(0);
    bw.WriteByte(0);
    bw.WriteInt32(-1);

    bw.ReserveInt64($"FileSize{index}");

    if (Binder.HasUncompressedSize(format))
        bw.WriteInt64(file.Bytes.LongLength);

    // Offset field width depends on the format.
    if (Binder.HasLongOffsets(format))
        bw.ReserveInt64($"FileOffset{index}");
    else
        bw.ReserveUInt32($"FileOffset{index}");

    if (Binder.HasID(format))
        bw.WriteInt32(file.ID);

    bw.ReserveUInt32($"FileName{index}");
}
/// <summary>
/// Adds (or replaces) an animation HKX in the loaded ANIBND, converting between
/// Havok 2010 and 2015 formats as needed so both containers stay in sync.
/// </summary>
/// <param name="name">Animation name without extension, e.g. "a00_3000".</param>
/// <param name="data">Raw HKX bytes provided by the caller (either 2010 or 2015 format).</param>
/// <param name="dataForAnimContainer">The bytes that should be loaded into the animation
/// container: the 2010-format bytes when a downgrade was performed, otherwise the input.</param>
/// <exception cref="NotImplementedException">Thrown when the container is not an ANIBND.</exception>
public void AddNewHKX(string name, byte[] data, out byte[] dataForAnimContainer)
{
    if (ContainerType != TaeFileContainerType.ANIBND)
    {
        throw new NotImplementedException("Not supported for anything other than ANIBND right now.");
    }

    dataForAnimContainer = data;

    // Hoisted: the original recomputed these up to 5 times each.
    string hkxName = name + ".hkx";
    // Binder entry ID derived from the animation name by stripping "a" and "_"
    // (e.g. "a00_3000" -> 003000).
    int fileID = int.Parse(name.Replace("_", "").Replace("a", ""));

    // The guard above already ensures ANIBND, so the original's second
    // `if (ContainerType == TaeFileContainerType.ANIBND)` check was always true.
    DeleteAnimHKX(hkxName);

    if (GameDataManager.GameTypeIsHavokTagfile)
    {
        if (HavokDowngrade.SimpleCheckIfHkxBytesAre2015(data))
        {
            // Caller supplied 2015-format bytes: store them in the main container,
            // then downgrade a copy for the 2010 companion container.
            BinderFile f = new BinderFile(Binder.FileFlags.Flag1, fileID, hkxName, data);
            containerANIBND.Files.Add(f);
            containerANIBND.Files = containerANIBND.Files.OrderBy(bf => bf.ID).ToList();

            byte[] bytes2010 = HavokDowngrade.DowngradeSingleFileInANIBND(containerANIBND, f, isUpgrade: false);
            BinderFile f2010 = new BinderFile(Binder.FileFlags.Flag1, fileID, hkxName, bytes2010);
            containerANIBND_2010.Files.Add(f2010);
            containerANIBND_2010.Files = containerANIBND_2010.Files.OrderBy(bf => bf.ID).ToList();
            containerANIBND_2010_IsModified = true;

            hkxInBND.Add(hkxName, bytes2010);
            dataForAnimContainer = bytes2010;
            IsModified = true;
        }
        else
        {
            // Caller supplied 2010-format bytes: store them in the 2010 container,
            // then upgrade a copy for the main container.
            hkxInBND.Add(hkxName, data);
            BinderFile f2010 = new BinderFile(Binder.FileFlags.Flag1, fileID, hkxName, data);
            containerANIBND_2010.Files.Add(f2010);
            containerANIBND_2010.Files = containerANIBND_2010.Files.OrderBy(bf => bf.ID).ToList();
            containerANIBND_2010_IsModified = true;

            byte[] bytes2015 = HavokDowngrade.DowngradeSingleFileInANIBND(containerANIBND_2010, f2010, isUpgrade: true);
            BinderFile f = new BinderFile(Binder.FileFlags.Flag1, fileID, hkxName, bytes2015);
            containerANIBND.Files.Add(f);
            containerANIBND.Files = containerANIBND.Files.OrderBy(bf => bf.ID).ToList();
            IsModified = true;
        }
    }
    else
    {
        // Non-tagfile games need no conversion; the same bytes go everywhere.
        hkxInBND.Add(hkxName, data);
        BinderFile f = new BinderFile(Binder.FileFlags.Flag1, fileID, hkxName, data);
        containerANIBND.Files.Add(f);
        containerANIBND.Files = containerANIBND.Files.OrderBy(bf => bf.ID).ToList();
        IsModified = true;
    }
}
/// <summary>
/// Loads the main FMG (and optional second "patch" FMG) with the given name from a BND3.
/// </summary>
/// <param name="th">Owning text handler. NOTE(review): unused in this constructor —
/// presumably kept for call-site uniformity; confirm before removing.</param>
/// <param name="bnd">Binder to search for matching FMG entries.</param>
/// <param name="name">File name (without extension) to match exactly.</param>
public FMGHandler(TextHandler th, BND3 bnd, string name)
{
    // Materialize the query once: the original kept it deferred, so ElementAt,
    // Count(), and ElementAtOrDefault each re-scanned the entire binder.
    List<BinderFile> list = bnd.Files
        .Where(f => Path.GetFileNameWithoutExtension(f.Name) == name)
        .ToList();

    FileProper = list[0];
    Proper = FMG.Read(FileProper.Bytes);

    // A second matching entry, if present, is the patch FMG.
    if (list.Count > 1)
    {
        FilePatch = list[1];
        Patch = FMG.Read(FilePatch.Bytes);
    }
}
/// <summary>
/// Wraps a binder entry and immediately attempts to load it as a TAE4,
/// logging success or failure by the entry's file name.
/// </summary>
/// <param name="file">Binder entry containing the TAE4 data.</param>
public TAE4Handler(BinderFile file)
{
    File = file;
    string displayName = Path.GetFileName(file.Name);
    try
    {
        Load();
        UTIL.WriteLog(displayName + " loaded.");
    }
    catch (Exception ex)
    {
        // Load failures are logged rather than propagated.
        UTIL.LogException(displayName + " failed to load.", ex);
    }
}
/// <summary>
/// Builds the FFXBND for a map block by copying in any enemy FFX files that the
/// block's vanilla FFXBND doesn't already contain, then writes it to the mod's sfx folder.
/// </summary>
/// <param name="mapBlockID">Two-digit map block ID (e.g. 10 for m10).</param>
/// <param name="enemyModels">Enemy model IDs placed in this map block.</param>
void WriteMapFFXBND(int mapBlockID, HashSet<int> enemyModels)
{
    HashSet<int> addedIDs = new HashSet<int>();
    string ffxBNDName = $"FRPG_SfxBnd_m{mapBlockID:00}";
    BND3 ffxBND = BND3.Read(GetGameDataResource(ffxBNDName + "_ffxbnd"));
    // New entries get IDs just past the existing sub-100000 range.
    int bndIndex = ffxBND.Files.Where(file => file.ID < 100000).Max(file => file.ID) + 1;
    // Cache of source FFXBNDs so each is read from disk at most once.
    Dictionary<string, BND3> openBNDs = new Dictionary<string, BND3>();

    foreach (int enemyModel in enemyModels)
    {
        foreach (var indexPath in EnemyGenerator.EnemyFFXSources[enemyModel])
        {
            if (addedIDs.Contains(indexPath.Key))
            {
                continue; // already added by another enemy
            }
            if (!indexPath.Value.Contains($"m{mapBlockID:00}")) // if enemy FFX is not already present in current block (or sub) FFXBND file...
            {
                string ffxFileName = $"f{indexPath.Key:0000000}.ffx";

                // TryGetValue avoids the original's ContainsKey + indexer double lookup.
                if (!openBNDs.TryGetValue(indexPath.Value, out BND3 sourceBND))
                {
                    sourceBND = BND3.Read(GetGameDataResource(indexPath.Value + "_ffxbnd"));
                    openBNDs[indexPath.Value] = sourceBND;
                }

                // Lookup should never fail, by construction of FFXInfo. (But just in case it does,
                // I'd rather print an error and have missing VFX.) FirstOrDefault replaces the
                // original Any() + First() pair, which enumerated the query twice.
                BinderFile ffxFile = sourceBND.Files.FirstOrDefault(f => f.Name.EndsWith(ffxFileName));
                if (ffxFile == null)
                {
                    Console.WriteLine($"ERROR: Could not find FFX {ffxFileName} in FFXBND {indexPath.Value}. VFX will be missing; please report to Grim!");
                    continue;
                }

                ffxFile.ID = bndIndex;
                bndIndex++;
                ffxBND.Files.Add(ffxFile);
                addedIDs.Add(indexPath.Key);
            }
        }
    }

    ffxBND.Write(Mod.GameDir + $@"sfx\{ffxBNDName}.ffxbnd.dcx");
}
/// <summary>
/// Reads from a folder full of unpacked binary AI Lua scripts and generates a LUABND
/// for the given map ID name. Note that in DSR, LUAINFO files are not needed
/// (and LUAGNL was never needed).
/// </summary>
/// <param name="luabndName">Output LUABND name (without extension).</param>
/// <param name="battleGoalIDs">Battle goal IDs to pack; must be non-empty.</param>
/// <param name="logicGoalIDs">Optional logic goal IDs, packed before the battle goals.</param>
/// <exception cref="Exception">Thrown when battleGoalIDs is empty.</exception>
void WriteMapLUABND(string luabndName, HashSet<int> battleGoalIDs, HashSet<int> logicGoalIDs = null)
{
    if (!battleGoalIDs.Any())
    {
        throw new Exception("Empty list of battle goal IDs passed to Lua packer, which is not ever expected.");
    }

    BND3 mapLuaBND = new BND3 { Timestamp = "07D7R6" };
    mapLuaBND.Compression = DCX.Type.DarkSouls1;

    int bndID = 1055; // first "safe" ID for loading order, according to vanilla use

    // Shared packing logic; the original duplicated this for logic and battle scripts.
    void AddGoal(int goalID, string suffix)
    {
        string scriptPath = $"_{goalID:000000}_{suffix}";
        string bndPath = $@"N:\FRPG\data\INTERROOT_x64\script\ai\out\bin\{goalID:000000}_{suffix}.lua";
        mapLuaBND.Files.Add(new BinderFile(Binder.FileFlags.x40, bndID, bndPath, GetGameDataResource(scriptPath)));
        bndID++;
    }

    // Logic goals (if any) are packed first, matching the original ID ordering.
    if (logicGoalIDs != null)
    {
        foreach (int goalID in logicGoalIDs)
        {
            AddGoal(goalID, "logic");
        }
    }

    foreach (int goalID in battleGoalIDs)
    {
        AddGoal(goalID, "battle");
    }

    mapLuaBND.Write(Mod.GameDir + $@"script\{luabndName}.luabnd.dcx");
}
/// <summary>
/// Converts a single HKX file from an ANIBND between Havok formats by running the
/// external converter process and blocking until it exits.
/// </summary>
/// <param name="anibnd">Binder searched for a .compendium companion file (the last match wins).</param>
/// <param name="file">The HKX entry to convert.</param>
/// <param name="isUpgrade">True to upgrade (2010 -> 2015), false to downgrade.</param>
/// <returns>The converted bytes, or null if the converter produced no result.</returns>
public static byte[] DowngradeSingleFileInANIBND(IBinder anibnd, BinderFile file, bool isUpgrade)
{
    byte[] resultBytes = null;
    lock (_lock_DoingTask)
    {
        // The converter needs the binder's compendium (if any) saved to the temp dir.
        BinderFile compendium = null;
        foreach (var f in anibnd.Files)
        {
            var nameCheck = f.Name.ToUpper();
            if (nameCheck.EndsWith(".COMPENDIUM"))
            {
                compendium = f;
            }
        }

        CreateTempIfNeeded();

        if (compendium != null)
        {
            SaveCompendiumToTemp(compendium.Bytes);
        }

        // BUGFIX: Process is IDisposable; the original leaked the process handle.
        using (var proc = GetDowngradeHkxProcess(file.Name, file.Bytes, compendium != null, 0,
            (convertProc, result, j) => { resultBytes = result; }, isUpgrade))
        {
            proc.Start();
            proc.WaitForExit();
        }
    }
    return resultBytes;
}
// Writes a BHF4-style header: magic and flags, per-file header entries, name
// strings, and (when Extended == 4) the path hash table used for fast name lookup.
// Size/offset fields are reserved here and filled by the caller or later in this
// method once their positions are known.
internal void Write(BinaryWriterEx bw, List<BinderFile> files)
{
    bw.BigEndian = BigEndian;
    bw.WriteASCII("BHF4");
    bw.WriteBoolean(Flag1);
    bw.WriteBoolean(Flag2);
    bw.WriteByte(0);
    bw.WriteByte(0);
    bw.WriteInt32(0x10000);
    bw.WriteInt32(files.Count);
    bw.WriteInt64(0x40); // offset to the file header section
    bw.WriteFixStr(Timestamp, 8);
    bw.WriteInt64(Binder.FileHeaderSize(Format)); // size of each file header entry
    bw.WriteInt64(0);
    bw.WriteBoolean(Unicode);
    bw.WriteByte((byte)Format);
    bw.WriteByte(Extended);
    bw.WriteByte(0);
    bw.WriteInt32(0);
    if (Extended == 4)
    {
        // Filled in below once the hash table's position is known.
        bw.ReserveInt64("HashGroups");
    }
    else
    {
        bw.WriteInt64(0);
    }

    // Per-file header entries (sizes/offsets reserved inside FileHeader.Write).
    for (int i = 0; i < files.Count; i++)
    {
        FileHeader.Write(bw, files[i], i, Format);
    }

    // Name strings; each header's FileName field is backfilled to point at its string.
    for (int i = 0; i < files.Count; i++)
    {
        BinderFile file = files[i];
        bw.FillUInt32($"FileName{i}", (uint)bw.Position);
        if (Unicode)
        {
            bw.WriteUTF16(file.Name, true);
        }
        else
        {
            bw.WriteShiftJIS(file.Name, true);
        }
    }

    if (Extended == 4)
    {
        // Bucket count is the first prime >= files.Count / 7 (average load ~7 per bucket).
        uint groupCount = 0;
        for (uint p = (uint)files.Count / 7; p <= 100000; p++)
        {
            if (SFUtil.IsPrime(p))
            {
                groupCount = p;
                break;
            }
        }
        if (groupCount == 0)
        {
            throw new InvalidOperationException("Hash group count not determined in BXF4.");
        }

        // Distribute each file's path hash into its bucket (hash % groupCount).
        var hashLists = new List<PathHash>[groupCount];
        for (int i = 0; i < groupCount; i++)
        {
            hashLists[i] = new List<PathHash>();
        }
        for (int i = 0; i < files.Count; i++)
        {
            var pathHash = new PathHash(i, files[i].Name);
            uint group = pathHash.Hash % groupCount;
            hashLists[group].Add(pathHash);
        }
        // Sort within each bucket by hash value.
        for (int i = 0; i < groupCount; i++)
        {
            hashLists[i].Sort((ph1, ph2) => ph1.Hash.CompareTo(ph2.Hash));
        }

        // Flatten the buckets into one contiguous list, recording each bucket's
        // start index and length as a HashGroup.
        var hashGroups = new List<HashGroup>();
        var pathHashes = new List<PathHash>();
        int count = 0;
        foreach (List<PathHash> hashList in hashLists)
        {
            int index = count;
            foreach (PathHash pathHash in hashList)
            {
                pathHashes.Add(pathHash);
                count++;
            }
            hashGroups.Add(new HashGroup(index, count - index));
        }

        bw.Pad(0x8);
        bw.FillInt64("HashGroups", bw.Position);
        bw.ReserveInt64("PathHashes");
        bw.WriteUInt32(groupCount);
        bw.WriteInt32(0x00080810);
        foreach (HashGroup hashGroup in hashGroups)
        {
            hashGroup.Write(bw);
        }
        // No padding after section 1
        bw.FillInt64("PathHashes", bw.Position);
        foreach (PathHash pathHash in pathHashes)
        {
            pathHash.Write(bw);
        }
    }
}
/// <summary>
/// Reconstructs parambnd files from the SQLite database and writes them to disk:
/// bnd_metadata -> BND3 shells, bnd_contents -> BinderFile entries, paramdef tables ->
/// PARAMDEF objects, then each param table's rows are serialized into PARAM bytes.
/// </summary>
/// <param name="con">Open SQLite connection containing the exported param data.</param>
/// <param name="outputPath">If non-null, output files go to this directory instead of
/// their recorded source paths.</param>
/// <param name="overwriteOutputFiles">If false, an existing output file is moved to a
/// timestamped .bak before writing.</param>
public static void WriteParams(SQLiteConnection con, string outputPath, bool overwriteOutputFiles)
{
    // Writing a parambnd
    // Need to construct our BND3 files based on what's in our DB.
    // This is a kludge to create a mapping (filename -> (source_path, BND)).
    var bnds = new Dictionary<string, KeyValuePair<string, BND3>>();

    // First thing to do is get our basic BND file setup.
    using (var cmd = new SQLiteCommand(@"SELECT * FROM 'bnd_metadata'", con))
    {
        var reader = cmd.ExecuteReader();
        while (reader.Read())
        {
            var bnd = new BND3
            {
                BigEndian = reader.GetBoolean(reader.GetOrdinal(@"big_endian")),
                BitBigEndian = reader.GetBoolean(reader.GetOrdinal(@"bit_big_endian")),
                Compression = (DCX.Type)Enum.Parse(typeof(DCX.Type), reader.GetString(reader.GetOrdinal(@"compression"))),
                Format = (Binder.Format)reader.GetInt64(reader.GetOrdinal(@"format")),
                Unk18 = reader.GetInt32(reader.GetOrdinal(@"unk18")),
                Version = reader.GetString(reader.GetOrdinal(@"version")),
                Files = new List<BinderFile>()
            };
            var filename = reader.GetString(reader.GetOrdinal(@"filename"));
            bnds.Add(Path.GetFileName(filename), new KeyValuePair<string, BND3>(filename, bnd));
        }
    }

    // Get our list of files. We'll grab the contents afterwards.
    // Note that it's a List because there can be multiple files associated with a given ParamType.
    var files = new Dictionary<string, KeyValuePair<string, List<BinderFile>>>();
    using (var cmd = new SQLiteCommand(@"SELECT * FROM 'bnd_contents'", con))
    {
        var reader = cmd.ExecuteReader();
        while (reader.Read())
        {
            var source_file = reader.GetString(reader.GetOrdinal(@"source_file"));
            var file = new BinderFile
            {
                ID = reader.GetInt32(reader.GetOrdinal(@"file_id")),
                Name = reader.GetString(reader.GetOrdinal(@"name")),
                Flags = (Binder.FileFlags)reader.GetInt64(reader.GetOrdinal(@"flags")),
                CompressionType = (DCX.Type)System.Enum.Parse(typeof(DCX.Type), reader.GetString(reader.GetOrdinal(@"compression_type")))
            };
            var paramType = reader.GetString(reader.GetOrdinal("param_type"));
            // Add the file to both our list of files in the appropriate BND and also to our dictionary
            // so that we can continue building it out.
            bnds[source_file].Value.Files.Add(file);
            if (files.ContainsKey(Path.GetFileNameWithoutExtension(file.Name)))
            {
                // NOTE(review): the TryGetValue cannot fail here given the ContainsKey
                // check above; if it ever did, the freshly created pair would be
                // discarded after Add (the file would be dropped from `files`).
                var dictValue = files.TryGetValue(Path.GetFileNameWithoutExtension(file.Name), out KeyValuePair<string, List<BinderFile>> value) ?
                    value : new KeyValuePair<string, List<BinderFile>>(paramType, new List<BinderFile>());
                dictValue.Value.Add(file);
            }
            else
            {
                var dictValue = new KeyValuePair<string, List<BinderFile>>(paramType, new List<BinderFile>() { file });
                files.Add(Path.GetFileNameWithoutExtension(file.Name), dictValue);
            }
        }
    }

    // Get all of our PARAMDEFs
    Dictionary<string, PARAMDEF> paramTypeToParamDef = new Dictionary<string, PARAMDEF>();
    using (var cmd = new SQLiteCommand(@"SELECT * FROM 'paramdef_metadata';", con))
    using (var fieldsCmd = new SQLiteCommand(@"SELECT * FROM 'paramdef_fields' WHERE param_type=$param_type;", con))
    {
        var reader = cmd.ExecuteReader();
        while (reader.Read())
        {
            PARAMDEF paramdef = new PARAMDEF
            {
                BigEndian = reader.GetBoolean(reader.GetOrdinal(@"big_endian")),
                Compression = (DCX.Type)Enum.Parse(typeof(DCX.Type), reader.GetString(reader.GetOrdinal(@"compression"))),
                ParamType = reader.GetString(reader.GetOrdinal(@"param_type")),
                Unicode = reader.GetBoolean(reader.GetOrdinal(@"unicode")),
                DataVersion = reader.GetInt16(reader.GetOrdinal(@"data_version")),
                FormatVersion = reader.GetInt16(reader.GetOrdinal(@"format_version"))
            };
            paramTypeToParamDef.Add(paramdef.ParamType, paramdef);
        }
    }

    // Populate each PARAMDEF's field list from paramdef_fields, then validate it.
    using (var cmd = new SQLiteCommand(@"SELECT * FROM 'paramdef_fields' WHERE param_type=$param_type;", con))
    {
        foreach (KeyValuePair<string, PARAMDEF> keyValue in paramTypeToParamDef)
        {
            // Get all the fields for our paramdef
            AddParamToCommand(cmd, @"$param_type", keyValue.Key);
            var fieldReader = cmd.ExecuteReader();
            var fields = new List<Field>();
            while (fieldReader.Read())
            {
                var descOrdinal = fieldReader.GetOrdinal(@"description");
                var field = new Field
                {
                    ArrayLength = fieldReader.GetInt32(fieldReader.GetOrdinal(@"array_length")),
                    BitSize = fieldReader.GetInt32(fieldReader.GetOrdinal(@"bit_size")),
                    Default = fieldReader.GetFloat(fieldReader.GetOrdinal(@"default")),
                    // Description can be NULL. Need to check. Sigh.
                    Description = fieldReader.IsDBNull(descOrdinal) ? null : fieldReader.GetFieldValue<string>(descOrdinal),
                    DisplayFormat = fieldReader.GetString(fieldReader.GetOrdinal(@"display_format")),
                    DisplayName = fieldReader.GetString(fieldReader.GetOrdinal(@"display_name")),
                    DisplayType = (DefType)System.Enum.Parse(typeof(DefType), fieldReader.GetString(fieldReader.GetOrdinal(@"display_type"))),
                    EditFlags = (EditFlags)fieldReader.GetInt64(fieldReader.GetOrdinal(@"edit_flags")),
                    Increment = fieldReader.GetFloat(fieldReader.GetOrdinal(@"increment")),
                    InternalName = fieldReader.GetString(fieldReader.GetOrdinal(@"internal_name")),
                    InternalType = fieldReader.GetString(fieldReader.GetOrdinal(@"internal_type")),
                    Maximum = fieldReader.GetFloat(fieldReader.GetOrdinal(@"maximum")),
                    Minimum = fieldReader.GetFloat(fieldReader.GetOrdinal(@"minimum")),
                    SortID = fieldReader.GetInt32(fieldReader.GetOrdinal(@"sort_id"))
                };
                fields.Add(field);
            }
            keyValue.Value.Fields = fields;
            var exc = new Exception();
            if (!keyValue.Value.Validate(out exc))
            {
                // An invalid paramdef aborts the whole write.
                throw exc;
            }
            fieldReader.Close();
        }
    }

    // Now we need to grab our contents for each file.
    foreach (KeyValuePair<string, KeyValuePair<string, List<BinderFile>>> entry in files)
    {
        // Want to iterate through each file. Keep in mind multiple tables can have same ParamType, so we can't loop via ParamType.
        // e.g. DeS AtkParam_Npc and AtkParam_Pc
        foreach (BinderFile file in entry.Value.Value)
        {
            //var tableName = Path.GetFileNameWithoutExtension(file.Name);
            var tableName = entry.Key;
            Console.WriteLine("Reading from: " + tableName);
            using (var cmd = new SQLiteCommand(@"SELECT * FROM '" + tableName + "';", con))
            using (var metadataCmd = new SQLiteCommand(@"SELECT * FROM param_metadata WHERE param_type = $param_type", con))
            {
                var paramDef = paramTypeToParamDef[entry.Value.Key];
                var paramFile = new PARAM();
                paramFile.ParamType = entry.Value.Key;
                AddParamToCommand(metadataCmd, @"$param_type", entry.Value.Key);
                var metadataReader = metadataCmd.ExecuteReader();
                while (metadataReader.Read())
                {
                    paramFile.BigEndian = metadataReader.GetBoolean(metadataReader.GetOrdinal(@"big_endian"));
                    paramFile.Compression = (DCX.Type)Enum.Parse(typeof(DCX.Type), metadataReader.GetString(metadataReader.GetOrdinal(@"compression")));
                    paramFile.Format2D = (PARAM.FormatFlags1)Enum.Parse(typeof(PARAM.FormatFlags1), metadataReader.GetString(metadataReader.GetOrdinal(@"format2d")));
                    paramFile.Format2E = (PARAM.FormatFlags2)Enum.Parse(typeof(PARAM.FormatFlags2), metadataReader.GetString(metadataReader.GetOrdinal(@"format2e")));
                    // paramdef_format_version is stored as a blob; read its single byte.
                    byte[] buf = new byte[1];
                    metadataReader.GetBytes(metadataReader.GetOrdinal("paramdef_format_version"), 0, buf, 0, 1);
                    paramFile.ParamdefFormatVersion = buf[0];
                    paramFile.Unk06 = metadataReader.GetInt16(metadataReader.GetOrdinal(@"unk06"));
                    paramFile.ParamdefDataVersion = metadataReader.GetInt16(metadataReader.GetOrdinal(@"paramdef_data_version"));
                }
                var reader = cmd.ExecuteReader();
                paramFile.Rows = new List<PARAM.Row>();
                while (reader.Read())
                {
                    var id = reader.GetInt32(reader.GetOrdinal(@"id"));
                    // Description can be NULL
                    var descOrdinal = reader.GetOrdinal(@"description");
                    var description = reader.IsDBNull(descOrdinal) ? null : reader.GetFieldValue<string>(descOrdinal);
                    var row = new PARAM.Row(id, description, paramDef);
                    foreach (Field field in paramDef.Fields)
                    {
                        var name = field.InternalName;
                        // Not using InternalType. I don't know the complete set of raw strings across all games.
                        // It would be better to use not be tied to a display field. For some value of "better".
                        var type = field.DisplayType;
                        switch (type)
                        {
                            // Padding case
                            case DefType.dummy8:
                                int length = field.ArrayLength;
                                if (field.BitSize == -1)
                                {
                                    // Byte-array padding: fill with zeroes.
                                    row[name].Value = Enumerable.Repeat((byte)0, length).ToArray();
                                }
                                else
                                {
                                    // Bitfield padding: a single zero value.
                                    row[name].Value = 0;
                                }
                                break;
                            // All the integer cases
                            case DefType.s8:
                            case DefType.s16:
                            case DefType.s32:
                            case DefType.u8:
                            case DefType.u16:
                            case DefType.u32:
                                row[name].Value = reader.GetInt32(reader.GetOrdinal(name));
                                break;
                            // Float cases
                            case DefType.f32:
                                row[name].Value = reader.GetFloat(reader.GetOrdinal(name));
                                break;
                            // String case
                            case DefType.fixstr:
                            case DefType.fixstrW:
                                row[name].Value = reader.GetString(reader.GetOrdinal(name));
                                break;
                        }
                    }
                    paramFile.Rows.Add(row);
                }
                // Don't apply carefully. We don't have the ability to set the DetectedSize. It only occurs on Read
                paramFile.ApplyParamdef(paramDef);
                var exc = new Exception();
                if (!paramFile.Validate(out exc))
                {
                    // Validation failure is logged but does not abort the write.
                    Console.WriteLine("Failed with exception: " + exc);
                }
                file.Bytes = paramFile.Write();
            }
        }
    }

    // Finally, serialize each BND3 to disk.
    foreach (KeyValuePair<string, KeyValuePair<string, BND3>> entry in bnds)
    {
        // Default to writing the original file.
        // If output path is defined, put everything there.
        var outputFile = entry.Value.Key;
        if (outputPath != null)
        {
            outputFile = outputPath + Path.DirectorySeparatorChar + entry.Key;
            Console.WriteLine("Output current parambnd.dcx: " + outputFile);
        }
        if (!File.Exists(outputFile) || overwriteOutputFiles)
        {
            entry.Value.Value.Write(outputFile);
        }
        else
        {
            // Backup the existing file before writing.
            // Just append the unix time and ".bak" to avoid managing whole sets of backup nonsense.
            var unixTime = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
            string backupFile = outputFile + "." + unixTime + ".bak";
            Console.WriteLine("Collision found. Not overwriting. Moving original file to backup at: " + backupFile);
            File.Move(outputFile, backupFile);
            entry.Value.Value.Write(outputFile);
        }
    }
}
/// <summary>
/// Writes every file in a binder to disk under targetDir and records a manifest
/// entry (flags, ID, root, path, duplicate suffix, compression) into the XML writer.
/// </summary>
/// <param name="bnd">Binder whose files are being unpacked.</param>
/// <param name="xw">XML writer receiving the manifest; a "files" element is emitted.</param>
/// <param name="targetDir">Directory to unpack into (subdirectories are created as needed).</param>
public static void WriteBinderFiles(IBinder bnd, XmlWriter xw, string targetDir)
{
    xw.WriteStartElement("files");
    // Tracks how many times each relative path has occurred so duplicate names
    // get a " (n)" suffix instead of overwriting each other on disk.
    var pathCounts = new Dictionary<string, int>();
    for (int i = 0; i < bnd.Files.Count; i++)
    {
        BinderFile file = bnd.Files[i];

        // Derive the on-disk path: binder name if present, else ID, else index.
        string root = "";
        string path;
        if (Binder.HasNames(bnd.Format))
        {
            path = YBUtil.UnrootBNDPath(file.Name, out root);
        }
        else if (Binder.HasIDs(bnd.Format))
        {
            path = file.ID.ToString();
        }
        else
        {
            path = i.ToString();
        }

        xw.WriteStartElement("file");
        xw.WriteElementString("flags", file.Flags.ToString());
        if (Binder.HasIDs(bnd.Format))
        {
            xw.WriteElementString("id", file.ID.ToString());
        }
        if (root != "")
        {
            xw.WriteElementString("root", root);
        }
        xw.WriteElementString("path", path);

        string suffix = "";
        // TryGetValue avoids the original's ContainsKey + indexer double lookup.
        if (pathCounts.TryGetValue(path, out int seen))
        {
            pathCounts[path] = seen + 1;
            suffix = $" ({seen + 1})";
            xw.WriteElementString("suffix", suffix);
        }
        else
        {
            pathCounts[path] = 1;
        }

        // Zlib is the default, so only non-default compression is recorded.
        if (file.CompressionType != DCX.Type.Zlib)
        {
            xw.WriteElementString("compression_type", file.CompressionType.ToString());
        }
        xw.WriteEndElement();

        string outPath = $@"{targetDir}\{Path.GetDirectoryName(path)}\{Path.GetFileNameWithoutExtension(path)}{suffix}{Path.GetExtension(path)}";
        Directory.CreateDirectory(Path.GetDirectoryName(outPath));
        File.WriteAllBytes(outPath, file.Bytes);
    }
    xw.WriteEndElement();
}
/// <summary>
/// Downgrades every animation HKX (and the skeleton) in an ANIBND from Havok 2015
/// to 2010 format via external converter processes, capped at PARALLEL_CONVERSIONS
/// concurrent conversions, and writes the result alongside the original as "&lt;path&gt;.2010".
/// </summary>
/// <param name="anibndPath">Path to a BND3 or BND4 ANIBND.</param>
/// <param name="prog">Optional progress sink; receives values in [0, 1].</param>
/// <exception cref="Exception">Thrown when the file is neither BND3 nor BND4.</exception>
public static void DowngradeAnibnd(string anibndPath, IProgress<double> prog = null)
{
    lock (_lock_DoingTask)
    {
        IBinder anibnd = null;
        if (BND3.Is(anibndPath))
        {
            anibnd = BND3.Read(anibndPath);
        }
        else if (BND4.Is(anibndPath))
        {
            anibnd = BND4.Read(anibndPath);
        }
        else
        {
            throw new Exception("Invalid ANIBND");
        }

        // Sort binder entries into the skeleton, the compendium, and the animation HKXs.
        List<BinderFile> animations = new List<BinderFile>();
        BinderFile skeleton = null;
        BinderFile compendium = null;
        foreach (var f in anibnd.Files)
        {
            var nameCheck = f.Name.ToUpper();
            if (nameCheck.EndsWith(".COMPENDIUM"))
            {
                compendium = f;
            }
            else if (nameCheck.EndsWith("SKELETON.HKX"))
            {
                skeleton = f;
            }
            else if (nameCheck.EndsWith(".HKX"))
            {
                animations.Add(f);
            }
        }

        // +1 for the final write step; +1 more if a skeleton must be converted too.
        double progTotalFileCount = animations.Count + 1;
        if (skeleton != null)
        {
            progTotalFileCount++;
        }

        CreateTempIfNeeded();

        bool converted_skeleton = false;
        bool[] converted_anim = new bool[animations.Count];
        try
        {
            if (compendium != null)
            {
                SaveCompendiumToTemp(compendium.Bytes);
            }

            if (skeleton != null)
            {
                StartDowngradingHKX(skeleton.Bytes, compendium != null, -1, (result, i) =>
                {
                    skeleton.Bytes = result;
                    converted_skeleton = true;
                });
            }

            // BUGFIX: prog defaults to null, so every report must be null-conditional;
            // the original dereferenced it unconditionally.
            prog?.Report(1 / progTotalFileCount);

            int numAnimsConverted = 0;
            int processCount = 0;
            int animCount = animations.Count;
            for (int i = 0; i < animCount; i++)
            {
                processCount++;
                StartDowngradingHKX(animations[i].Bytes, compendium != null, i, (result, j) =>
                {
                    lock (_lock_animFinished)
                    {
                        animations[j].Bytes = result;
                        converted_anim[j] = true;
                        numAnimsConverted++;
                        prog?.Report(((numAnimsConverted) + 2) / progTotalFileCount);
                        processCount--;
                    }
                });

                // Throttle: wait for a converter slot to free up before launching more.
                while (processCount > PARALLEL_CONVERSIONS)
                {
                    System.Threading.Thread.Sleep(1000);
                }
            }

            // Poll until the skeleton and every animation callback has fired.
            bool everythingFinished = false;
            do
            {
                everythingFinished = true;
                if (skeleton != null && !converted_skeleton)
                {
                    everythingFinished = false;
                }
                if (numAnimsConverted < animCount)
                {
                    everythingFinished = false;
                }
                System.Threading.Thread.Sleep(1000);
            }
            while (!everythingFinished);

            if (File.Exists(anibndPath + ".2010"))
            {
                File.Delete(anibndPath + ".2010");
            }

            if (anibnd is BND3 asBnd3)
            {
                asBnd3.Write(anibndPath + ".2010");
            }
            else if (anibnd is BND4 asBnd4)
            {
                asBnd4.Write(anibndPath + ".2010");
            }

            prog?.Report(1);
        }
        finally
        {
            DeleteTempIfNeeded();
        }
    }
}
/// <summary>
/// Downgrades every animation HKX (and the skeleton) in an ANIBND from Havok 2015 to
/// 2010 format via external converter processes (capped at PARALLEL_CONVERSIONS), with
/// cancellation support: a null result from any conversion aborts the whole run and
/// kills all outstanding processes. The converted binder is written as "&lt;path&gt;.2010".
/// </summary>
/// <param name="anibndPath">Path to a BND3 or BND4 ANIBND.</param>
/// <param name="prog">Optional progress sink; receives values in [0, 1].</param>
/// <exception cref="Exception">Thrown when the file is neither BND3 nor BND4.</exception>
public static void DowngradeAnibnd(string anibndPath, IProgress<double> prog = null)
{
    lock (_lock_DoingTask)
    {
        lock (_lock_messageBoxSpamPrevent)
            _messageBoxSpamCancel = false;

        IBinder anibnd = null;
        if (BND3.Is(anibndPath))
        {
            anibnd = BND3.Read(anibndPath);
        }
        else if (BND4.Is(anibndPath))
        {
            anibnd = BND4.Read(anibndPath);
        }
        else
        {
            throw new Exception("Invalid ANIBND");
        }

        // Sort binder entries into the skeleton, the compendium, and the animation HKXs.
        List<BinderFile> animations = new List<BinderFile>();
        BinderFile skeleton = null;
        BinderFile compendium = null;
        foreach (var f in anibnd.Files)
        {
            var nameCheck = f.Name.ToUpper();
            if (nameCheck.EndsWith(".COMPENDIUM"))
            {
                compendium = f;
            }
            else if (nameCheck.EndsWith("SKELETON.HKX"))
            {
                skeleton = f;
            }
            else if (nameCheck.EndsWith(".HKX"))
            {
                animations.Add(f);
            }
        }

        // +1 for the final write step; +1 more if a skeleton must be converted too.
        double progTotalFileCount = animations.Count + 1;
        if (skeleton != null)
        {
            progTotalFileCount++;
        }

        CreateTempIfNeeded();

        bool converted_skeleton = false;
        bool[] converted_anim = new bool[animations.Count];
        try
        {
            if (compendium != null)
            {
                SaveCompendiumToTemp(compendium.Bytes);
            }

            bool wasCanceled = false;
            // Processes currently running, and finished processes awaiting disposal.
            List<Process> processQueue = new List<Process>();
            List<Process> processDeleteQueue = new List<Process>();

            if (skeleton != null)
            {
                var skellingtonProc = GetDowngradeHkxProcess(skeleton.Name, skeleton.Bytes, compendium != null, -1, (convertProc, result, i) =>
                {
                    lock (_lock_animFinished)
                    {
                        if (result == null)
                        {
                            // A null result means the conversion was canceled.
                            wasCanceled = true;
                            return;
                        }
                        else
                        {
                            skeleton.Bytes = result;
                            converted_skeleton = true;
                        }
                    }
                });
                lock (_lock_processQueue)
                {
                    processQueue.Add(skellingtonProc);
                }
                skellingtonProc.Start();
            }

            // BUGFIX: prog defaults to null, so every report must be null-conditional;
            // the original dereferenced it unconditionally.
            prog?.Report(1 / progTotalFileCount);

            int numAnimsConverted = 0;
            int animCount = animations.Count;
            for (int i = 0; i < animCount; i++)
            {
                bool checkCancelled = false;
                lock (_lock_processQueue)
                    checkCancelled = wasCanceled;
                if (checkCancelled)
                {
                    return; // finally block still cleans up the temp dir
                }

                var nextProc = GetDowngradeHkxProcess(animations[i].Name, animations[i].Bytes, compendium != null, i, (convertProc, result, j) =>
                {
                    if (result == null)
                    {
                        // Canceled: kill and dispose every outstanding process and
                        // discard all partial results so nothing half-converted survives.
                        lock (_lock_processQueue)
                        {
                            wasCanceled = true;
                            foreach (var proc in processDeleteQueue)
                            {
                                if (!proc.HasExited)
                                {
                                    proc.Kill();
                                }
                                if (processQueue.Contains(proc))
                                {
                                    processQueue.Remove(proc);
                                }
                                proc.Dispose();
                            }
                            processDeleteQueue.Clear();
                            foreach (var proc in processQueue)
                            {
                                if (!proc.HasExited)
                                {
                                    proc.Kill();
                                }
                                proc.Dispose();
                            }
                            processQueue.Clear();
                            for (int k = 0; k < animCount; k++)
                            {
                                animations[k].Bytes = null;
                                converted_anim[k] = false;
                            }
                            // Mark all animations "done" so the completion loop exits.
                            numAnimsConverted = animCount;
                        }
                    }
                    else
                    {
                        // NOTE(review): unlike the skeleton callback, these writes are not
                        // under _lock_animFinished in the original; preserved as-is.
                        animations[j].Bytes = result;
                        converted_anim[j] = true;
                        numAnimsConverted++;
                        prog?.Report(((numAnimsConverted) + 2) / progTotalFileCount);
                        lock (_lock_processQueue)
                        {
                            processDeleteQueue.Add(convertProc);
                        }
                    }
                });
                lock (_lock_processQueue)
                {
                    processQueue.Add(nextProc);
                }
                nextProc.Start();

                // Throttle: reap finished processes until a converter slot frees up.
                int processCount = PARALLEL_CONVERSIONS + 1;
                do
                {
                    Thread.Sleep(100);
                    lock (_lock_processQueue)
                    {
                        foreach (var proc in processDeleteQueue)
                        {
                            try
                            {
                                if (!proc.HasExited)
                                {
                                    proc.Kill();
                                }
                            }
                            catch (InvalidOperationException)
                            {
                                // Process already exited/disposed; nothing to kill.
                            }
                            if (processQueue.Contains(proc))
                            {
                                processQueue.Remove(proc);
                            }
                            proc.Dispose();
                        }
                        processDeleteQueue.Clear();
                        foreach (var proc in processQueue)
                        {
                            try
                            {
                                if (proc.HasExited)
                                {
                                    processDeleteQueue.Add(proc);
                                }
                            }
                            catch (InvalidOperationException)
                            {
                                // Treat an inaccessible process as finished.
                                processDeleteQueue.Add(proc);
                            }
                        }
                        processCount = processQueue.Count;
                    }
                }
                while (processCount > PARALLEL_CONVERSIONS);
            }

            // Poll until every callback has fired, or the run was canceled.
            bool everythingFinished = false;
            do
            {
                everythingFinished = true;
                if (skeleton != null && !converted_skeleton)
                {
                    everythingFinished = false;
                }
                if (numAnimsConverted < animCount)
                {
                    everythingFinished = false;
                }
                System.Threading.Thread.Sleep(20);
            }
            while (!(everythingFinished || wasCanceled));

            // NOTE(review): as in the original, the .2010 file is written even on the
            // wasCanceled path (with nulled animation bytes) — confirm this is intended.
            if (File.Exists(anibndPath + ".2010"))
            {
                File.Delete(anibndPath + ".2010");
            }

            if (anibnd is BND3 asBnd3)
            {
                asBnd3.Write(anibndPath + ".2010");
            }
            else if (anibnd is BND4 asBnd4)
            {
                asBnd4.Write(anibndPath + ".2010");
            }

            prog?.Report(1);
        }
        finally
        {
            DeleteTempIfNeeded();
        }
    }
}