// Builds a Bundle from a parsed BJSON entry by mapping each known field
// name onto the matching Bundle member; unknown field names are ignored.
// The ebx/res/chunk lists are pre-initialized so they are never null even
// when the entry lacks those fields, and iscas defaults to true.
public static Bundle Create(BJSON.Entry e)
{
    Bundle bundle = new Bundle();
    bundle.iscas = true;
    bundle.ebx = new List<ebxtype>();
    bundle.res = new List<restype>();
    bundle.chunk = new List<chunktype>();
    foreach (BJSON.Field field in e.fields)
    {
        if (field.fieldname == "path")
            bundle.path = (string)field.data;
        else if (field.fieldname == "magicSalt")
            bundle.salt = BitConverter.ToUInt32((byte[])field.data, 0).ToString("X4");
        else if (field.fieldname == "alignMembers")
            bundle.align = (bool)field.data;
        else if (field.fieldname == "storeCompressedSizes")
            bundle.compressed = (bool)field.data;
        else if (field.fieldname == "totalSize")
            bundle.totalsize = BitConverter.ToUInt64((byte[])field.data, 0);
        else if (field.fieldname == "dbxtotalSize")
            bundle.dbxtotalsize = BitConverter.ToUInt64((byte[])field.data, 0);
        else if (field.fieldname == "ebx")
            bundle.ebx = ReadEbx(field);
        else if (field.fieldname == "dbx")
            bundle.dbx = ReadDbx(field);
        else if (field.fieldname == "res")
            bundle.res = ReadRes(field);
        else if (field.fieldname == "chunks" || field.fieldname == "chunks0")
            bundle.chunk.AddRange(ReadChunks(field)); // both chunk lists accumulate
    }
    return bundle;
}
// Parses the root entry of the loaded file: resets the bundle/chunk info
// lists, dispatches the "bundles" and "chunks" fields to their readers and
// records whether the archive stores its payload in CAS containers ("cas").
private void ProcessFile()
{
    BJSON.Entry root = lines[0];
    bundles = new List<TOCBundleInfoStruct>();
    chunks = new List<TOCChunkInfoStruct>();
    foreach (BJSON.Field field in root.fields)
    {
        if (field.fieldname == "bundles")
            ProcessBundles(field);
        else if (field.fieldname == "chunks")
            ProcessChunks(field);
        else if (field.fieldname == "cas")
            iscas = (bool)field.data;
    }
}
// Rebuilds the superbundle (.sb) belonging to tocname: streams every bundle
// payload (original data, replacement file, or an inserted duplicate) into a
// new .sb next to the target TOC, then rewrites each non-base bundle's
// "offset" field so it points at the payload's absolute position inside the
// new file.  Payloads are read from the .sb that sits next to toc.MyPath.
// Returns false (after logging to rtb2) on any error.
// Fix over the original: the two FileStreams leaked when an exception was
// thrown (the catch swallowed it and returned false with both handles still
// open) — they are now in using-blocks.
private bool BuildSB(string tocname)
{
    rtb2.AppendText("Building SB file...\n");
    string dir = Path.GetDirectoryName(tocname) + "\\";
    string filename = Path.GetFileNameWithoutExtension(tocname) + ".sb";
    string odir = Path.GetDirectoryName(toc.MyPath) + "\\";
    string ofilename = Path.GetFileNameWithoutExtension(toc.MyPath) + ".sb";
    rtb2.AppendText("Writing " + filename + " ...\n");
    try
    {
        BJSON.Entry root = toc.lines[0];
        BJSON.Field bundles = root.FindField("bundles");
        List<BJSON.Entry> list = (List<BJSON.Entry>)bundles.data;
        int rel_off; // file position where the payload starts in the new .sb
        using (FileStream fs = new FileStream(dir + filename, FileMode.Create, FileAccess.Write))
        using (FileStream ofs = new FileStream(odir + ofilename, FileMode.Open, FileAccess.Read))
        {
            MemoryStream m = new MemoryStream(); // concatenated bundle payloads
            long gl_off = 0;                     // running offset inside m
            for (int i = 0; i < list.Count; i++)
            {
                BJSON.Entry e = list[i];
                long offset = 0;
                int size = 0;
                bool isbase = false;
                BJSON.Field offset_field = new BJSON.Field();
                BJSON.Field size_field = new BJSON.Field();
                BJSON.Field isbase_field = new BJSON.Field();
                foreach (BJSON.Field f in e.fields)
                    switch (f.fieldname)
                    {
                        case "offset":
                            offset = BitConverter.ToInt64((byte[])f.data, 0);
                            offset_field = f;
                            break;
                        case "size":
                            size = BitConverter.ToInt32((byte[])f.data, 0);
                            size_field = f;
                            break;
                        case "base":
                            isbase = (bool)f.data;
                            isbase_field = f;
                            break;
                    }
                if (SelectForReplacement[i] == null)
                {
                    // Keep the original payload.  Base bundles carry no local
                    // payload, so they contribute nothing (and skip gl_off and
                    // the duplication step, as in the original).
                    if (isbase)
                        continue;
                    offset_field.data = BitConverter.GetBytes(gl_off);
                    CopyFileStream(ofs, m, offset, size);
                }
                else
                {
                    // Splice in the replacement file; a base bundle becomes a
                    // local one now that it carries its own payload.
                    byte[] buf = File.ReadAllBytes(SelectForReplacement[i]);
                    size = buf.Length;
                    if (isbase)
                        isbase_field.data = false;
                    offset_field.data = BitConverter.GetBytes(gl_off);
                    size_field.data = BitConverter.GetBytes((long)size);
                    m.Write(buf, 0, size);
                }
                gl_off += size;
                if (SelectForDuplication[i])
                {
                    // Deep-copy the entry (offset/size/base get fresh values,
                    // other fields share their data reference) and insert it
                    // right after the current one.  The loop then visits the
                    // clone on the next iteration, so its payload is copied as
                    // well — mirrors the original behavior.
                    BJSON.Entry te = new BJSON.Entry();
                    te.type = e.type;
                    te.type87name = e.type87name;
                    te.fields = new List<BJSON.Field>();
                    foreach (BJSON.Field f in e.fields)
                    {
                        BJSON.Field tf = new BJSON.Field();
                        tf.fieldname = f.fieldname;
                        tf.type = f.type;
                        switch (f.fieldname)
                        {
                            case "offset":
                                tf.data = BitConverter.GetBytes(BitConverter.ToInt64((byte[])f.data, 0));
                                break;
                            case "size":
                                tf.data = BitConverter.GetBytes(BitConverter.ToInt32((byte[])f.data, 0));
                                break;
                            case "base":
                                tf.data = (bool)f.data;
                                break;
                            default:
                                tf.data = f.data;
                                break;
                        }
                        te.fields.Add(tf);
                    }
                    list.Insert(i + 1, te);
                    bundles.data = list;
                    SelectForDuplication.Insert(i + 1, false);
                    SelectForReplacement.Insert(i + 1, null);
                }
            }
            // Header: 0x82 container marker, LEB128 total size, the
            // "\x01bundles\0" field tag, the LEB128 payload size, then the
            // payload.  NOTE(review): the inner LEB128 is measured before the
            // trailing 0 byte is appended to m while the outer size includes
            // it — kept exactly as the original wrote it, presumably matching
            // the on-disk format.
            MemoryStream t = new MemoryStream();
            Helpers.WriteLEB128(t, (int)m.Length);
            m.WriteByte(0);
            int varsize = (int)t.Length;
            fs.WriteByte(0x82);
            Helpers.WriteLEB128(fs, varsize + 9 + (int)m.Length);
            byte[] buff = { 0x01, 0x62, 0x75, 0x6E, 0x64, 0x6C, 0x65, 0x73, 0x00 }; // "\x01bundles\0"
            fs.Write(buff, 0, 9);
            fs.Write(t.ToArray(), 0, varsize);
            rel_off = (int)fs.Position;
            fs.Write(m.ToArray(), 0, (int)m.Length);
        }
        rtb2.AppendText("Saved " + filename + "\nUpdating TOC Entries...\n");
        // Second pass: make every non-base TOC offset absolute within the new
        // .sb by adding the payload's start position.
        for (int i = 0; i < list.Count; i++)
        {
            BJSON.Entry e = list[i];
            long offset = 0;
            bool isbase = false;
            BJSON.Field offset_field = new BJSON.Field();
            foreach (BJSON.Field f in e.fields)
                switch (f.fieldname)
                {
                    case "offset":
                        offset = BitConverter.ToInt64((byte[])f.data, 0);
                        offset_field = f;
                        break;
                    case "base":
                        isbase = (bool)f.data;
                        break;
                }
            if (isbase)
                continue;
            offset_field.data = BitConverter.GetBytes(offset + rel_off);
        }
    }
    catch (Exception ex)
    {
        rtb2.AppendText("ERROR: " + ex.Message);
        return false;
    }
    return true;
}
// Runs a texture-replacement job: de-duplicates the job's TOC/bundle path
// lists, makes sure every affected non-patch TOC/SB pair exists in the
// output folder (copying it from the base game and registering it as a new
// delta superbundle in layout.toc when missing), builds a CAS container for
// the new texture data, and finally applies the res update to every TOC.
// Aborts (after logging) when a copy fails or the CAS container could not
// be created.
public void RunTextureJob(Mod.ModJob mj)
{
    DbgPrint("Running Texture Replacement Job for: " + mj.respath);
    //Check Toc Files
    mj.tocPaths = RemoveDuplicatePaths(mj.tocPaths);
    mj.bundlePaths = RemoveDuplicatePaths(mj.bundlePaths);
    foreach (string tocpath in mj.tocPaths)
    {
        if (tocpath.ToLower().Contains("\\patch\\"))
            continue;
        DbgPrint("Checking for : " + tocpath);
        // "update" TOCs are flattened by two folders in the output tree;
        // everything else keeps its relative path.
        string target = tocpath.ToLower().StartsWith("update")
            ? Helpers.SkipSubFolder(tocpath, 2)
            : tocpath;
        if (!File.Exists(outputPath + target))
            if (!ImportTocAndSbFromBase(tocpath, target))
                return;
    }
    DbgPrint("All found.");
    //create cas data
    int newcompressedsize = 0;
    byte[] newsha1 = CreateCASContainer(mj.data, out newcompressedsize);
    if (newsha1.Length != 0x14)
    {
        DbgPrint("Error: could not create CAS data, aborting!");
        return;
    }
    //walk through affected toc files
    foreach (string tocpath in mj.tocPaths)
        RunTextureResJob(mj, tocpath, newsha1, newcompressedsize);
}

// Order-preserving de-duplication (replaces the original O(n^2) scans;
// HashSet's default ordinal comparison matches the original '==' compares).
private static List<string> RemoveDuplicatePaths(List<string> paths)
{
    List<string> result = new List<string>();
    HashSet<string> seen = new HashSet<string>();
    foreach (string p in paths)
        if (seen.Add(p))
            result.Add(p);
    return result;
}

// Copies a missing TOC (and its .sb) from the game folder to the output
// folder at 'target', then registers the pair as a delta superbundle in
// layout.toc.  Returns false (after logging) when either copy fails.
// Factored out of the two near-identical branches of RunTextureJob.
private bool ImportTocAndSbFromBase(string tocpath, string target)
{
    string from = GlobalStuff.FindSetting("gamepath") + tocpath;
    string to = outputPath + target;
    Directory.CreateDirectory(Path.GetDirectoryName(to) + "\\");
    DbgPrint("TOC file not found, copying from base!");
    try
    {
        File.Copy(from, to);
    }
    catch (Exception)
    {
        DbgPrint("Error: TOC file not found, can not copy from base!\n Tried to copy from:\n\t" + from + "\n to:\n\t" + to);
        return false;
    }
    // The .sb sits next to the .toc; note the source path is lowercased
    // exactly as the original code did.
    from = GlobalStuff.FindSetting("gamepath") + tocpath.ToLower().Replace(".toc", ".sb");
    to = outputPath + target.ToLower().Replace(".toc", ".sb");
    Directory.CreateDirectory(Path.GetDirectoryName(to) + "\\");
    try
    {
        File.Copy(from, to);
    }
    catch (Exception)
    {
        DbgPrint("Error: SB file not found, can not copy from base!\n Tried to copy from:\n\t" + from + "\n to:\n\t" + to);
        return false;
    }
    DbgPrint("Fixing layout.toc...");
    TOCFile toc = new TOCFile(outputPath + "Data\\layout.toc");
    BJSON.Entry root = toc.lines[0];
    BJSON.Field sbun = root.FindField("superBundles");
    List<BJSON.Entry> list = (List<BJSON.Entry>)sbun.data;
    BJSON.Entry ne = new BJSON.Entry();
    ne.type = 0x82;
    ne.fields = new List<BJSON.Field>();
    ne.fields.Add(new BJSON.Field(7, "name", target.Replace(".toc", "")));
    ne.fields.Add(new BJSON.Field(6, "delta", true));
    list.Add(ne);
    sbun.data = list;
    toc.Save();
    DbgPrint("SuperBundle added");
    return true;
}
// Applies a resource-replacement job to one bundle of one TOC: locates the
// bundle by id, makes sure it is a local (delta) CAS bundle, then patches or
// appends the matching "res" entry in the accompanying .sb file with the new
// sha1/size/originalSize and casPatchType=1, saving the .sb after each change.
// Binary (non-CAS) TOCs are skipped with a warning.
public void RunRessourceJobOnBundle(Mod.ModJob mj, TOCFile toc, string tocpath, byte[] newsha1, string bpath, int newcompressedsize)
{
    GC.Collect();
    int count = 0;
    int index = -1;
    // Case-insensitive search for the bundle id; count++ > -1 is always true
    // (kept as-is), count - 1 is the matching list index.
    foreach (TOCFile.TOCBundleInfoStruct buni in toc.bundles)
        if (count++ > -1 && bpath.ToLower() == buni.id.ToLower())
        {
            DbgPrint(" Found bundle : " + bpath);
            index = count - 1;
            break;
        }
    //if bundle found
    if (index != -1)
    {
        if (!toc.iscas)
        {
            DbgPrint(" Warning: binary bundles not supported yet, skipping!");
            return;
        }
        //find out if base or delta
        BJSON.Entry root = toc.lines[0];
        BJSON.Field bundles = root.FindField("bundles");
        BJSON.Entry bun = ((List<BJSON.Entry>)bundles.data)[index];
        BJSON.Field isDeltaField = bun.FindField("delta");
        BJSON.Field isBaseField = bun.FindField("base");
        //if is base, copy from base, make delta and recompile
        if (isBaseField != null && (bool)isBaseField.data == true)
            if (!ImportBundleFromBase(toc, tocpath, index, bpath))
                return;
        //check if already is in sb
        if (isDeltaField != null && (bool)isDeltaField.data == true)
            DbgPrint(" Its already a delta");
        DbgPrint(" Updating SB file with new SHA1...");//yeah, pretty much
        // The .sb lives next to the .toc in the output folder.
        string SBpath = outputPath + Path.GetDirectoryName(tocpath) + "\\" + Path.GetFileNameWithoutExtension(tocpath) + ".sb";
        SBFile sb = new SBFile(SBpath);
        root = sb.lines[0];
        bundles = root.FindField("bundles");
        List<BJSON.Entry> bundle_list = (List<BJSON.Entry>)bundles.data;
        //find right bundle
        for (int i = 0; i < bundle_list.Count; i++)
        {
            bun = bundle_list[i];
            BJSON.Field ebx = bun.FindField("ebx"); // NOTE(review): unused, kept as-is
            BJSON.Field res = bun.FindField("res");
            BJSON.Field path = bun.FindField("path");
            // Only process the bundle whose "path" matches bpath and which has
            // a res list at all.
            if (!(path != null && ((string)path.data).ToLower() == bpath.ToLower()) || res == null)
                continue;
            bool found = false;
            //find right res entry
            List<BJSON.Entry> res_list = (List<BJSON.Entry>)res.data;
            for (int j = 0; j < res_list.Count; j++)
            {
                BJSON.Entry res_e = res_list[j];
                BJSON.Field f_sha1 = res_e.FindField("sha1");
                BJSON.Field f_name = res_e.FindField("name");
                BJSON.Field f_size = res_e.FindField("size");
                BJSON.Field f_osize = res_e.FindField("originalSize");
                BJSON.Field f_casPatchType = res_e.FindField("casPatchType");
                if (f_name != null && ((string)f_name.data).ToLower() == mj.respath.ToLower() && f_sha1 != null)
                {
                    //get res data
                    byte[] sha1buff = (byte[])f_sha1.data;
                    DbgPrint(" Found res sha1 : " + Helpers.ByteArrayToHexString(sha1buff));
                    f_sha1.data = newsha1;
                    DbgPrint(" Replaced res sha1 with : " + Helpers.ByteArrayToHexString(newsha1));
                    DbgPrint(" Updating res size : " + mj.data.Length);
                    // Sizes are stored as 8-byte little-endian values.
                    // NOTE(review): f_size/f_osize are not null-checked here —
                    // presumably every res entry carries both fields; would NRE
                    // otherwise.
                    f_size.data = BitConverter.GetBytes((long)newcompressedsize);
                    f_osize.data = BitConverter.GetBytes((long)mj.data.Length);
                    // casPatchType must be 1 so the engine reads the patched
                    // CAS data; add the field if the entry lacks it.
                    if (f_casPatchType != null)
                    {
                        if (BitConverter.ToInt32((byte[])f_casPatchType.data, 0) != 1)
                        {
                            DbgPrint(" CasPatchType: found and set to 1!");
                            f_casPatchType.data = BitConverter.GetBytes((int)1);
                        }
                        else
                            DbgPrint(" CasPatchType: found and is fine!");
                    }
                    else
                    {
                        f_casPatchType = new BJSON.Field();
                        f_casPatchType.fieldname = "casPatchType";
                        f_casPatchType.type = 8;
                        f_casPatchType.data = BitConverter.GetBytes((int)1);
                        res_e.fields.Add(f_casPatchType);
                        DbgPrint(" CasPatchType: added and set to 1!");
                    }
                    CalcTotalSize(bun);
                    sb.Save(); // persist after every successful replacement
                    DbgPrint(" Job successfull!");
                    found = true;
                }
            }
            if (!found)
            {
                // No matching res entry in this bundle: append a fresh one
                // with the field types/layout the format expects
                // (7=string, 0x10=sha1, 9=long, 8=int, 0x13=blob).
                DbgPrint(" cant find res, adding it!");
                BJSON.Entry newres = new BJSON.Entry();
                newres.type = 0x82;
                newres.fields = new List<BJSON.Field>();
                newres.fields.Add(new BJSON.Field(7, "name", mj.respath));
                newres.fields.Add(new BJSON.Field(0x10, "sha1", newsha1));
                newres.fields.Add(new BJSON.Field(9, "size", BitConverter.GetBytes((long)newcompressedsize)));
                newres.fields.Add(new BJSON.Field(9, "originalSize", BitConverter.GetBytes((long)mj.data.Length)));
                newres.fields.Add(new BJSON.Field(0x8, "resType", Helpers.HexStringToByteArray(mj.restype)));
                newres.fields.Add(new BJSON.Field(0x13, "resMeta", new byte[0x10]));
                newres.fields.Add(new BJSON.Field(9, "resRid", BitConverter.GetBytes((long)0)));
                newres.fields.Add(new BJSON.Field(8, "casPatchType", BitConverter.GetBytes((int)1)));
                ((List<BJSON.Entry>)res.data).Add(newres);
                CalcTotalSize(bun);
                sb.Save();
                DbgPrint(" Job successfull!");
                break; // only breaks when an entry was added; kept as-is
            }
        }
    }
}