/// <summary>
/// Loads a quick preview of a .toc or .sb file into treeView2.
/// A .toc is shown directly; a .sb is only shown when its sibling .toc
/// carries a "cas" flag set to true (binary .sb layouts are not previewed).
/// </summary>
/// <param name="path">Full path of the file to preview; extension decides handling.</param>
public void PreviewFile(string path)
{
    string ext = Path.GetExtension(path).ToLower();
    switch (ext)
    {
        case ".toc":
            {
                // Fix: the original checked the constructor result for null —
                // a C# constructor can never yield null, only the parsed line
                // list can be absent.
                TOCFile toc = new TOCFile(path);
                if (toc.lines != null)
                    Helpers.FillTreeFast(treeView2, toc.lines);
                break;
            }
        case ".sb":
            {
                // The companion .toc decides whether this .sb is CAS-backed.
                string tocpath = Helpers.GetFileNameWithOutExtension(path) + ".toc";
                if (!File.Exists(tocpath))
                    break;
                TOCFile toc = new TOCFile(tocpath);
                if (toc.lines == null || toc.lines.Count == 0)
                    break;
                // Only preview when the TOC's root entry has "cas" == true.
                foreach (BJSON.Field f in toc.lines[0].fields)
                {
                    if (f.fieldname == "cas" && (bool)f.data)
                    {
                        SBFile sb = new SBFile(path);
                        if (sb.lines != null)
                            Helpers.FillTreeFast(treeView2, sb.lines);
                        return;
                    }
                }
                break;
            }
    }
}
/// <summary>
/// Opens an .sb file: updates the window title, loads the companion .toc,
/// then shows either the CAS tree view or the binary tab depending on the
/// TOC's storage mode.
/// </summary>
/// <param name="path">Full path of the .sb file to load.</param>
public void LoadFile(string path)
{
    this.Text = "SB Tool - " + path;
    string tocPath = Helpers.GetFileNameWithOutExtension(path) + ".toc";
    toc = new TOCFile(tocPath);
    if (!toc.iscas)
    {
        // Non-CAS (binary) superbundle: show the raw binary tab instead of the tree.
        toolStrip1.Visible = false;
        tabControl1.BringToFront();
        RefreshBinary();
        return;
    }
    // CAS superbundle: parse it and populate the tree view.
    toolStrip1.Visible = true;
    splitContainer2.BringToFront();
    sb = new SBFile(path);
    RefreshTree();
}
/// <summary>
/// Compares two superbundle (.sb) files bundle-by-bundle and prints, via P(),
/// a per-bundle diff report for every bundle that shows differences.
/// The compare is one-directional: ebx/res/chunk entries present in file1 but
/// missing from file2 are flagged; bundles missing from file2 entirely, and
/// extra entries that exist only in file2, are silently skipped.
/// </summary>
/// <param name="file1">Path of the baseline .sb file.</param>
/// <param name="file2">Path of the .sb file to compare against.</param>
private void CompareSB(string file1, string file2)
{
    if (!File.Exists(file1) || !File.Exists(file2)) return;
    SBFile sb1 = new SBFile(file1);
    SBFile sb2 = new SBFile(file2);
    foreach (Bundle b in sb1.bundles)
    {
        // Locate the bundle with the same path in the second file.
        Bundle b2 = null;
        foreach (Bundle c in sb2.bundles)
            if (c.path == b.path)
            {
                b2 = c;
                break;
            }
        if (b2 == null) continue; // bundle absent in file2: skipped without report
        // NOTE(review): this StringBuilder is named "sb", shadowing the file-wide
        // convention of "sb" meaning an SBFile — easy to misread.
        StringBuilder sb = new StringBuilder();
        sb.Append(" D:bundle " + b.path + "\n");
        // diff accumulates across all three entry types; each Compare* helper
        // receives the current value and returns the updated one via out.
        bool diff = false;
        // --- ebx entries, matched by name ---
        foreach (Bundle.ebxtype ebx in b.ebx)
        {
            Bundle.ebxtype ebx2 = new Bundle.ebxtype();
            ebx2.name = ""; // empty name acts as the "not found" sentinel
            foreach (Bundle.ebxtype compare in b2.ebx)
                if (ebx.name == compare.name)
                {
                    ebx2 = compare;
                    break;
                }
            if (ebx2.name == "")
            {
                sb.Append(" E:ebx not found - " + ebx.name + "\n");
                continue;
            }
            sb.Append(CompareEBX(ebx, ebx2, diff, out diff));
        }
        // --- res entries, matched by name ---
        foreach (Bundle.restype res in b.res)
        {
            Bundle.restype res2 = new Bundle.restype();
            res2.name = "";
            foreach (Bundle.restype compare in b2.res)
                if (res.name == compare.name)
                {
                    res2 = compare;
                    break;
                }
            if (res2.name == "")
            {
                sb.Append(" E:res not found - " + res.name + "\n");
                continue;
            }
            sb.Append(CompareRES(res, res2, diff, out diff));
        }
        // --- chunk entries, matched by 16-byte id ---
        foreach (Bundle.chunktype chunk in b.chunk)
        {
            Bundle.chunktype chunk2 = new Bundle.chunktype();
            chunk2.id = new byte[0]; // zero-length id acts as the "not found" sentinel
            foreach (Bundle.chunktype compare in b2.chunk)
                if (Helpers.ByteArrayCompare(chunk.id, compare.id))
                {
                    chunk2 = compare;
                    break;
                }
            if (chunk2.id.Length == 0)
            {
                sb.Append(" E:chunk not found - " + Helpers.ByteArrayToHexString(chunk.id) + "\n");
                continue;
            }
            sb.Append(CompareCHUNK(chunk, chunk2, diff, out diff));
        }
        // Only emit the accumulated report when something actually differed.
        // NOTE(review): "E:... not found" lines alone do not set diff, so a bundle
        // with only missing entries may produce no output — confirm that's intended.
        if (diff) P(sb.ToString());
    }
}
/// <summary>
/// Applies a texture mod job to one bundle of a CAS TOC/SB pair: locates the
/// res entry named by the job, rewrites the chunk GUID embedded in its res
/// payload to a fresh GUID, stores the re-packed res in the CAS store, then
/// updates the matching chunk entry (id, sizes, sha1, casPatchType) and saves
/// the SB file. Binary (non-CAS) bundles are skipped with a warning.
/// </summary>
/// <param name="mj">Mod job; mj.respath names the res, mj.data is the new chunk payload.</param>
/// <param name="toc">Parsed TOC that contains the target bundle.</param>
/// <param name="tocpath">TOC path relative to the game root; used to derive the SB path.</param>
/// <param name="newsha1">SHA1 of the new chunk data, already placed in the CAS store.</param>
/// <param name="bpath">Bundle path (id) to patch; matched case-sensitively here.</param>
/// <param name="newcompressedsize">Compressed size of the new chunk payload.</param>
public void RunTextureResJobOnBundle(Mod.ModJob mj, TOCFile toc, string tocpath, byte[] newsha1, string bpath, int newcompressedsize)
{
    // NOTE(review): explicit GC.Collect() in production code — presumably to
    // release memory between large jobs; confirm it is actually needed.
    GC.Collect();
    int count = 0;
    int index = -1;
    // Find the bundle's index in the TOC. "count++ > -1" is always true; the
    // counter only exists to track the index of the match.
    foreach (TOCFile.TOCBundleInfoStruct buni in toc.bundles)
        if (count++ > -1 && bpath == buni.id)
        {
            DbgPrint("   Found bundle : " + bpath);
            index = count - 1;
            break;
        }
    //if bundle found
    if (index != -1)
    {
        if (!toc.iscas)
        {
            DbgPrint("   Warning: binary bundles not supported yet, skipping!");
            return;
        }
        //find out if base or delta
        BJSON.Entry root = toc.lines[0];
        BJSON.Field bundles = root.FindField("bundles");
        BJSON.Entry bun = ((List<BJSON.Entry>)bundles.data)[index];
        BJSON.Field isDeltaField = bun.FindField("delta");
        BJSON.Field isBaseField = bun.FindField("base");
        //if is base, copy from base, make delta and recompile
        if (isBaseField != null && (bool)isBaseField.data == true)
            if (!ImportBundleFromBase(toc, tocpath, index, bpath))
                return;
        //check if already is in sb
        if (isDeltaField != null && (bool)isDeltaField.data == true)
            DbgPrint("   Its already a delta");
        DbgPrint("   Updating SB file with new SHA1...");//yeah, pretty much
        // Derive the SB path from the TOC path (same folder, same stem, .sb extension).
        string SBpath = outputPath + Path.GetDirectoryName(tocpath) + "\\" + Path.GetFileNameWithoutExtension(tocpath) + ".sb";
        SBFile sb = new SBFile(SBpath);
        root = sb.lines[0];
        bundles = root.FindField("bundles");
        List<BJSON.Entry> bundle_list = (List<BJSON.Entry>)bundles.data;
        //find right bundle
        for (int i = 0; i < bundle_list.Count; i++)
        {
            bun = bundle_list[i];
            // NOTE(review): "ebx" is looked up but never used below.
            BJSON.Field ebx = bun.FindField("ebx");
            BJSON.Field res = bun.FindField("res");
            BJSON.Field chunks = bun.FindField("chunks");
            BJSON.Field path = bun.FindField("path");
            if (!(path != null && (string)path.data == bpath) || res == null || chunks == null)
                continue;
            bool found = false;
            byte[] chunkidbuff = new byte[16];
            byte[] newchunkid = new byte[16];
            //find right res entry
            List<BJSON.Entry> res_list = (List<BJSON.Entry>)res.data;
            for (int j = 0; j < res_list.Count; j++)
            {
                BJSON.Entry res_e = res_list[j];
                BJSON.Field f_sha1 = res_e.FindField("sha1");
                BJSON.Field f_name = res_e.FindField("name");
                BJSON.Field f_size = res_e.FindField("size");
                BJSON.Field f_osize = res_e.FindField("originalSize");
                BJSON.Field f_casPatchType = res_e.FindField("casPatchType");
                if (f_name != null && (string)f_name.data == mj.respath && f_sha1 != null)
                {
                    //get res data and extract chunk id
                    byte[] sha1buff = (byte[])f_sha1.data;
                    DbgPrint("    Found res sha1 : " + Helpers.ByteArrayToHexString(sha1buff));
                    byte[] resdata = SHA1Access.GetDataBySha1(sha1buff);
                    if (resdata.Length == 0)
                    {
                        DbgPrint("    Error: cant find res data, skipping!");
                        break;
                    }
                    // The referenced chunk's 16-byte GUID sits at offset 0x1C of the
                    // res payload — assumed texture-header layout; TODO confirm.
                    for (int k = 0; k < 16; k++)
                        chunkidbuff[k] = resdata[k + 0x1C];
                    DbgPrint("    Found chunk id : " + Helpers.ByteArrayToHexString(chunkidbuff));
                    // Give the modified texture a brand-new chunk GUID and patch it
                    // back into the res payload before re-packing.
                    newchunkid = Guid.NewGuid().ToByteArray();
                    DbgPrint("    Creating new chunk id : " + Helpers.ByteArrayToHexString(newchunkid));
                    for (int k = 0; k < 16; k++)
                        resdata[k + 0x1C] = newchunkid[k];
                    int newrescompsize = 0;
                    byte[] newressha1 = CreateCASContainer(resdata, out newrescompsize, "    ");
                    DbgPrint("    Creating new res sha1 : " + Helpers.ByteArrayToHexString(newressha1));
                    f_sha1.data = newressha1;
                    DbgPrint("    Updating res size : " + resdata.Length);
                    // Sizes are stored as little-endian 8-byte blobs in the BJSON tree.
                    f_size.data = BitConverter.GetBytes((long)newrescompsize);
                    f_osize.data = BitConverter.GetBytes((long)resdata.Length);
                    // casPatchType must be 1 for patched CAS entries; add it if missing.
                    if (f_casPatchType != null)
                    {
                        if (BitConverter.ToInt32((byte[])f_casPatchType.data, 0) != 1)
                        {
                            DbgPrint("    CasPatchType: found and set to 1!");
                            f_casPatchType.data = BitConverter.GetBytes((int)1);
                        }
                        else
                            DbgPrint("    CasPatchType: found and is fine!");
                    }
                    else
                    {
                        f_casPatchType = new BJSON.Field();
                        f_casPatchType.fieldname = "casPatchType";
                        f_casPatchType.type = 8;
                        f_casPatchType.data = BitConverter.GetBytes((int)1);
                        res_e.fields.Add(f_casPatchType);
                        DbgPrint("    CasPatchType: added and set to 1!");
                    }
                    found = true;
                }
            }
            if (!found)
            {
                DbgPrint("    Error: cant find res, skipping!");
                break;
            }
            found = false;
            //find right chunk entry
            List<BJSON.Entry> chunk_list = (List<BJSON.Entry>)chunks.data;
            for (int j = 0; j < chunk_list.Count; j++)
            {
                BJSON.Entry chunk_e = chunk_list[j];
                BJSON.Field f_id = chunk_e.FindField("id");
                BJSON.Field f_size = chunk_e.FindField("size");
                BJSON.Field f_rangeStart = chunk_e.FindField("rangeStart");
                BJSON.Field f_rangeEnd = chunk_e.FindField("rangeEnd");
                BJSON.Field f_logicalOffset = chunk_e.FindField("logicalOffset");
                BJSON.Field f_logicalSize = chunk_e.FindField("logicalSize");
                BJSON.Field f2_sha1 = chunk_e.FindField("sha1");
                BJSON.Field f_casPatchType2 = chunk_e.FindField("casPatchType");
                // Match the chunk by the GUID that was extracted from the res payload.
                if (f_id != null && Helpers.ByteArrayCompare((byte[])f_id.data, chunkidbuff))
                {
                    DbgPrint("    Found chunk");
                    f_id.data = newchunkid;
                    found = true;
                    if (f_casPatchType2 != null)
                    {
                        if (BitConverter.ToInt32((byte[])f_casPatchType2.data, 0) != 1)
                        {
                            DbgPrint("    CasPatchType: found and set to 1!");
                            f_casPatchType2.data = BitConverter.GetBytes((int)1);
                        }
                        else
                            DbgPrint("    CasPatchType: found and is fine!");
                    }
                    else
                    {
                        f_casPatchType2 = new BJSON.Field();
                        f_casPatchType2.fieldname = "casPatchType";
                        f_casPatchType2.type = 8;
                        f_casPatchType2.data = BitConverter.GetBytes((int)1);
                        chunk_e.fields.Add(f_casPatchType2);
                        DbgPrint("    CasPatchType: added and set to 1!");
                    }
                    // NOTE(review): f_size and f2_sha1 are dereferenced without null
                    // checks, unlike the range/logical fields — verify they always exist.
                    f_size.data = BitConverter.GetBytes(newcompressedsize);
                    if (f_rangeStart != null) f_rangeStart.data = BitConverter.GetBytes((int)0);
                    if (f_rangeEnd != null) f_rangeEnd.data = BitConverter.GetBytes(newcompressedsize);
                    if (f_logicalOffset != null) f_logicalOffset.data = BitConverter.GetBytes((int)0);
                    if (f_logicalSize != null) f_logicalSize.data = BitConverter.GetBytes(mj.data.Length);
                    f2_sha1.data = newsha1;
                    DbgPrint("    Updated chunk size : " + mj.data.Length);
                    CalcTotalSize(bun);
                    sb.Save();
                    found = true; // NOTE(review): redundant — already set above
                    DbgPrint("    Replaced chunk sha1 and saved SB file");
                    DbgPrint("   Job successfull!");
                    break;
                }
            }
            if (!found)
                DbgPrint("    Error: Could not find Chunk by id");
        }
    }
}
/// <summary>
/// Applies a resource mod job to one bundle of a CAS TOC/SB pair: finds the
/// res entry named by the job (case-insensitive) and points it at the new
/// SHA1/sizes, ensuring casPatchType == 1; if the res entry does not exist
/// it is created from the job's metadata and appended. Saves the SB file.
/// Binary (non-CAS) bundles are skipped with a warning.
/// </summary>
/// <param name="mj">Mod job; mj.respath names the res, mj.restype/mj.data supply its metadata.</param>
/// <param name="toc">Parsed TOC that contains the target bundle.</param>
/// <param name="tocpath">TOC path relative to the game root; used to derive the SB path.</param>
/// <param name="newsha1">SHA1 of the new res data, already placed in the CAS store.</param>
/// <param name="bpath">Bundle path (id) to patch; matched case-insensitively here.</param>
/// <param name="newcompressedsize">Compressed size of the new res payload.</param>
public void RunRessourceJobOnBundle(Mod.ModJob mj, TOCFile toc, string tocpath, byte[] newsha1, string bpath, int newcompressedsize)
{
    // NOTE(review): explicit GC.Collect() — presumably to reclaim memory between
    // large jobs; confirm it is actually needed.
    GC.Collect();
    int count = 0;
    int index = -1;
    // Find the bundle's index in the TOC. "count++ > -1" is always true; the
    // counter only exists to track the index of the match.
    foreach (TOCFile.TOCBundleInfoStruct buni in toc.bundles)
        if (count++ > -1 && bpath.ToLower() == buni.id.ToLower())
        {
            DbgPrint("   Found bundle : " + bpath);
            index = count - 1;
            break;
        }
    //if bundle found
    if (index != -1)
    {
        if (!toc.iscas)
        {
            DbgPrint("   Warning: binary bundles not supported yet, skipping!");
            return;
        }
        //find out if base or delta
        BJSON.Entry root = toc.lines[0];
        BJSON.Field bundles = root.FindField("bundles");
        BJSON.Entry bun = ((List<BJSON.Entry>)bundles.data)[index];
        BJSON.Field isDeltaField = bun.FindField("delta");
        BJSON.Field isBaseField = bun.FindField("base");
        //if is base, copy from base, make delta and recompile
        if (isBaseField != null && (bool)isBaseField.data == true)
            if (!ImportBundleFromBase(toc, tocpath, index, bpath))
                return;
        //check if already is in sb
        if (isDeltaField != null && (bool)isDeltaField.data == true)
            DbgPrint("   Its already a delta");
        DbgPrint("   Updating SB file with new SHA1...");//yeah, pretty much
        // Derive the SB path from the TOC path (same folder, same stem, .sb extension).
        string SBpath = outputPath + Path.GetDirectoryName(tocpath) + "\\" + Path.GetFileNameWithoutExtension(tocpath) + ".sb";
        SBFile sb = new SBFile(SBpath);
        root = sb.lines[0];
        bundles = root.FindField("bundles");
        List<BJSON.Entry> bundle_list = (List<BJSON.Entry>)bundles.data;
        //find right bundle
        for (int i = 0; i < bundle_list.Count; i++)
        {
            bun = bundle_list[i];
            // NOTE(review): "ebx" is looked up but never used below.
            BJSON.Field ebx = bun.FindField("ebx");
            BJSON.Field res = bun.FindField("res");
            BJSON.Field path = bun.FindField("path");
            if (!(path != null && ((string)path.data).ToLower() == bpath.ToLower()) || res == null)
                continue;
            bool found = false;
            //find right res entry
            List<BJSON.Entry> res_list = (List<BJSON.Entry>)res.data;
            for (int j = 0; j < res_list.Count; j++)
            {
                BJSON.Entry res_e = res_list[j];
                BJSON.Field f_sha1 = res_e.FindField("sha1");
                BJSON.Field f_name = res_e.FindField("name");
                BJSON.Field f_size = res_e.FindField("size");
                BJSON.Field f_osize = res_e.FindField("originalSize");
                BJSON.Field f_casPatchType = res_e.FindField("casPatchType");
                if (f_name != null && ((string)f_name.data).ToLower() == mj.respath.ToLower() && f_sha1 != null)
                {
                    //get res data
                    byte[] sha1buff = (byte[])f_sha1.data;
                    DbgPrint("    Found res sha1 : " + Helpers.ByteArrayToHexString(sha1buff));
                    // Redirect the entry to the new CAS payload.
                    f_sha1.data = newsha1;
                    DbgPrint("    Replaced res sha1 with : " + Helpers.ByteArrayToHexString(newsha1));
                    DbgPrint("    Updating res size : " + mj.data.Length);
                    // Sizes are stored as little-endian 8-byte blobs in the BJSON tree.
                    f_size.data = BitConverter.GetBytes((long)newcompressedsize);
                    f_osize.data = BitConverter.GetBytes((long)mj.data.Length);
                    // casPatchType must be 1 for patched CAS entries; add it if missing.
                    if (f_casPatchType != null)
                    {
                        if (BitConverter.ToInt32((byte[])f_casPatchType.data, 0) != 1)
                        {
                            DbgPrint("    CasPatchType: found and set to 1!");
                            f_casPatchType.data = BitConverter.GetBytes((int)1);
                        }
                        else
                            DbgPrint("    CasPatchType: found and is fine!");
                    }
                    else
                    {
                        f_casPatchType = new BJSON.Field();
                        f_casPatchType.fieldname = "casPatchType";
                        f_casPatchType.type = 8;
                        f_casPatchType.data = BitConverter.GetBytes((int)1);
                        res_e.fields.Add(f_casPatchType);
                        DbgPrint("    CasPatchType: added and set to 1!");
                    }
                    CalcTotalSize(bun);
                    sb.Save();
                    DbgPrint("   Job successfull!");
                    found = true;
                }
            }
            if (!found)
            {
                // Res entry absent: synthesize one from the job's metadata.
                // Field type codes (7=string, 0x10=sha1, 9=int64, 8=int32, 0x13=blob)
                // follow the BJSON.Field convention used elsewhere — TODO confirm
                // against the BJSON writer.
                DbgPrint("    cant find res, adding it!");
                BJSON.Entry newres = new BJSON.Entry();
                newres.type = 0x82;
                newres.fields = new List<BJSON.Field>();
                newres.fields.Add(new BJSON.Field(7, "name", mj.respath));
                newres.fields.Add(new BJSON.Field(0x10, "sha1", newsha1));
                newres.fields.Add(new BJSON.Field(9, "size", BitConverter.GetBytes((long)newcompressedsize)));
                newres.fields.Add(new BJSON.Field(9, "originalSize", BitConverter.GetBytes((long)mj.data.Length)));
                newres.fields.Add(new BJSON.Field(0x8, "resType", Helpers.HexStringToByteArray(mj.restype)));
                newres.fields.Add(new BJSON.Field(0x13, "resMeta", new byte[0x10]));
                newres.fields.Add(new BJSON.Field(9, "resRid", BitConverter.GetBytes((long)0)));
                newres.fields.Add(new BJSON.Field(8, "casPatchType", BitConverter.GetBytes((int)1)));
                ((List<BJSON.Entry>)res.data).Add(newres);
                CalcTotalSize(bun);
                sb.Save();
                DbgPrint("   Job successfull!");
                break;
            }
        }
    }
}
/// <summary>
/// Converts a base-referenced bundle into a local delta: copies the bundle's
/// data from the base game's SB, rebuilds the patch SB with that bundle
/// appended and all non-base bundles re-packed, strips "idata" fields from
/// every res entry, fixes up the bundle offsets in the TOC by the new header
/// size, and saves the TOC.
/// </summary>
/// <param name="toc">Parsed patch TOC; mutated in place and saved on success.</param>
/// <param name="tocpath">TOC path relative to the game root; used to locate base TOC and patch SB.</param>
/// <param name="index">Index of the target bundle in the TOC's bundle list.</param>
/// <param name="bpath">Bundle path (id) to import from the base game.</param>
/// <returns>true on success; false if the base TOC, base bundle, or patch SB is missing.</returns>
public bool ImportBundleFromBase(TOCFile toc, string tocpath, int index, string bpath)
{
    DbgPrint("    Its a base reference! Copying in from base...");
    //Find base toc
    string basepath = GlobalStuff.FindSetting("gamepath");
    if (!File.Exists(basepath + tocpath))
    {
        DbgPrint("Error: base TOC file not found, skipping!");
        return false;
    }
    TOCFile otoc = new TOCFile(basepath + tocpath);
    //get base bundle data
    byte[] buff = otoc.ExportBundleDataByPath(bpath);
    if (buff.Length == 0)
    {
        DbgPrint("Error: base bundle not found, skipping!");
        return false;
    }
    //get old sb file
    string oldSBpath = outputPath + Path.GetDirectoryName(tocpath) + "\\" + Path.GetFileNameWithoutExtension(tocpath) + ".sb";
    if (!File.Exists(oldSBpath))
    {
        DbgPrint("Error: patch SB file not found, skipping!");
        return false;
    }
    DbgPrint("    Got copy, recompiling...");
    //recompiling new sb in memory
    MemoryStream newSB = new MemoryStream();
    FileStream oldSB = new FileStream(oldSBpath, FileMode.Open, FileAccess.Read);
    long glob_off = 0; // running write offset inside the rebuilt payload
    BJSON.Entry root = toc.lines[0];
    BJSON.Field bundles = root.fields[0];
    int count = ((List<BJSON.Entry>)bundles.data).Count();
    DbgPrint("    Recompiling SB...");
    //put one bundle after another that is not base as defined in toc
    for (int i = 0; i < count; i++)
    {
        //get entry infos
        BJSON.Entry b = ((List<BJSON.Entry>)bundles.data)[i];
        BJSON.Field f_offset = b.FindField("offset");
        BJSON.Field f_size = b.FindField("size");
        BJSON.Field f_isBase = b.FindField("base");
        //if not our target and not copied from base, copy from old SB
        if (i != index && f_isBase == null)
        {
            int size = BitConverter.ToInt32((byte[])f_size.data, 0);
            CopyFileStream(oldSB, newSB, BitConverter.ToInt64((byte[])f_offset.data, 0), size);
            f_offset.data = BitConverter.GetBytes(glob_off);
            glob_off += size;
        }
        //if target, replace data, make delta
        if (i == index)
        {
            f_offset.data = BitConverter.GetBytes(glob_off);
            f_size.data = BitConverter.GetBytes(buff.Length);
            // Rename the "base" flag field to "delta" in place: the bundle now
            // lives in the patch SB instead of referencing the base game.
            f_isBase.fieldname = "delta";
            newSB.Write(buff, 0, buff.Length);
            glob_off += buff.Length;
        }
    }
    oldSB.Close();
    //rebuilding new SB
    oldSB = new FileStream(oldSBpath, FileMode.Create, FileAccess.Write);
    //creating bundle header field
    MemoryStream t = new MemoryStream();
    // LEB128-encoded payload length, measured BEFORE the terminator below.
    Helpers.WriteLEB128(t, (int)newSB.Length);
    // NOTE(review): this appends a trailing 0 to newSB AFTER its length was
    // encoded into t, but the total size written below uses the new (larger)
    // length — presumably a list terminator; verify against the SB format.
    newSB.WriteByte(0);
    int varsize = (int)t.Length;
    //add root entry
    oldSB.WriteByte(0x82);
    Helpers.WriteLEB128(oldSB, varsize + 9 + (int)newSB.Length);
    // 0x01 tag + the ASCII string "bundles" + 0x00 terminator (9 bytes).
    byte[] buff2 = { 0x01, 0x62, 0x75, 0x6E, 0x64, 0x6C, 0x65, 0x73, 0x00 };
    oldSB.Write(buff2, 0, 9);
    oldSB.Write(t.ToArray(), 0, varsize);
    //header done, grab header offset and put all bundles
    int rel_off = (int)oldSB.Position;
    oldSB.Write(newSB.ToArray(), 0, (int)newSB.Length);
    oldSB.Close();
    //removing idata
    DbgPrint("    removing idata...");
    SBFile sb = new SBFile(oldSBpath);
    BJSON.Entry newroot = sb.lines[0];
    BJSON.Field newbundles = newroot.FindField("bundles");
    List<BJSON.Entry> newbun_list = (List<BJSON.Entry>)newbundles.data;
    for (int i = 0; i < newbun_list.Count; i++)
    {
        BJSON.Field f_res = newbun_list[i].FindField("res");
        List<BJSON.Entry> newres_list = (List<BJSON.Entry>)f_res.data;
        for (int j = 0; j < newres_list.Count; j++)
            newres_list[j].RemoveField("idata");
    }
    sb.Save();
    DbgPrint("    Recompiling TOC...");
    //correct offsets in toc by new adding header offset
    count = ((List<BJSON.Entry>)bundles.data).Count();
    for (int i = 0; i < count; i++)
    {
        BJSON.Entry b = ((List<BJSON.Entry>)bundles.data)[i];
        BJSON.Field f_offset = b.FindField("offset");
        BJSON.Field f_isBase = b.FindField("base");
        //if is in sb file, update
        // (the target bundle's "base" field was renamed "delta" above, so it
        // passes this check too and gets shifted like the others)
        if (f_isBase == null)
        {
            long off = BitConverter.ToInt64((byte[])f_offset.data, 0);
            off += rel_off;
            f_offset.data = BitConverter.GetBytes(off);
        }
    }
    toc.Save();
    DbgPrint("    Bundle imported");
    return true;
}
/// <summary>
/// Writes the bundle offsets and serialized sizes recorded in a CAS .sb file
/// back into the matching entries of its .toc file, then saves the TOC.
/// Also dumps each matched bundle entry as a .bin file next to the .sb.
/// Does nothing for non-CAS TOCs.
/// </summary>
/// <param name="toc">Parsed TOC whose companion .sb (same lowercased path, .sb extension) is read.</param>
public void UpdateTOC(TOCFile toc)
{
    string sbPath = toc.MyPath.ToLower().Replace(".toc", ".sb");
    if (!toc.iscas)
        return;
    SBFile sbFile = new SBFile(sbPath);
    BJSON.Field sbBundles = sbFile.lines[0].FindField("bundles");
    BJSON.Field tocBundles = toc.lines[0].FindField("bundles");
    foreach (BJSON.Entry sbBundle in (List<BJSON.Entry>)sbBundles.data)
    {
        string sbBundlePath = (string)sbBundle.FindField("path").data;
        // Match each SB bundle to its TOC entry by exact path/id equality.
        foreach (BJSON.Entry tocBundle in (List<BJSON.Entry>)tocBundles.data)
        {
            string tocBundleId = (string)tocBundle.FindField("id").data;
            if (tocBundleId != sbBundlePath)
                continue;
            // Point the TOC entry at the bundle's current location in the SB.
            tocBundle.FindField("offset").data = BitConverter.GetBytes(sbBundle.offset);
            // Serialize the bundle entry to measure its on-disk size
            // (and leave a .bin dump beside the .sb for inspection).
            MemoryStream m = new MemoryStream();
            BJSON.WriteEntry(m, sbBundle);
            string dir = Path.GetDirectoryName(sbPath);
            File.WriteAllBytes(dir + "\\" + Path.GetFileName(sbBundlePath) + ".bin", m.ToArray());
            tocBundle.FindField("size").data = BitConverter.GetBytes((uint)m.Length);
        }
    }
    toc.Save();
    DbgPrint("   TOC file offsets updated.");
}
/// <summary>
/// Menu handler: prompts for a raw .bundle file and, if one is chosen,
/// loads it into the tree view.
/// </summary>
private void openRawBundleToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Fix: OpenFileDialog is IDisposable and was previously never disposed.
    using (OpenFileDialog d = new OpenFileDialog())
    {
        d.Filter = "*.bundle|*.bundle";
        if (d.ShowDialog() == System.Windows.Forms.DialogResult.OK)
        {
            toolStrip1.Visible = true;
            splitContainer2.BringToFront();
            sb = new SBFile(d.FileName);
            RefreshTree();
        }
    }
}