/// <summary>
/// Loads the BJSON entries from the file at MyPath into the lines list.
/// </summary>
private void ReadFile()
{
    // Fix: use a using block so the file handle is released even if
    // BJSON.ReadEntries throws (the original only called Close() on success).
    using (FileStream fs = new FileStream(MyPath, FileMode.Open, FileAccess.Read))
    {
        lines = new List<BJSON.Entry>();
        BJSON.ReadEntries(fs, lines);
    }
}
/// <summary>
/// Writes every loaded BJSON entry to a new file at the given path,
/// overwriting any existing file.
/// </summary>
/// <param name="path">Destination file path.</param>
public void Save(string path)
{
    // Fix: use a using block so the file handle is released even if
    // BJSON.WriteEntry throws (the original only called Close() on success).
    using (FileStream fs = new FileStream(path, FileMode.Create, FileAccess.Write))
    {
        foreach (BJSON.Entry e in lines)
        {
            BJSON.WriteEntry(fs, e);
        }
    }
}
/// <summary>
/// Builds a Bundle from a BJSON entry by mapping each known field name
/// onto the corresponding Bundle member. Unknown fields are ignored.
/// </summary>
/// <param name="e">The BJSON entry describing the bundle.</param>
/// <returns>A populated Bundle with iscas set and empty child lists by default.</returns>
public static Bundle Create(BJSON.Entry e)
{
    Bundle bundle = new Bundle();
    bundle.iscas = true;
    bundle.ebx = new List<ebxtype>();
    bundle.res = new List<restype>();
    bundle.chunk = new List<chunktype>();
    foreach (BJSON.Field field in e.fields)
    {
        switch (field.fieldname)
        {
            case "path":
                bundle.path = (string)field.data;
                break;
            case "magicSalt":
                bundle.salt = BitConverter.ToUInt32((byte[])field.data, 0).ToString("X4");
                break;
            case "alignMembers":
                bundle.align = (bool)field.data;
                break;
            case "storeCompressedSizes":
                bundle.compressed = (bool)field.data;
                break;
            case "totalSize":
                bundle.totalsize = BitConverter.ToUInt64((byte[])field.data, 0);
                break;
            case "dbxtotalSize":
                bundle.dbxtotalsize = BitConverter.ToUInt64((byte[])field.data, 0);
                break;
            case "ebx":
                bundle.ebx = ReadEbx(field);
                break;
            case "dbx":
                bundle.dbx = ReadDbx(field);
                break;
            case "res":
                bundle.res = ReadRes(field);
                break;
            case "chunks":
            case "chunks0":
                // Both chunk lists accumulate into the same collection.
                bundle.chunk.AddRange(ReadChunks(field));
                break;
        }
    }
    return bundle;
}
/// <summary>
/// Writes the file back to MyPath in one of two layouts selected by magic:
/// 0x03CED100 writes the BJSON entries in the clear; 0x01CED100 writes them
/// XOR-obfuscated with the stored key. Mirrors the format consumed by ReadFile.
/// </summary>
private void WriteFile()
{
    using (FileStream fs = new FileStream(MyPath, FileMode.Create, FileAccess.Write))
    {
        Helpers.WriteInt(fs, magic);
        Helpers.WriteInt(fs, 0);
        fs.Write(serial, 0, serial.Length);
        if (magic == 0x03CED100)
        {
            // Plain layout: 0x122 bytes of zero padding, then the raw BJSON entries.
            byte[] buff = new byte[0x122];
            fs.Write(buff, 0, 0x122);
            foreach (BJSON.Entry e in lines)
            {
                BJSON.WriteEntry(fs, e);
            }
        }
        else if (magic == 0x01CED100)
        {
            // Obfuscated layout: 0x1E bytes of zero padding, the 0x101-byte XOR key,
            // three zero bytes, then the obfuscated payload.
            byte[] buff = new byte[0x1E];
            fs.Write(buff, 0, 0x1E);
            fs.Write(xorKey, 0, 0x101);
            fs.WriteByte(0);
            fs.WriteByte(0);
            fs.WriteByte(0);
            // m accumulates the raw BJSON payload (zero-terminated);
            // m2 wraps it in a container: tag byte 0x81 + LEB128 length + payload.
            MemoryStream m = new MemoryStream();
            MemoryStream m2 = new MemoryStream();
            foreach (BJSON.Entry e in lines)
            {
                BJSON.WriteEntry(m, e);
            }
            m.WriteByte(0);
            m2.WriteByte(0x81);
            Helpers.WriteLEB128(m2, (int)m.Length);
            m2.Write(m.ToArray(), 0, (int)m.Length);
            m2.Seek(0, 0);
            // Each output byte is payload ^ key ^ 0x7B, the key cycling every 0x101 bytes.
            for (int i = 0; i < m2.Length; i++)
            {
                byte k = xorKey[i % 0x101];
                byte b = (byte)m2.ReadByte();
                fs.WriteByte((byte)(0x7B ^ k ^ b));
            }
        }
    }
}
/// <summary>
/// Serializes the bundle to a stream in two passes: placeholder size/offset
/// fields are written first, then patched in place once the real positions
/// are known, and finally the payload data sections are appended.
/// </summary>
/// <param name="s">Writable, seekable destination stream.</param>
public void Save(Stream s)
{
    Helpers.WriteLEUInt(s, 0);                  // placeholder: header size, patched below
    Helpers.WriteLEUInt(s, Header.magic);
    Helpers.WriteLEInt(s, Sha1List.Count);
    Helpers.WriteLEInt(s, EbxList.Count);
    Helpers.WriteLEInt(s, ResList.Count);
    Helpers.WriteLEInt(s, ChunkList.Count);
    Helpers.WriteLEUInt(s, 0);                  // placeholder: string table offset
    Helpers.WriteLEUInt(s, 0);                  // placeholder: chunkMeta offset
    Helpers.WriteLEUInt(s, 0);                  // placeholder: chunkMeta size
    WriteSha1List(s);
    long ebxpos = s.Position;
    WriteEbxList(s);
    WriteResList(s);
    WriteChunkList(s);
    // Recorded offsets are relative to the byte after the leading size dword,
    // hence the -4 adjustments here and below.
    long ch_start = s.Position - 4;
    if (Header.chunkCount != 0)
    {
        BJSON.WriteField(s, ChunkMeta);
    }
    long st_start = s.Position - 4;
    WriteStringTable(s);
    long tpos = s.Position;
    //Name Fix Up
    // Rewrite the ebx list now that the string table has assigned name offsets.
    s.Seek(ebxpos, 0);
    WriteEbxList(s);
    //Header Size Fix Up
    s.Seek(0, 0);
    Helpers.WriteLEUInt(s, (uint)tpos - 4);
    s.Seek(0x18, 0);
    Helpers.WriteLEUInt(s, (uint)st_start); //String Table Offset
    Helpers.WriteLEUInt(s, (uint)ch_start); //chunkMeta Offset
    Helpers.WriteLEUInt(s, (uint)(st_start - ch_start)); //chunkMeta Size
    // Return to the end of the fixed-up header region and emit the payloads.
    s.Seek(tpos, 0);
    WriteEbxListData(s);
    WriteResListData(s);
    WriteChunkListData(s);
}
/// <summary>
/// Recomputes a bundle's "totalSize" field as the sum of the sizes of its
/// ebx, res and chunk entries, storing the result as 8 little-endian bytes.
/// </summary>
/// <param name="bun">Bundle entry whose child lists are summed.</param>
public void CalcTotalSize(BJSON.Entry bun)
{
    BJSON.Field totalsize = bun.FindField("totalSize");
    // Fix: guard against a bundle with no totalSize field — the original
    // dereferenced the lookup result unconditionally (NullReferenceException)
    // while null-checking every other field.
    if (totalsize == null)
    {
        DbgPrint(" Warning: bundle has no totalSize field, skipping size calculation");
        return;
    }
    BJSON.Field ebx = bun.FindField("ebx");
    BJSON.Field res = bun.FindField("res");
    BJSON.Field chunks = bun.FindField("chunks");
    long size = 0;
    if (ebx != null)
        foreach (BJSON.Entry e in (List<BJSON.Entry>)ebx.data)
            size += BitConverter.ToInt64((byte[])e.FindField("size").data, 0);
    if (res != null)
        foreach (BJSON.Entry e in (List<BJSON.Entry>)res.data)
            size += BitConverter.ToInt64((byte[])e.FindField("size").data, 0);
    if (chunks != null)
        foreach (BJSON.Entry e in (List<BJSON.Entry>)chunks.data)
            // Chunk sizes are read as 4-byte values, unlike the 8-byte ebx/res
            // sizes — presumably the on-disk field is narrower; TODO confirm.
            size += BitConverter.ToInt32((byte[])e.FindField("size").data, 0);
    totalsize.data = BitConverter.GetBytes(size);
    DbgPrint(" Total size calculated: " + size.ToString("X"));
}
/// <summary>
/// Reads the file at MyPath: validates the magic, captures the serial block,
/// then parses the BJSON entries either directly (0x03CED100) or after
/// de-obfuscating the XOR'd payload (0x01CED100).
/// </summary>
private void ReadFile()
{
    using (FileStream fs = new FileStream(MyPath, FileMode.Open, FileAccess.Read))
    {
        magic = Helpers.ReadInt(fs);
        if (magic != 0x03CED100 && magic != 0x01CED100)
        {
            return;
        }
        // Skip zero padding, then capture the serial: first non-zero byte plus 0x101 more.
        byte b = (byte)fs.ReadByte();
        while (b == 0)
        {
            b = (byte)fs.ReadByte();
        }
        MemoryStream m = new MemoryStream();
        m.WriteByte(b);
        for (int i = 0; i < 0x101; i++)
        {
            m.WriteByte((byte)fs.ReadByte());
        }
        serial = m.ToArray();
        if (magic == 0x03CED100)
        {
            fs.Seek(0x22C, 0);
            lines = new List<BJSON.Entry>();
            BJSON.ReadEntries(fs, lines);
        }
        else if (magic == 0x01CED100)
        {
            fs.Seek(0x128, SeekOrigin.Begin);
            // Fix: Stream.Read may return fewer bytes than requested; the original
            // ignored the return value and could leave the key partially filled.
            int total = 0;
            while (total < 257)
            {
                int read = fs.Read(xorKey, total, 257 - total);
                if (read <= 0)
                    throw new EndOfStreamException("Unexpected end of file while reading XOR key");
                total += read;
            }
            fs.Seek(3, SeekOrigin.Current);
            unxoredStream = new MemoryStream();
            BuildUnxoredStream(fs, unxoredStream);
            unxoredStream.Seek(0, SeekOrigin.Begin);
            // Consume the leading dword before the entries — value is discarded.
            Helpers.ReadUInt(unxoredStream);
            lines = new List<BJSON.Entry>();
            BJSON.ReadEntries(unxoredStream, lines);
        }
    }
}
/// <summary>
/// Parses a bundle from the stream: header fields, SHA1/ebx/res/chunk tables,
/// optional chunk metadata, name strings, then the payload data sections.
/// </summary>
/// <param name="data">Seekable stream positioned at the start of the bundle.</param>
/// <param name="fast">Forwarded to the data readers; presumably skips payload
/// processing for a quicker load — TODO confirm against the readers.</param>
public void Load(Stream data, bool fast = false)
{
    uint headersize = Helpers.ReadLEUInt(data);
    // All header offsets are relative to this position (just after the size dword).
    long startoffset = data.Position;
    Header = new HeaderStruct();
    Header.magic = Helpers.ReadLEUInt(data);
    Header.totalCount = Helpers.ReadLEUInt(data);
    Header.ebxCount = Helpers.ReadLEUInt(data);
    Header.resCount = Helpers.ReadLEUInt(data);
    Header.chunkCount = Helpers.ReadLEUInt(data);
    Header.stringOffset = Helpers.ReadLEUInt(data);
    Header.chunkMetaOffset = Helpers.ReadLEUInt(data);
    Header.chunkMetaSize = Helpers.ReadLEUInt(data);
    ReadSha1List(data);
    ReadEbxList(data);
    ReadResList(data);
    ReadChunkList(data);
    if (Header.chunkCount != 0)
    {
        ChunkMeta = BJSON.ReadField(data);
    }
    else
    {
        ChunkMeta = null;
    }
    // Ebx and res names share one string table; seek back to its start for each pass.
    data.Seek(startoffset + Header.stringOffset, 0);
    ReadEbxListNames(data);
    data.Seek(startoffset + Header.stringOffset, 0);
    ReadResListNames(data);
    // Payload data begins immediately after the header region.
    data.Seek(startoffset + headersize, 0);
    ReadEbxListData(data, fast);
    ReadResListData(data, fast);
    ReadChunkListData(data, fast);
    ApplySHA1s();
}
/// <summary>
/// Converts a BJSON "res" field into a list of restype records, keeping a
/// back-link to each source entry. Unknown sub-fields are ignored.
/// </summary>
/// <param name="f">Field whose data is a list of BJSON entries.</param>
/// <returns>One restype per entry, in source order.</returns>
private static List<restype> ReadRes(BJSON.Field f)
{
    List<restype> result = new List<restype>();
    foreach (BJSON.Entry entry in (List<BJSON.Entry>)f.data)
    {
        restype item = new restype();
        item.link = entry;
        foreach (BJSON.Field prop in entry.fields)
        {
            switch (prop.fieldname)
            {
                case "name":
                    item.name = (string)prop.data;
                    break;
                case "sha1":
                    item.SHA1 = (byte[])prop.data;
                    break;
                case "size":
                    item.size = (byte[])prop.data;
                    break;
                case "originalSize":
                    item.osize = (byte[])prop.data;
                    break;
                case "resType":
                    item.rtype = (byte[])prop.data;
                    break;
                case "casPatchType":
                    item.casPatchType = Helpers.ReadInt(new MemoryStream((byte[])prop.data));
                    break;
                case "baseSha1":
                    item.baseSha1 = (byte[])prop.data;
                    break;
                case "deltaSha1":
                    item.deltaSha1 = (byte[])prop.data;
                    break;
            }
        }
        result.Add(item);
    }
    return result;
}
/// <summary>
/// Converts a BJSON "ebx" field into a list of ebxtype records, keeping a
/// back-link to each source entry. SHA1 fields default to empty arrays so
/// later consumers never see null. Unknown sub-fields are ignored.
/// </summary>
/// <param name="f">Field whose data is a list of BJSON entries.</param>
/// <returns>One ebxtype per entry, in source order.</returns>
private static List<ebxtype> ReadEbx(BJSON.Field f)
{
    List<ebxtype> result = new List<ebxtype>();
    foreach (BJSON.Entry entry in (List<BJSON.Entry>)f.data)
    {
        ebxtype item = new ebxtype();
        item.link = entry;
        item.baseSha1 = item.deltaSha1 = item.Sha1 = new byte[0];
        foreach (BJSON.Field prop in entry.fields)
        {
            switch (prop.fieldname)
            {
                case "name":
                    item.name = (string)prop.data;
                    break;
                case "sha1":
                    item.Sha1 = (byte[])prop.data;
                    break;
                case "size":
                    item.size = (byte[])prop.data;
                    break;
                case "originalSize":
                    item.originalSize = (byte[])prop.data;
                    break;
                case "casPatchType":
                    item.casPatchType = Helpers.ReadInt(new MemoryStream((byte[])prop.data));
                    break;
                case "baseSha1":
                    item.baseSha1 = (byte[])prop.data;
                    break;
                case "deltaSha1":
                    item.deltaSha1 = (byte[])prop.data;
                    break;
            }
        }
        result.Add(item);
    }
    return result;
}
/// <summary>
/// Converts a BJSON "dbx" field into a list of dbxtype records, keeping a
/// back-link to each source entry. Unknown sub-fields are ignored.
/// </summary>
/// <param name="f">Field whose data is a list of BJSON entries.</param>
/// <returns>One dbxtype per entry, in source order.</returns>
private static List<dbxtype> ReadDbx(BJSON.Field f)
{
    List<dbxtype> result = new List<dbxtype>();
    foreach (BJSON.Entry entry in (List<BJSON.Entry>)f.data)
    {
        dbxtype item = new dbxtype();
        item.link = entry;
        foreach (BJSON.Field prop in entry.fields)
        {
            switch (prop.fieldname)
            {
                case "name":
                    item.name = (string)prop.data;
                    break;
                case "sha1":
                    item.SHA1 = (byte[])prop.data;
                    break;
                case "size":
                    item.size = (byte[])prop.data;
                    break;
                case "originalSize":
                    item.osize = (byte[])prop.data;
                    break;
            }
        }
        result.Add(item);
    }
    return result;
}
/// <summary>
/// Converts a BJSON chunk-list field into chunktype records, keeping a
/// back-link to each source entry. Unknown sub-fields are ignored.
/// </summary>
/// <param name="f">Field whose data is a list of BJSON entries.</param>
/// <returns>One chunktype per entry, in source order.</returns>
private static List<chunktype> ReadChunks(BJSON.Field f)
{
    List<chunktype> result = new List<chunktype>();
    foreach (BJSON.Entry entry in (List<BJSON.Entry>)f.data)
    {
        chunktype item = new chunktype();
        item.link = entry;
        foreach (BJSON.Field prop in entry.fields)
        {
            switch (prop.fieldname)
            {
                case "id":
                    item.id = (byte[])prop.data;
                    break;
                case "sha1":
                    item.SHA1 = (byte[])prop.data;
                    break;
                case "size":
                    item.size = (byte[])prop.data;
                    break;
                case "casPatchType":
                    item.casPatchType = Helpers.ReadInt(new MemoryStream((byte[])prop.data));
                    break;
                case "baseSha1":
                    item.baseSha1 = (byte[])prop.data;
                    break;
                case "deltaSha1":
                    item.deltaSha1 = (byte[])prop.data;
                    break;
            }
        }
        result.Add(item);
    }
    return result;
}
/// <summary>
/// Reads TOC chunk descriptors out of a BJSON field and appends one
/// TOCChunkInfoStruct per entry to the chunks list.
/// </summary>
/// <param name="f">Field whose data is a list of BJSON entries.</param>
private void ProcessChunks(BJSON.Field f)
{
    foreach (BJSON.Entry entry in (List<BJSON.Entry>)f.data)
    {
        TOCChunkInfoStruct info = new TOCChunkInfoStruct();
        foreach (BJSON.Field prop in entry.fields)
        {
            switch (prop.fieldname)
            {
                case "id":
                    info.id = (byte[])prop.data;
                    break;
                case "sha1":
                    info.sha1 = (byte[])prop.data;
                    break;
                case "offset":
                    info.offset = Helpers.ReadInt(new MemoryStream((byte[])prop.data));
                    break;
                case "size":
                    info.size = Helpers.ReadInt(new MemoryStream((byte[])prop.data));
                    break;
            }
        }
        chunks.Add(info);
    }
}
/// <summary>
/// Reads TOC bundle descriptors out of a BJSON field and appends one
/// TOCBundleInfoStruct per entry to the bundles list. isdelta defaults
/// to false unless a "delta" sub-field overrides it.
/// </summary>
/// <param name="f">Field whose data is a list of BJSON entries.</param>
private void ProcessBundles(BJSON.Field f)
{
    foreach (BJSON.Entry entry in (List<BJSON.Entry>)f.data)
    {
        TOCBundleInfoStruct info = new TOCBundleInfoStruct();
        info.isdelta = false;
        foreach (BJSON.Field prop in entry.fields)
        {
            switch (prop.fieldname)
            {
                case "id":
                    info.id = (string)prop.data;
                    break;
                case "offset":
                    info.offset = Helpers.ReadInt(new MemoryStream((byte[])prop.data));
                    break;
                case "size":
                    info.size = Helpers.ReadInt(new MemoryStream((byte[])prop.data));
                    break;
                case "delta":
                    info.isdelta = (bool)prop.data;
                    break;
                case "base":
                    info.isbase = (bool)prop.data;
                    break;
            }
        }
        bundles.Add(info);
    }
}