Example #1
        // Parses the TOC "chunks" field: each entry becomes a TOCChunkInfoStruct
        // holding its id, sha1, offset and size.
        private void ProcessChunks(BJSON.Field f)
        {
            List<BJSON.Entry> list = (List<BJSON.Entry>)f.data;

            foreach (BJSON.Entry e in list)
            {
                TOCChunkInfoStruct info = new TOCChunkInfoStruct();
                foreach (BJSON.Field f2 in e.fields)
                {
                    switch (f2.fieldname)
                    {
                    case "id":
                        info.id = (byte[])f2.data;
                        break;

                    case "sha1":
                        info.sha1 = (byte[])f2.data;
                        break;

                    case "offset":
                        info.offset = Helpers.ReadInt(new MemoryStream((byte[])f2.data));
                        break;

                    case "size":
                        info.size = Helpers.ReadInt(new MemoryStream((byte[])f2.data));
                        break;
                    }
                }
                chunks.Add(info);
            }
        }
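
Note: the BJSON.Entry and BJSON.Field types used throughout these examples belong to the tool's own binary-JSON reader and are not shown here. The sketch below is only an assumption about their shape, inferred from the casts and field accesses in the examples (names and member types are illustrative, not the actual definitions):

    using System.Collections.Generic;

    public class BJSON
    {
        // Assumed shape only: a field is a named, loosely typed value
        // (string, bool, byte[] or a nested List<Entry>), matching the casts above.
        public class Field
        {
            public byte type;
            public string fieldname;
            public object data;

            public Field() { }
            public Field(byte type, string fieldname, object data)
            {
                this.type = type;
                this.fieldname = fieldname;
                this.data = data;
            }
        }

        // Assumed shape only: an entry is a typed record made of fields.
        public class Entry
        {
            public byte type;
            public string type87name;
            public List<Field> fields = new List<Field>();

            public Field FindField(string name)
            {
                foreach (Field f in fields)
                    if (f.fieldname == name)
                        return f;
                return null;
            }
        }
    }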
Example #2
 // Reads a bundle from the stream: header fields, SHA1 list, ebx/res/chunk lists,
 // chunk metadata, the name strings and the per-entry data.
 public void Load(Stream data, bool fast = false)
 {
     uint headersize = Helpers.ReadLEUInt(data);
     long startoffset = data.Position;
     Header = new HeaderStruct();
     Header.magic = Helpers.ReadLEUInt(data);
     Header.totalCount = Helpers.ReadLEUInt(data);
     Header.ebxCount = Helpers.ReadLEUInt(data);
     Header.resCount = Helpers.ReadLEUInt(data);
     Header.chunkCount = Helpers.ReadLEUInt(data);
     Header.stringOffset = Helpers.ReadLEUInt(data);
     Header.chunkMetaOffset = Helpers.ReadLEUInt(data);
     Header.chunkMetaSize = Helpers.ReadLEUInt(data);
     ReadSha1List(data);
     ReadEbxList(data);
     ReadResList(data);
     ReadChunkList(data);
     if (Header.chunkCount != 0)
         ChunkMeta = BJSON.ReadField(data);
     else
         ChunkMeta = null;
     data.Seek(startoffset + Header.stringOffset, SeekOrigin.Begin);
     ReadEbxListNames(data);
     data.Seek(startoffset + Header.stringOffset, SeekOrigin.Begin);
     ReadResListNames(data);
     data.Seek(startoffset + headersize, SeekOrigin.Begin);
     ReadEbxListData(data, fast);
     ReadResListData(data, fast);
     ReadChunkListData(data, fast);
     ApplySHA1s();
 }
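
Note: Helpers.ReadLEUInt and Helpers.ReadInt are the tool's own stream helpers. A minimal sketch of little-endian readers with the same call shape is given below (hypothetical class and implementation, assuming 4-byte values; the real Helpers class may differ):

    using System.IO;

    public static class StreamHelpersSketch
    {
        // Reads a 4-byte little-endian unsigned integer from the stream.
        public static uint ReadLEUInt(Stream s)
        {
            byte[] buf = new byte[4];
            s.Read(buf, 0, 4);
            return (uint)(buf[0] | (buf[1] << 8) | (buf[2] << 16) | (buf[3] << 24));
        }

        // Reads a 4-byte little-endian signed integer from the stream.
        public static int ReadInt(Stream s)
        {
            byte[] buf = new byte[4];
            s.Read(buf, 0, 4);
            return buf[0] | (buf[1] << 8) | (buf[2] << 16) | (buf[3] << 24);
        }
    }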
Example #3
        // Parses a bundle's "dbx" list into dbxtype records (name, sha1, sizes).
        private static List<dbxtype> ReadDbx(BJSON.Field f)
        {
            List<dbxtype> res = new List<dbxtype>();
            List<BJSON.Entry> list = (List<BJSON.Entry>)f.data;

            foreach (BJSON.Entry e in list)
            {
                dbxtype dbx = new dbxtype();
                dbx.link = e;
                foreach (BJSON.Field f2 in e.fields)
                {
                    switch (f2.fieldname)
                    {
                    case "name":
                        dbx.name = (string)f2.data;
                        break;

                    case "sha1":
                        dbx.SHA1 = (byte[])f2.data;
                        break;

                    case "size":
                        dbx.size = (byte[])f2.data;
                        break;

                    case "originalSize":
                        dbx.osize = (byte[])f2.data;
                        break;
                    }
                }
                res.Add(dbx);
            }
            return res;
        }
Example #4
        // Parses a bundle's "res" list into restype records (name, sha1, sizes,
        // resType, casPatchType and base/delta SHA1s).
        private static List<restype> ReadRes(BJSON.Field f)
        {
            List<restype> res = new List<restype>();
            List<BJSON.Entry> list = (List<BJSON.Entry>)f.data;

            foreach (BJSON.Entry e in list)
            {
                restype r = new restype();
                r.link = e;
                foreach (BJSON.Field f2 in e.fields)
                {
                    switch (f2.fieldname)
                    {
                    case "name":
                        r.name = (string)f2.data;
                        break;

                    case "sha1":
                        r.SHA1 = (byte[])f2.data;
                        break;

                    case "size":
                        r.size = (byte[])f2.data;
                        break;

                    case "originalSize":
                        r.osize = (byte[])f2.data;
                        break;

                    case "resType":
                        r.rtype = (byte[])f2.data;
                        break;

                    case "casPatchType":
                        r.casPatchType = Helpers.ReadInt(new MemoryStream((byte[])f2.data));
                        break;

                    case "baseSha1":
                        r.baseSha1 = (byte[])f2.data;
                        break;

                    case "deltaSha1":
                        r.deltaSha1 = (byte[])f2.data;
                        break;
                    }
                }
                res.Add(r);
            }
            return res;
        }
Example #5
        // Parses a bundle's "ebx" list into ebxtype records (name, sha1, sizes,
        // casPatchType and base/delta SHA1s).
        private static List<ebxtype> ReadEbx(BJSON.Field f)
        {
            List<ebxtype> res = new List<ebxtype>();
            List<BJSON.Entry> list = (List<BJSON.Entry>)f.data;

            foreach (BJSON.Entry e in list)
            {
                ebxtype ebx = new ebxtype();
                ebx.link = e;
                // default all SHA1 fields to empty so entries that lack them stay usable
                ebx.baseSha1 = ebx.deltaSha1 = ebx.Sha1 = new byte[0];
                foreach (BJSON.Field f2 in e.fields)
                {
                    switch (f2.fieldname)
                    {
                    case "name":
                        ebx.name = (string)f2.data;
                        break;

                    case "sha1":
                        ebx.Sha1 = (byte[])f2.data;
                        break;

                    case "size":
                        ebx.size = (byte[])f2.data;
                        break;

                    case "originalSize":
                        ebx.originalSize = (byte[])f2.data;
                        break;

                    case "casPatchType":
                        ebx.casPatchType = Helpers.ReadInt(new MemoryStream((byte[])f2.data));
                        break;

                    case "baseSha1":
                        ebx.baseSha1 = (byte[])f2.data;
                        break;

                    case "deltaSha1":
                        ebx.deltaSha1 = (byte[])f2.data;
                        break;
                    }
                }
                res.Add(ebx);
            }
            return res;
        }
Example #6
        // Parses a bundle's "chunks" list into chunktype records (id, sha1, size,
        // casPatchType and base/delta SHA1s).
        private static List<chunktype> ReadChunks(BJSON.Field f)
        {
            List<chunktype> res = new List<chunktype>();
            List<BJSON.Entry> list = (List<BJSON.Entry>)f.data;

            foreach (BJSON.Entry e in list)
            {
                chunktype c = new chunktype();
                c.link = e;
                foreach (BJSON.Field f2 in e.fields)
                {
                    switch (f2.fieldname)
                    {
                    case "id":
                        c.id = (byte[])f2.data;
                        break;

                    case "sha1":
                        c.SHA1 = (byte[])f2.data;
                        break;

                    case "size":
                        c.size = (byte[])f2.data;
                        break;

                    case "casPatchType":
                        c.casPatchType = Helpers.ReadInt(new MemoryStream((byte[])f2.data));
                        break;

                    case "baseSha1":
                        c.baseSha1 = (byte[])f2.data;
                        break;

                    case "deltaSha1":
                        c.deltaSha1 = (byte[])f2.data;
                        break;
                    }
                }
                res.Add(c);
            }
            return res;
        }
Example #7
        // Reads a bundle from the stream: header fields, SHA1 list, ebx/res/chunk lists,
        // chunk metadata, the name strings and the per-entry data.
        public void Load(Stream data, bool fast = false)
        {
            uint headersize  = Helpers.ReadLEUInt(data);
            long startoffset = data.Position;

            Header                 = new HeaderStruct();
            Header.magic           = Helpers.ReadLEUInt(data);
            Header.totalCount      = Helpers.ReadLEUInt(data);
            Header.ebxCount        = Helpers.ReadLEUInt(data);
            Header.resCount        = Helpers.ReadLEUInt(data);
            Header.chunkCount      = Helpers.ReadLEUInt(data);
            Header.stringOffset    = Helpers.ReadLEUInt(data);
            Header.chunkMetaOffset = Helpers.ReadLEUInt(data);
            Header.chunkMetaSize   = Helpers.ReadLEUInt(data);
            ReadSha1List(data);
            ReadEbxList(data);
            ReadResList(data);
            ReadChunkList(data);
            if (Header.chunkCount != 0)
            {
                ChunkMeta = BJSON.ReadField(data);
            }
            else
            {
                ChunkMeta = null;
            }
            data.Seek(startoffset + Header.stringOffset, SeekOrigin.Begin);
            ReadEbxListNames(data);
            data.Seek(startoffset + Header.stringOffset, SeekOrigin.Begin);
            ReadResListNames(data);
            data.Seek(startoffset + headersize, SeekOrigin.Begin);
            ReadEbxListData(data, fast);
            ReadResListData(data, fast);
            ReadChunkListData(data, fast);
            ApplySHA1s();
        }
Example #8
        // Parses the TOC "bundles" field: each entry becomes a TOCBundleInfoStruct
        // with its id, offset, size and base/delta flags.
        private void ProcessBundles(BJSON.Field f)
        {
            List<BJSON.Entry> list = (List<BJSON.Entry>)f.data;

            foreach (BJSON.Entry e in list)
            {
                TOCBundleInfoStruct info = new TOCBundleInfoStruct();
                info.isdelta = false;
                foreach (BJSON.Field f2 in e.fields)
                {
                    switch (f2.fieldname)
                    {
                    case "id":
                        info.id = (string)f2.data;
                        break;

                    case "offset":
                        info.offset = Helpers.ReadInt(new MemoryStream((byte[])f2.data));
                        break;

                    case "size":
                        info.size = Helpers.ReadInt(new MemoryStream((byte[])f2.data));
                        break;

                    case "delta":
                        info.isdelta = (bool)f2.data;
                        break;

                    case "base":
                        info.isbase = (bool)f2.data;
                        break;
                    }
                }
                bundles.Add(info);
            }
        }
Example #9
 // Rebuilds the .sb file that belongs to tocname: every bundle payload is copied
 // from the original SB (or replaced/duplicated as selected), the BJSON "bundles"
 // wrapper is rewritten, and the TOC offsets are rebased onto the new file.
 private bool BuildSB(string tocname)
 {
     rtb2.AppendText("Building SB file...\n");
     string dir = Path.GetDirectoryName(tocname) + "\\";
     string filename = Path.GetFileNameWithoutExtension(tocname) + ".sb";
     string odir = Path.GetDirectoryName(toc.MyPath) + "\\";
     string ofilename = Path.GetFileNameWithoutExtension(toc.MyPath) + ".sb";
     rtb2.AppendText("Writing " + filename + " ...\n");
     try
     {
         BJSON.Entry root = toc.lines[0];
         BJSON.Field bundles = root.FindField("bundles");
         BJSON.Field tbun = bundles;
         List<BJSON.Entry> list = (List<BJSON.Entry>)bundles.data;
         FileStream fs = new FileStream(dir + filename, FileMode.Create, FileAccess.Write);
         FileStream ofs = new FileStream(odir + ofilename, FileMode.Open, FileAccess.Read);
         MemoryStream m = new MemoryStream();
         long gl_off = 0; // running offset of each bundle within the rebuilt payload
         for (int i = 0; i < list.Count; i++)
         {
             BJSON.Entry e = list[i];
             long offset = 0;
             int size = 0;
             bool isbase = false;
             BJSON.Field offset_field = new BJSON.Field();
             BJSON.Field size_field = new BJSON.Field();
             BJSON.Field isbase_field = new BJSON.Field();
             foreach (BJSON.Field f in e.fields)
                 switch (f.fieldname)
                 {
                     case "offset":
                         offset = BitConverter.ToInt64((byte[])f.data, 0);
                         offset_field = f;
                         break;
                     case "size":
                         size = BitConverter.ToInt32((byte[])f.data, 0);
                         size_field = f;
                         break;
                     case "base":
                         isbase = (bool)f.data;
                         isbase_field = f;
                         break;
                 }
             if (SelectForReplacement[i] == null)
             {
                 // no replacement selected: keep the original payload (base bundles are skipped)
                 if (isbase)
                     continue;
                 offset_field.data = BitConverter.GetBytes(gl_off);
                 CopyFileStream(ofs, m, offset, size);
             }
             else
             {
                 // replacement selected: write the new file's bytes and clear the base flag
                 byte[] buf = File.ReadAllBytes(SelectForReplacement[i]);
                 size = buf.Length;
                 if (isbase)
                     isbase_field.data = false;
                 offset_field.data = BitConverter.GetBytes(gl_off);
                 size_field.data = BitConverter.GetBytes((long)size);
                 m.Write(buf, 0, size);
             }
             gl_off += size;
             if (SelectForDuplication[i])
             {
                 // duplicate the bundle entry right after the current one, cloning all fields
                 BJSON.Entry te = new BJSON.Entry();
                 te.type = e.type;
                 te.type87name = e.type87name;
                 te.fields = new List<BJSON.Field>();
                 foreach (BJSON.Field f in e.fields)
                 {
                     BJSON.Field tf = new BJSON.Field();
                     tf.fieldname = f.fieldname;
                     tf.type = f.type;
                     switch (f.fieldname)
                     {
                         case "offset":
                             tf.data = BitConverter.GetBytes(BitConverter.ToInt64((byte[])f.data, 0));
                             break;
                         case "size":
                             tf.data = BitConverter.GetBytes(BitConverter.ToInt32((byte[])f.data, 0));
                             break;
                         case "base":
                             tf.data = (bool)f.data;
                             break;
                         default:
                             tf.data = f.data;
                             break;
                     }
                     te.fields.Add(tf);
                 }
                 list.Insert(i + 1, te);
                 bundles.data = list;
                 SelectForDuplication.Insert(i + 1, false);
                 SelectForReplacement.Insert(i + 1, null);
             }
         }
         ofs.Close();
         MemoryStream t = new MemoryStream();
         Helpers.WriteLEB128(t, (int)m.Length);                 // payload size, LEB128-encoded
         m.WriteByte(0);                                        // trailing zero byte terminating the bundle list
         int varsize = (int)t.Length;
         fs.WriteByte(0x82);                                    // root entry type byte
         Helpers.WriteLEB128(fs, varsize + 9 + (int)m.Length);  // total size: size prefix + field header + payload
         byte[] buff = { 0x01, 0x62, 0x75, 0x6E, 0x64, 0x6C, 0x65, 0x73, 0x00 }; // 0x01 + "bundles" + 0x00
         fs.Write(buff, 0, 9);
         fs.Write(t.ToArray(), 0, varsize);
         int rel_off = (int)fs.Position;                        // bundle payload starts here
         fs.Write(m.ToArray(), 0, (int)m.Length);
         fs.Close();
         rtb2.AppendText("Saved " + filename + "\nUpdating TOC Entries...\n");
         for (int i = 0; i < list.Count; i++)
         {
             BJSON.Entry e = list[i];
             long offset = 0;
             bool isbase = false;
             BJSON.Field offset_field = new BJSON.Field();
             foreach (BJSON.Field f in e.fields)
                 switch (f.fieldname)
                 {
                     case "offset":
                         offset = BitConverter.ToInt64((byte[])f.data, 0);
                         offset_field = f;
                         break;
                     case "base":
                         isbase = (bool)f.data;
                         break;
                 }
             if (isbase)
                 continue;
             offset_field.data = BitConverter.GetBytes(offset + rel_off);
         }
     }
     catch (Exception ex)
     {
         rtb2.AppendText("ERROR: " + ex.Message);
         return false;
     }
     return true;
 }
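
Note: the size prefix written by BuildSB uses LEB128 encoding via the tool's Helpers.WriteLEB128. A minimal sketch of an unsigned LEB128 writer with that call shape is shown below (hypothetical implementation, assumed rather than taken from the tool):

    using System.IO;

    public static class Leb128Sketch
    {
        // Writes value as unsigned LEB128: 7 bits per byte, least-significant first,
        // with the high bit set on every byte except the last.
        public static void WriteLEB128(Stream s, int value)
        {
            uint v = (uint)value;
            do
            {
                byte b = (byte)(v & 0x7F);
                v >>= 7;
                if (v != 0)
                    b |= 0x80;
                s.WriteByte(b);
            } while (v != 0);
        }
    }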
Example #10
 // Runs a texture res job on one bundle: gives the res a fresh chunk id and SHA1 in the
 // SB file, then points the matching chunk entry at the new compressed texture data.
 public void RunTextureResJobOnBundle(Mod.ModJob mj, TOCFile toc, string tocpath, byte[] newsha1, string bpath, int newcompressedsize)
 {
     GC.Collect();
     int count = 0;
     int index = -1;
     foreach (TOCFile.TOCBundleInfoStruct buni in toc.bundles)
         if (count++ > -1 && bpath == buni.id)
         {
             DbgPrint(" Found bundle : " + bpath);
             index = count - 1;
             break;
         }
     //if bundle found
     if (index != -1)
     {
         if (!toc.iscas)
         {
             DbgPrint(" Warning: binary bundles not supported yet, skipping!");
             return;
         }
         //find out if base or delta
         BJSON.Entry root = toc.lines[0];
         BJSON.Field bundles = root.FindField("bundles");
         BJSON.Entry bun = ((List<BJSON.Entry>)bundles.data)[index];
         BJSON.Field isDeltaField = bun.FindField("delta");
         BJSON.Field isBaseField = bun.FindField("base");
         //if is base, copy from base, make delta and recompile
         if (isBaseField != null && (bool)isBaseField.data == true)
             if (!ImportBundleFromBase(toc, tocpath, index, bpath))
                 return;
         //check if already is in sb
         if (isDeltaField != null && (bool)isDeltaField.data == true)
             DbgPrint("  Its already a delta");
         DbgPrint("  Updating SB file with new SHA1...");//yeah, pretty much
         string SBpath = outputPath + Path.GetDirectoryName(tocpath) + "\\" + Path.GetFileNameWithoutExtension(tocpath) + ".sb";
         SBFile sb = new SBFile(SBpath);
         root = sb.lines[0];
         bundles = root.FindField("bundles");
         List<BJSON.Entry> bundle_list =(List<BJSON.Entry>)bundles.data;
         //find right bundle
         for (int i = 0; i < bundle_list.Count; i++)
         {
             bun = bundle_list[i];
             BJSON.Field ebx = bun.FindField("ebx");
             BJSON.Field res = bun.FindField("res");
             BJSON.Field chunks = bun.FindField("chunks");
             BJSON.Field path = bun.FindField("path");
             if (!(path != null && (string)path.data == bpath) || res == null || chunks == null)
                 continue;
             bool found = false;
             byte[] chunkidbuff = new byte[16];
             byte[] newchunkid = new byte[16];
             //find right res entry
             List<BJSON.Entry> res_list = (List<BJSON.Entry>)res.data;
             for (int j = 0; j < res_list.Count; j++)
             {
                 BJSON.Entry res_e = res_list[j];
                 BJSON.Field f_sha1 = res_e.FindField("sha1");
                 BJSON.Field f_name = res_e.FindField("name");
                 BJSON.Field f_size = res_e.FindField("size");
                 BJSON.Field f_osize = res_e.FindField("originalSize");
                 BJSON.Field f_casPatchType = res_e.FindField("casPatchType");
                 if (f_name != null && (string)f_name.data == mj.respath && f_sha1 != null)
                 {
                     //get res data and extract chunk id
                     byte[] sha1buff = (byte[])f_sha1.data;
                     DbgPrint("  Found res sha1 : " + Helpers.ByteArrayToHexString(sha1buff));
                     byte[] resdata = SHA1Access.GetDataBySha1(sha1buff);
                     if (resdata.Length == 0)
                     {
                         DbgPrint("  Error: cant find res data, skipping!");
                         break;
                     }
                     for (int k = 0; k < 16; k++)
                         chunkidbuff[k] = resdata[k + 0x1C];
                     DbgPrint("  Found chunk id : " + Helpers.ByteArrayToHexString(chunkidbuff));
                     newchunkid = Guid.NewGuid().ToByteArray();
                     DbgPrint("  Creating new chunk id : " + Helpers.ByteArrayToHexString(newchunkid));
                     for (int k = 0; k < 16; k++)
                         resdata[k + 0x1C] = newchunkid[k];
                     int newrescompsize = 0;
                     byte[] newressha1 = CreateCASContainer(resdata, out newrescompsize, "  ");
                     DbgPrint("  Creating new res sha1 : " + Helpers.ByteArrayToHexString(newressha1));
                     f_sha1.data = newressha1;
                     DbgPrint("  Updating res size : " + resdata.Length);
                     f_size.data = BitConverter.GetBytes((long)newrescompsize);
                     f_osize.data = BitConverter.GetBytes((long)resdata.Length);
                     if (f_casPatchType != null)
                     {
                         if (BitConverter.ToInt32((byte[])f_casPatchType.data, 0) != 1)
                         {
                             DbgPrint("  CasPatchType: found and set to 1!");
                             f_casPatchType.data = BitConverter.GetBytes((int)1);
                         }
                         else
                             DbgPrint("  CasPatchType: found and is fine!");
                     }
                     else
                     {
                         f_casPatchType = new BJSON.Field();
                         f_casPatchType.fieldname = "casPatchType";
                         f_casPatchType.type = 8;
                         f_casPatchType.data = BitConverter.GetBytes((int)1);
                         res_e.fields.Add(f_casPatchType);
                         DbgPrint("  CasPatchType: added and set to 1!");
                     }
                     found = true;
                 }
             }
             if (!found)
             {
                 DbgPrint("  Error: cant find res, skipping!");
                 break;
             }
             found = false;
             //find right chunk entry
             List<BJSON.Entry> chunk_list = (List<BJSON.Entry>)chunks.data;
             for (int j = 0; j < chunk_list.Count; j++)
             {
                 BJSON.Entry chunk_e = chunk_list[j];
                 BJSON.Field f_id = chunk_e.FindField("id");
                 BJSON.Field f_size = chunk_e.FindField("size");
                 BJSON.Field f_rangeStart = chunk_e.FindField("rangeStart");
                 BJSON.Field f_rangeEnd = chunk_e.FindField("rangeEnd");
                 BJSON.Field f_logicalOffset = chunk_e.FindField("logicalOffset");
                 BJSON.Field f_logicalSize = chunk_e.FindField("logicalSize");
                 BJSON.Field f2_sha1 = chunk_e.FindField("sha1");
                 BJSON.Field f_casPatchType2 = chunk_e.FindField("casPatchType");
                 if (f_id != null && Helpers.ByteArrayCompare((byte[])f_id.data, chunkidbuff))
                 {
                     DbgPrint("  Found chunk");
                     f_id.data = newchunkid;
                     found = true;
                     if (f_casPatchType2 != null)
                     {
                         if (BitConverter.ToInt32((byte[])f_casPatchType2.data, 0) != 1)
                         {
                             DbgPrint("  CasPatchType: found and set to 1!");
                             f_casPatchType2.data = BitConverter.GetBytes((int)1);
                         }
                         else
                             DbgPrint("  CasPatchType: found and is fine!");
                     }
                     else
                     {
                         f_casPatchType2 = new BJSON.Field();
                         f_casPatchType2.fieldname = "casPatchType";
                         f_casPatchType2.type = 8;
                         f_casPatchType2.data = BitConverter.GetBytes((int)1);
                         chunk_e.fields.Add(f_casPatchType2);
                         DbgPrint("  CasPatchType: added and set to 1!");
                     }
                     f_size.data = BitConverter.GetBytes(newcompressedsize);
                     if (f_rangeStart != null)
                         f_rangeStart.data = BitConverter.GetBytes((int)0);
                     if (f_rangeEnd != null)
                         f_rangeEnd.data = BitConverter.GetBytes(newcompressedsize);
                     if (f_logicalOffset != null)
                         f_logicalOffset.data = BitConverter.GetBytes((int)0);
                     if (f_logicalSize != null)
                         f_logicalSize.data = BitConverter.GetBytes(mj.data.Length);
                     f2_sha1.data = newsha1;
                     DbgPrint("  Updated chunk size : " + mj.data.Length);
                     CalcTotalSize(bun);
                     sb.Save();
                     found = true;
                     DbgPrint("  Replaced chunk sha1 and saved SB file");
                     DbgPrint("  Job successfull!");
                     break;
                 }
             }
             if (!found)
                 DbgPrint("  Error: Could not find Chunk by id");
         }
     }
 }
Example #11
 // Runs a resource job on one bundle: finds the res entry in the SB file, swaps in the
 // new SHA1 and sizes (adding the entry and casPatchType when missing), then saves the SB.
 public void RunRessourceJobOnBundle(Mod.ModJob mj, TOCFile toc, string tocpath, byte[] newsha1, string bpath, int newcompressedsize)
 {
     GC.Collect();
     int count = 0;
     int index = -1;
     foreach (TOCFile.TOCBundleInfoStruct buni in toc.bundles)
         if (count++ > -1 && bpath.ToLower() == buni.id.ToLower())
         {
             DbgPrint(" Found bundle : " + bpath);
             index = count - 1;
             break;
         }
     //if bundle found
     if (index != -1)
     {
         if (!toc.iscas)
         {
             DbgPrint(" Warning: binary bundles not supported yet, skipping!");
             return;
         }
         //find out if base or delta
         BJSON.Entry root = toc.lines[0];
         BJSON.Field bundles = root.FindField("bundles");
         BJSON.Entry bun = ((List<BJSON.Entry>)bundles.data)[index];
         BJSON.Field isDeltaField = bun.FindField("delta");
         BJSON.Field isBaseField = bun.FindField("base");
         //if is base, copy from base, make delta and recompile
         if (isBaseField != null && (bool)isBaseField.data == true)
             if (!ImportBundleFromBase(toc, tocpath, index, bpath))
                 return;
         //check if already is in sb
         if (isDeltaField != null && (bool)isDeltaField.data == true)
             DbgPrint("  Its already a delta");
         DbgPrint("  Updating SB file with new SHA1...");//yeah, pretty much
         string SBpath = outputPath + Path.GetDirectoryName(tocpath) + "\\" + Path.GetFileNameWithoutExtension(tocpath) + ".sb";
         SBFile sb = new SBFile(SBpath);
         root = sb.lines[0];
         bundles = root.FindField("bundles");
         List<BJSON.Entry> bundle_list = (List<BJSON.Entry>)bundles.data;
         //find right bundle
         for (int i = 0; i < bundle_list.Count; i++)
         {
             bun = bundle_list[i];
             BJSON.Field ebx = bun.FindField("ebx");
             BJSON.Field res = bun.FindField("res");
             BJSON.Field path = bun.FindField("path");
             if (!(path != null && ((string)path.data).ToLower() == bpath.ToLower()) || res == null)
                 continue;
             bool found = false;
             //find right res entry
             List<BJSON.Entry> res_list = (List<BJSON.Entry>)res.data;
             for (int j = 0; j < res_list.Count; j++)
             {
                 BJSON.Entry res_e = res_list[j];
                 BJSON.Field f_sha1 = res_e.FindField("sha1");
                 BJSON.Field f_name = res_e.FindField("name");
                 BJSON.Field f_size = res_e.FindField("size");
                 BJSON.Field f_osize = res_e.FindField("originalSize");
                 BJSON.Field f_casPatchType = res_e.FindField("casPatchType");
                 if (f_name != null && ((string)f_name.data).ToLower() == mj.respath.ToLower() && f_sha1 != null)
                 {
                     //get res data
                     byte[] sha1buff = (byte[])f_sha1.data;
                     DbgPrint("  Found res sha1 : " + Helpers.ByteArrayToHexString(sha1buff));
                     f_sha1.data = newsha1;
                     DbgPrint("  Replaced res sha1 with : " + Helpers.ByteArrayToHexString(newsha1));
                     DbgPrint("  Updating res size : " + mj.data.Length);
                     f_size.data = BitConverter.GetBytes((long)newcompressedsize);
                     f_osize.data = BitConverter.GetBytes((long)mj.data.Length);
                     if (f_casPatchType != null)
                     {
                         if (BitConverter.ToInt32((byte[])f_casPatchType.data, 0) != 1)
                         {
                             DbgPrint("  CasPatchType: found and set to 1!");
                             f_casPatchType.data = BitConverter.GetBytes((int)1);
                         }
                         else
                             DbgPrint("  CasPatchType: found and is fine!");
                     }
                     else
                     {
                         f_casPatchType = new BJSON.Field();
                         f_casPatchType.fieldname = "casPatchType";
                         f_casPatchType.type = 8;
                         f_casPatchType.data = BitConverter.GetBytes((int)1);
                         res_e.fields.Add(f_casPatchType);
                         DbgPrint("  CasPatchType: added and set to 1!");
                     }
                     CalcTotalSize(bun);
                     sb.Save();
                     DbgPrint("  Job successfull!");
                     found = true;
                 }
             }
             if (!found)
             {
                 DbgPrint("  cant find res, adding it!");
                 BJSON.Entry newres = new BJSON.Entry();
                 newres.type = 0x82;
                 newres.fields = new List<BJSON.Field>();
                 newres.fields.Add(new BJSON.Field(7, "name", mj.respath));
                 newres.fields.Add(new BJSON.Field(0x10, "sha1", newsha1));
                 newres.fields.Add(new BJSON.Field(9, "size", BitConverter.GetBytes((long)newcompressedsize)));
                 newres.fields.Add(new BJSON.Field(9, "originalSize", BitConverter.GetBytes((long)mj.data.Length)));
                 newres.fields.Add(new BJSON.Field(0x8, "resType", Helpers.HexStringToByteArray(mj.restype)));
                 newres.fields.Add(new BJSON.Field(0x13, "resMeta", new byte[0x10]));
                 newres.fields.Add(new BJSON.Field(9, "resRid", BitConverter.GetBytes((long)0)));
                 newres.fields.Add(new BJSON.Field(8, "casPatchType", BitConverter.GetBytes((int)1)));
                 ((List<BJSON.Entry>)res.data).Add(newres);
                 CalcTotalSize(bun);
                 sb.Save();
                 DbgPrint("  Job successfull!");
                 break;
             }
         }
     }
 }
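
Note: both jobs above repeat the same "set casPatchType to 1, adding the field when it is missing" block for res and chunk entries. A possible refactoring is sketched below; EnsureCasPatchType is a hypothetical helper name, the DbgPrint logging is omitted, and the BJSON.Field layout is the one used in these examples. It would live in the same class as the two jobs:

    // Hypothetical helper: make sure an entry carries casPatchType == 1,
    // creating the field (type 8, as in the examples above) when it is absent.
    private static void EnsureCasPatchType(BJSON.Entry entry)
    {
        BJSON.Field f = entry.FindField("casPatchType");
        if (f == null)
        {
            f = new BJSON.Field();
            f.fieldname = "casPatchType";
            f.type = 8;
            f.data = BitConverter.GetBytes((int)1);
            entry.fields.Add(f);
        }
        else if (BitConverter.ToInt32((byte[])f.data, 0) != 1)
        {
            f.data = BitConverter.GetBytes((int)1);
        }
    }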