public BDAT_LOOSE Loose; // *

public void Read(BinaryReader br)
{
    Compressed = br.ReadByte();
    if (Convert.ToBoolean(Compressed))
    {
        if (Compressed > 1)
        {
            // Rewind one byte so the archive reader consumes it as part of its own header.
            br.BaseStream.Seek(br.BaseStream.Position - 1, SeekOrigin.Begin);
        }
        Archive = new BDAT_ARCHIVE();
        Archive.Read(br);
        Loose = null;
        if (Compressed > 1)
        {
            Deprecated = br.ReadByte();
        }
    }
    else
    {
        Loose = new BDAT_LOOSE();
        Loose.Read(br);
        Archive = null;
    }
}
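// Usage sketch (illustrative, not part of the original source): how a collection
// Read is typically driven from a BinaryReader. The input path is hypothetical;
// the surrounding type is assumed to be BDAT_COLLECTION as declared above.
/*
using (var br = new BinaryReader(File.OpenRead("example_datafile.bin")))
{
    var collection = new BDAT_COLLECTION();
    collection.Read(br);

    if (collection.Archive != null)
    {
        // archived layout: sub-archives holding field tables and lookup tables
    }
    else
    {
        // loose layout: a flat field table plus a single lookup table
    }
}
*/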
public void DumpGeneral(string file)
{
    Console.Write("\rDumping GENERAL...");
    if (_indexCommons > -1)
    {
        using (StreamWriter outfile = new StreamWriter(file))
        {
            BDAT_LIST blist = _content.Lists[_indexCommons];
            if (blist.Collection.Compressed > 0)
            {
                BDAT_ARCHIVE barchive = blist.Collection.Archive;
                for (int s = 0; s < barchive.SubArchiveCount; s++)
                {
                    BDAT_SUBARCHIVE bsubarchive = barchive.SubArchives[s];
                    for (int f = 0; f < bsubarchive.FieldLookupCount; f++)
                    {
                        BDAT_LOOKUPTABLE blookup = bsubarchive.Lookups[f];
                        string[] words = LookupSplitToWords(blookup.Data, blookup.Size);
                        for (int w = 0; w < words.Length; w++)
                        {
                            // Words alternate alias/text pairs.
                            if (w % 2 == 0)
                            {
                                outfile.WriteLine("<alias>");
                                outfile.WriteLine(words[w]);
                                outfile.WriteLine("</alias>");
                            }
                            else
                            {
                                outfile.WriteLine("<text>");
                                outfile.WriteLine(words[w]);
                                outfile.WriteLine("</text>");
                            }
                        }
                    }
                }
            }
            else
            {
                BDAT_LOOSE bloose = blist.Collection.Loose;
                string[] words = LookupSplitToWords(bloose.Lookup.Data, bloose.SizeLookup);
                for (int w = 0; w < words.Length; w++)
                {
                    if (w % 2 == 0)
                    {
                        outfile.WriteLine("<alias>");
                        outfile.WriteLine(words[w]);
                        outfile.WriteLine("</alias>");
                    }
                    else
                    {
                        outfile.WriteLine("<text>");
                        outfile.WriteLine(words[w]);
                        outfile.WriteLine("</text>");
                    }
                }
            }
        }
    }
}
public void DumpCommand(string file)
{
    Console.Write("\rDumping COMMAND...");
    if (_indexCommands > -1)
    {
        using (StreamWriter outfile = new StreamWriter(file))
        {
            BDAT_LIST blist = _content.Lists[_indexCommands];
            BDAT_LOOSE bloose = blist.Collection.Loose;
            string[] words = LookupSplitToWords(bloose.Lookup.Data, bloose.SizeLookup);
            for (int w = 0; w < words.Length; w++)
            {
                outfile.WriteLine("<alias>");
                outfile.WriteLine("</alias>");
                outfile.WriteLine("<text>");
                outfile.WriteLine(words[w]);
                outfile.WriteLine("</text>");
            }
        }
    }
    //Console.WriteLine("_indexCommands: " + _indexCommands);
}
public void DumpFAQ(string file)
{
    Console.Write("\rDumping FAQ...");
    if (_indexFaqs > -1)
    {
        BDAT_LIST blist = _content.Lists[_indexFaqs];
        BDAT_LOOSE bloose = blist.Collection.Loose;
        string[] words = LookupSplitToWords(bloose.Lookup.Data, bloose.SizeLookup);
        using (StreamWriter outfile = new StreamWriter(file))
        {
            foreach (string text in words)
            {
                outfile.WriteLine("<alias>");
                outfile.WriteLine("</alias>");
                outfile.WriteLine("<text>");
                outfile.WriteLine(text);
                outfile.WriteLine("</text>");
            }
        }
    }
}
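// Usage sketch (hypothetical instance and file names): the three dump helpers above
// export the decoded lookup words for the commons, commands and FAQ lists once
// _content has been loaded and the corresponding list indexes have been resolved.
/*
dumper.DumpGeneral("general.txt"); // _indexCommons list, alias/text pairs
dumper.DumpCommand("command.txt"); // _indexCommands list, text only
dumper.DumpFAQ("faq.txt");         // _indexFaqs list, text only
*/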
public void DumpXML(string dir)
{
    for (int l = 0; l < _content.ListCount; l++)
    {
        //processedEvent((l + 1), _content.ListCount);
        Console.Write("\rDumping XML: {0}/{1}", (l + 1), _content.ListCount);
        BDAT_LIST blist = _content.Lists[l];
        using (StreamWriter outfile = new StreamWriter(dir + "\\datafile_" + blist.ID + ".xml"))
        {
            outfile.WriteLine("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
            outfile.WriteLine(string.Format("<list id=\"{0}\" size=\"{1}\" unk1=\"{2}\" unk2=\"{3}\" unk3=\"{4}\">",
                blist.ID, blist.Size, blist.Unknown1, blist.Unknown2, blist.Unknown3));

            BDAT_COLLECTION bcollection = blist.Collection;
            outfile.WriteLine(string.Format("\t<collection compressed=\"{0}\">", bcollection.Compressed));
            if (bcollection.Compressed > 0)
            {
                BDAT_ARCHIVE barchive = bcollection.Archive;
                outfile.WriteLine(string.Format("\t\t<archive count=\"{0}\">", barchive.SubArchiveCount));
                for (int a = 0; a < barchive.SubArchiveCount; a++)
                {
                    BDAT_SUBARCHIVE bsubarchive = barchive.SubArchives[a];
                    outfile.WriteLine(string.Format("\t\t\t<subarchive count=\"{0}\" StartAndEndFieldId=\"{1}\">",
                        bsubarchive.FieldLookupCount, m_bnsDat.BytesToHex(bsubarchive.StartAndEndFieldId)));
                    for (int f = 0; f < bsubarchive.FieldLookupCount; f++)
                    {
                        BDAT_FIELDTABLE bfield = bsubarchive.Fields[f];
                        outfile.Write(string.Format("\t\t\t\t<field ID=\"{3}\" size=\"{0}\" unk1=\"{1}\" unk2=\"{2}\">",
                            bfield.Size, bfield.Unknown1, bfield.Unknown2, bfield.ID));
                        outfile.Write(m_bnsDat.BytesToHex(bfield.Data)); //00 00 00 00 00 00 00 00 0c 00 00 00 50 00 00 00
                        outfile.WriteLine("</field>");

                        BDAT_LOOKUPTABLE blookup = bsubarchive.Lookups[f];
                        string[] words = LookupSplitToWords(blookup.Data, blookup.Size);
                        outfile.WriteLine(string.Format("\t\t\t\t<lookup count=\"{0}\">", words.Length));
                        int empty = 0;
                        for (int w = 0; w < words.Length; w++)
                        {
                            if (words[w] != null && words[w].Length > 0)
                            {
                                outfile.Write("\t\t\t\t\t<word>");
                                outfile.Write(words[w]);
                                outfile.WriteLine("</word>");
                            }
                            else
                            {
                                empty++;
                            }
                        }
                        outfile.WriteLine(string.Format("\t\t\t\t\t<empty count=\"{0}\"/>", empty));
                        outfile.WriteLine("\t\t\t\t</lookup>");
                    }
                    outfile.WriteLine("\t\t\t</subarchive>");
                }
                outfile.WriteLine("\t\t</archive>");
            }
            else
            {
                BDAT_LOOSE bloose = bcollection.Loose;
                outfile.WriteLine(string.Format("\t\t<loose countFields=\"{0}\" sizeFields=\"{1}\" sizePadding=\"{2}\" sizeLookup=\"{3}\" unk=\"{4}\">",
                    bloose.FieldCount, bloose.SizeFields, bloose.SizePadding, bloose.SizeLookup, bloose.Unknown));
                for (int f = 0; f < bloose.FieldCount; f++)
                {
                    BDAT_FIELDTABLE bfield = bloose.Fields[f];
                    outfile.Write(string.Format("\t\t\t<field size=\"{0}\" unk1=\"{1}\" unk2=\"{2}\">",
                        bfield.Size, bfield.Unknown1, bfield.Unknown2));
                    outfile.Write(m_bnsDat.BytesToHex(bfield.Data));
                    outfile.WriteLine("</field>");
                }

                outfile.Write("\t\t\t<padding>");
                if (bloose.Padding != null)
                {
                    outfile.Write(m_bnsDat.BytesToHex(bloose.Padding));
                }
                outfile.WriteLine("</padding>");

                string[] words = LookupSplitToWords(bloose.Lookup.Data, bloose.Lookup.Size);
                outfile.WriteLine(string.Format("\t\t\t<lookup count=\"{0}\">", words.Length));
                int empty = 0;
                for (int w = 0; w < words.Length; w++)
                {
                    // only add non-empty words
                    if (words[w] != null && words[w].Length > 0)
                    {
                        outfile.Write("\t\t\t\t<word>");
                        outfile.Write(words[w]); //WebUtility.HtmlEncode(
                        outfile.WriteLine("</word>");
                    }
                    else
                    {
                        empty++;
                    }
                }
                outfile.WriteLine(string.Format("\t\t\t\t<empty count=\"{0}\"/>", empty));
                outfile.WriteLine("\t\t\t</lookup>");
                outfile.WriteLine("\t\t</loose>");
            }
            outfile.WriteLine("\t</collection>");
            outfile.WriteLine("</list>");
        }
    }
    Console.Write("\rDone!!");
}
public void TranslateGeneral(TranslateReader translator)
{
    if (_indexCommons > -1)
    {
        int index = 0;
        int index2 = index;
        BDAT_LIST blist = _content.Lists[_indexCommons];
        if (blist.Collection.Compressed > 0)
        {
            BDAT_ARCHIVE barchive = blist.Collection.Archive;
            BDAT_SUBARCHIVE bsubarchive;
            List<BDAT_SUBARCHIVE> subNews = new List<BDAT_SUBARCHIVE>();
            for (int s = 0; s < barchive.SubArchiveCount; s++)
            {
                bsubarchive = barchive.SubArchives[s];
                // Console.Write("\rTranslateGeneral SubArchive: {0}/{1}", (s + 1), barchive.SubArchiveCount);
                for (int f = 0; f < bsubarchive.FieldLookupCount; f++)
                {
                    //Console.Write("\rTranslateGeneral FieldLookup: {0}/{1}", (f + 1), bsubarchive.FieldLookupCount);
                    BDAT_FIELDTABLE field = bsubarchive.Fields[f];
                    BDAT_LOOKUPTABLE blookup = bsubarchive.Lookups[f];
                    string[] words = LookupSplitToWords(blookup.Data, blookup.Size);
                    string translated = translator.Translate(words[1], words[0]); // alias
                    //Buffer.BlockCopy(BitConverter.GetBytes(words[0].Length), 0, field.Data, 12, 4);
                    //translate
                    if (translated != null)
                    {
                        words[1] = translated;
                        blookup.Data = LookupWorldsToBytes(words);
                        blookup.Size = blookup.Data.Length; // set new text size
                        //Buffer.BlockCopy(BitConverter.GetBytes(words[1].Length), 0, field.Data, 8, 4);
                    }
                }

                if (bsubarchive.NeedSplit(ref index))
                {
                    // Split the sub-archive into two clones at "index".
                    BDAT_SUBARCHIVE bClone1 = new BDAT_SUBARCHIVE()
                    {
                        StartAndEndFieldId = new byte[16],
                        Fields = new BDAT_FIELDTABLE[index],
                        Lookups = new BDAT_LOOKUPTABLE[index],
                        FieldLookupCount = index
                    };
                    Array.Copy(bsubarchive.Fields, 0, bClone1.Fields, 0, index);
                    Array.Copy(bsubarchive.Lookups, 0, bClone1.Lookups, 0, index);
                    // set new start field id
                    Buffer.BlockCopy(BitConverter.GetBytes(bClone1.Fields[0].ID), 0, bClone1.StartAndEndFieldId, 0, 4);
                    // set new end field id
                    Buffer.BlockCopy(BitConverter.GetBytes(bClone1.Fields[index - 1].ID), 0, bClone1.StartAndEndFieldId, 8, 4);
                    subNews.Add(bClone1);

                    // part 2
                    index2 = bsubarchive.FieldLookupCount - index;
                    BDAT_SUBARCHIVE bClone2 = new BDAT_SUBARCHIVE()
                    {
                        StartAndEndFieldId = new byte[16],
                        Fields = new BDAT_FIELDTABLE[index2],
                        Lookups = new BDAT_LOOKUPTABLE[index2],
                        FieldLookupCount = index2
                    };
                    Array.Copy(bsubarchive.Fields, index, bClone2.Fields, 0, index2);
                    Array.Copy(bsubarchive.Lookups, index, bClone2.Lookups, 0, index2);
                    // set new start field id
                    Buffer.BlockCopy(BitConverter.GetBytes(bClone2.Fields[0].ID), 0, bClone2.StartAndEndFieldId, 0, 4);
                    // set new end field id
                    Buffer.BlockCopy(BitConverter.GetBytes(bClone2.Fields[index2 - 1].ID), 0, bClone2.StartAndEndFieldId, 8, 4);
                    subNews.Add(bClone2);

                    Console.WriteLine("A:{0}<>B:{1}.OK!",
                        m_bnsDat.BytesToHex(bClone1.StartAndEndFieldId),
                        m_bnsDat.BytesToHex(bClone2.StartAndEndFieldId));
                }
                else
                {
                    subNews.Add(bsubarchive);
                }
            }
            barchive.SubArchiveCount = subNews.Count;
            barchive.SubArchives = subNews.ToArray();
            // Console.WriteLine("If A == B, something is wrong! Check the source code and fix it.");
            //Console.WriteLine("\rDone!!");
        }
        else
        {
            BDAT_LOOSE bloose = blist.Collection.Loose;
            BDAT_LOOKUPTABLE blookup = bloose.Lookup;
            string[] words = LookupSplitToWords(bloose.Lookup.Data, bloose.SizeLookup);
            for (int w = 0; w < words.Length; w += 2)
            {
                string translated = translator.Translate(words[w + 1], words[w]);
                if (translated != null)
                {
                    words[w + 1] = translated;
                }
                //Console.WriteLine("words[w + 1]: " + words[w + 1]);
            }
            blookup.Data = LookupWorldsToBytes(words);
            blookup.Size = blookup.Data.Length;
        }
    }
}
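// Sketch of the TranslateReader shape this method relies on (an assumption for
// illustration, not the project's actual implementation): Translate(text, alias)
// returns a replacement string, or null when no translation exists so the
// original text is left untouched.
/*
public class TranslateReader
{
    // hypothetical backing store keyed by alias
    private readonly Dictionary<string, string> _byAlias = new Dictionary<string, string>();

    public string Translate(string text, string alias)
    {
        string translated;
        return _byAlias.TryGetValue(alias, out translated) ? translated : null;
    }
}
*/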