// Entry point for the "AllaganTextNode" font-transplant experiment.
//
// Purpose (as demonstrated by the code below): take a global-client 000000 SqPack
// (index + dat0) and a korean-client 000000 SqPack, then
//   1. clone the global index and register an extra file entry "common/font/font8.tex"
//      that initially aliases font1.tex's offset;
//   2. point that font8.tex entry at the korean font texture (font_krn_1.tex) inside a
//      new dat file (000000.win32.dat1, a copy of the korean dat0);
//   3. build a merged glyph-mapping table (axis_12.fdt + the korean-only rows of
//      krnaxis_120.fdt) and append it to the new dat, updating the index to match.
//
// Side effects: reads from <exe>/input/{global,korean}/, writes the patched index,
// the new dat1, and several debug dumps (glMappingBytes, koMappingBytes, glRows,
// koRows, diffRows, newMapping) under <exe>/output/.
//
// NOTE(review): the many hex offsets in here encode the author's reverse-engineering
// of the .fdt glyph-table format (see the research comments kept below); they are
// asserted by experiment, not by a published spec — verify before changing any.
static unsafe void Main(string[] args)
{
    Console.WriteLine(string.Format("AllaganTextNode v{0}", Assembly.GetExecutingAssembly().GetName().Version.ToString()));

    // All paths are resolved relative to the executable's directory.
    string baseDir = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
    string inputDir = Path.Combine(baseDir, "input");
    string globalDir = Path.Combine(inputDir, "global");
    string koreanDir = Path.Combine(inputDir, "korean");

    string glIndexPath = Path.Combine(globalDir, "000000.win32.index");
    string koIndexPath = Path.Combine(koreanDir, "000000.win32.index");

    string glDatPath = Path.Combine(globalDir, "000000.win32.dat0");
    string koDatPath = Path.Combine(koreanDir, "000000.win32.dat0");

    string outputDir = Path.Combine(baseDir, "output");
    if (!Directory.Exists(outputDir)) { Directory.CreateDirectory(outputDir); }

    // Work on a copy of the global index so the input stays pristine.
    string outputIndexPath = Path.Combine(outputDir, Path.GetFileName(glIndexPath));
    File.Copy(glIndexPath, outputIndexPath, true);

    byte[] index = File.ReadAllBytes(outputIndexPath);

    IndexFile indexFile = new IndexFile();
    indexFile.ReadData(index);

    // Inject a new file entry "font8.tex" into the common/font directory of the index.
    // It starts out sharing font1.tex's wrapped offset; the real offset is patched in
    // further down once the korean texture's location is known.
    foreach (IndexDirectoryInfo directory in indexFile.DirectoryInfo)
    {
        if (directory.Key != Hash.Compute("common/font")) { continue; }

        List<IndexFileInfo> files = directory.FileInfo.ToList();

        IndexFileInfo font1 = files.First(f => f.Key == Hash.Compute("font1.tex"));

        IndexFileInfo font8 = new IndexFileInfo();
        font8.Key = Hash.Compute("font8.tex");
        font8.DirectoryInfo = directory;
        font8.WrappedOffset = font1.WrappedOffset;

        files.Add(font8);
        directory.FileInfo = files.ToArray();
    }

    // Re-serialize the index with the added entry and persist it, so that readIndex
    // below can see font8.tex.
    index = indexFile.RepackData(index);
    File.WriteAllBytes(outputIndexPath, index);

    // Cache both clients' file tables: directory hash -> (file hash -> SqFile).
    Dictionary<uint, Dictionary<uint, SqFile>> glSqFiles = readIndex(outputIndexPath, glDatPath);
    Dictionary<uint, Dictionary<uint, SqFile>> koSqFiles = readIndex(koIndexPath, koDatPath);

    byte[] origDat = File.ReadAllBytes(glDatPath);

    // The new dat1 starts as a byte copy of the korean dat0, so korean-dat offsets
    // remain valid inside it.
    string outputNewDatPath = Path.Combine(outputDir, "000000.win32.dat1");

    //CreateNewDat(glDatPath, outputNewDatPath);
    File.Copy(koDatPath, outputNewDatPath, true);

    // Redirect the injected font8.tex entry to the korean font texture's offset,
    // in dat file #1 (the second argument).
    SqFile glFontTexFile = glSqFiles[Hash.Compute("common/font")][Hash.Compute("font8.tex")];
    SqFile koFontTexFile = koSqFiles[Hash.Compute("common/font")][Hash.Compute("font_krn_1.tex")];
    glFontTexFile.UpdateOffset(koFontTexFile.Offset, 1, index);

    // Load both glyph-mapping tables (.fdt): global axis_12 and korean krnaxis_120.
    SqFile glMappingFile = glSqFiles[Hash.Compute("common/font")][Hash.Compute("axis_12.fdt")];
    SqFile koMappingFile = koSqFiles[Hash.Compute("common/font")][Hash.Compute("krnaxis_120.fdt")];

    byte[] glMappingBytes = glMappingFile.ReadData();
    byte[] koMappingBytes = koMappingFile.ReadData();

    // Debug dumps of the raw tables.
    File.WriteAllBytes(Path.Combine(outputDir, "glMappingBytes"), glMappingBytes);
    File.WriteAllBytes(Path.Combine(outputDir, "koMappingBytes"), koMappingBytes);

    // hangul jamo -> 1100~11ff
    // hangul compatibility jamo -> 3130~318f
    // hangul jamo extended-a -> a960~a97f
    // hangul syllables -> ac00-d7af
    // hangul jamo extended-b -> d7b0~d7ff

    // global range 0x40~0x1d9af
    // korean range 0x40~0x309cf

    // Parse the global table: each glyph row is 0x10 bytes starting at 0x60; the
    // first bytes are the character's UTF-8 sequence stored reversed (see the
    // "0x0~0x3 -> unicode, big endian (flipped)" note below). Key rows by the
    // decoded character string, first occurrence wins.
    Dictionary<string, byte[]> glRows = new Dictionary<string, byte[]>();

    for (long i = 0x60; i <= 0x1d9c0; i += 0x10)
    {
        byte[] row = new byte[0x10];
        Array.Copy(glMappingBytes, i, row, 0, 0x10);

        // Find the length of the reversed UTF-8 prefix (terminated by a zero byte).
        int j = 0;
        for (j = 0; j < row.Length; j++)
        {
            if (row[j] == 0) { break; }
        }

        byte[] utf = new byte[j];
        Array.Copy(row, 0, utf, 0, j);
        Array.Reverse(utf);

        string key = Encoding.UTF8.GetString(utf);
        if (!glRows.ContainsKey(key)) { glRows.Add(key, row); }
    }

    using (StreamWriter sw = new StreamWriter(Path.Combine(outputDir, "glRows"), false))
    {
        new XmlSerializer(typeof(string[])).Serialize(sw, glRows.Keys.ToArray());
    }

    // Parse the korean table the same way; diffRows collects characters present in
    // the korean table but absent from the global one (i.e. the glyphs to transplant).
    // NOTE(review): diffRows.Add is only guarded by glRows/koRows lookups — if the
    // korean table ever contained a duplicate key not present in glRows, the second
    // Add would throw ArgumentException. Presumably duplicates don't occur; verify.
    Dictionary<string, byte[]> koRows = new Dictionary<string, byte[]>();
    Dictionary<string, byte[]> diffRows = new Dictionary<string, byte[]>();

    for (long i = 0x60; i <= 0x309c0; i += 0x10)
    {
        byte[] row = new byte[0x10];
        Array.Copy(koMappingBytes, i, row, 0, 0x10);

        int j = 0;
        for (j = 0; j < row.Length; j++)
        {
            if (row[j] == 0) { break; }
        }

        byte[] utf = new byte[j];
        Array.Copy(row, 0, utf, 0, j);
        Array.Reverse(utf);

        string key = Encoding.UTF8.GetString(utf);
        if (!koRows.ContainsKey(key)) { koRows.Add(key, row); }
        if (!glRows.ContainsKey(key)) { diffRows.Add(key, row); }
    }

    using (StreamWriter sw = new StreamWriter(Path.Combine(outputDir, "koRows"), false))
    {
        new XmlSerializer(typeof(string[])).Serialize(sw, koRows.Keys.ToArray());
    }

    using (StreamWriter sw = new StreamWriter(Path.Combine(outputDir, "diffRows"), false))
    {
        new XmlSerializer(typeof(string[])).Serialize(sw, diffRows.Keys.ToArray());
    }

    // Merge every korean-only row into the global table, rewriting bytes 0x6..0xf of
    // each row. Per the research notes below, these bytes appear to carry the texture
    // selector, texture coordinates and glyph size — the constants here presumably
    // retarget the rows at the injected font8 texture. TODO(review): confirm the
    // exact field layout against the fdt format notes.
    foreach (string key in diffRows.Keys)
    {
        byte[] modRow = new byte[0x10];
        Array.Copy(diffRows[key], 0, modRow, 0, 0x10);

        modRow[0x6] = 0x1;
        modRow[0x7] = 0x0;
        modRow[0x8] = 0x72;
        modRow[0x9] = 0x0;
        modRow[0xa] = 0xda;
        modRow[0xb] = 0x1;
        modRow[0xc] = 0x8;
        modRow[0xd] = 0x10;
        modRow[0xe] = 0x0;
        modRow[0xf] = 0x0;

        glRows.Add(key, modRow);
    }

    // Rows must be sorted by their (un-reversed) UTF-8 sequence interpreted as a
    // big-endian number — reverse the bytes into a little-endian uint for OrderBy.
    string[] orderedKeys = glRows.Keys.OrderBy(s =>
    {
        byte[] b = Encoding.UTF8.GetBytes(s);
        byte[] p = new byte[4];
        Array.Copy(b, 0, p, 0, b.Length);
        Array.Reverse(p);
        return (BitConverter.ToUInt32(p, 0));
    }).ToArray();

    // Assemble the new mapping file: original 0x60-byte header, then all rows in
    // order, then the original 0x430-byte tail that followed the row area (the global
    // row area ends at 0x1d9d0, consistent with the loop bound above).
    string newMappingPath = Path.Combine(outputDir, "newMapping");

    byte[] mappingHeader = new byte[0x60];
    Array.Copy(glMappingBytes, 0, mappingHeader, 0, 0x60);
    File.WriteAllBytes(newMappingPath, mappingHeader);

    using (FileStream fs = new FileStream(newMappingPath, FileMode.Append, FileAccess.Write))
    using (BinaryWriter bw = new BinaryWriter(fs))
    {
        foreach (string key in orderedKeys) { bw.Write(glRows[key]); }
    }

    byte[] mappingTail = new byte[0x430];
    Array.Copy(glMappingBytes, 0x1d9d0, mappingTail, 0, 0x430);

    using (FileStream fs = new FileStream(newMappingPath, FileMode.Append, FileAccess.Write))
    using (BinaryWriter bw = new BinaryWriter(fs))
    {
        bw.Write(mappingTail);
    }

    // Patch header fields to the new size: offset 0xc = byte length of header+rows
    // (total minus the 0x430 tail), offset 0x24 = 16-byte row count as int16.
    byte[] newMapping = File.ReadAllBytes(newMappingPath);
    Array.Copy(BitConverter.GetBytes(newMapping.Length - 0x430), 0, newMapping, 0xc, 0x4);
    Array.Copy(BitConverter.GetBytes((short)((newMapping.Length - 0x430 - 0x40) / 0x10)), 0, newMapping, 0x24, 0x2);
    File.WriteAllBytes(newMappingPath, newMapping);

    /*
     * byte[] test = new byte[0x10];
     * Array.Copy(koMappingBytes, 0x3b40, test, 0, 0x10);
     *
     * byte[] test2 = new byte[3];
     * Array.Copy(test, 0, test2, 0, 3);
     * Array.Reverse(test2);
     * Console.WriteLine();
     * foreach (byte b in test2)
     * {
     * Console.WriteLine(b.ToString());
     * }
     * Console.WriteLine();
     * Console.WriteLine(Encoding.UTF8.GetString(test2));
     *
     * byte[] test3 = Encoding.UTF8.GetBytes("가");
     *
     * foreach (byte b in test3)
     * {
     * Console.WriteLine(b.ToString());
     * }
     *
     * Console.WriteLine(Encoding.UTF8.GetString(test3));
     * Console.ReadLine();*/

    // Author's reverse-engineering notes on the per-row byte layout (kept verbatim):
    //test[0x154] = 0x31;
    //test[0x158] = 0x60; <--- this seems to control coordinate? goes from 0x0~0xff
    //test[0x159] <--- seems to increment with 158. when 0x158 goes over 0xff this gets incremented by 0x1
    //test[0x15a] <-- when 0x159 goes over 0x3 this changes
    //test[0x15b] <-- this also goes upto 0x3
    //test[0x15c] = 0x6; -> width (on texture)
    //test[0x15d] = 0x2; -> height (on texture)
    // c~f looks like size-related thing.
    // in the fdt header area there seems to be something that controls how the texture is loaded...
    // compare axis_12.fdt with krnaxis_120.fdt
    // 0x0~0x3 -> unicode, big endian (flipped)
    // code page?
    // -> 0x0~0x3 points to tex1
    // -> 0x4~0x7 points to tex2 (coordinate is the same)
    // -> 0x8 points to tex3 if tex3 is present. Otherwise went back to tex1.
    // -> 0x9 is empty (maybe original tex3)
    // seems 100 increase = 0x4 increase -> tex increment
    // 0     100   1000  1100
    // 0x0   0x4   0x8   0xc   0x10  0x14  0x18
    // font1 font2 font3 font4 font5 font6 font7

    // Hand-patch one row's texture selector / coordinates / size for testing
    // (offsets per the notes above — experimental values, not a documented format).
    newMapping[0x156] = 0x1b;

    // coordinate
    newMapping[0x158] = 0x0;
    newMapping[0x159] = 0x0;
    newMapping[0x15a] = 0x0;
    newMapping[0x15b] = 0x0;

    //size
    newMapping[0x15c] = 0xff;
    newMapping[0x15d] = 0xff;

    // Re-wrap the new mapping as a dat entry, append it to the end of the new dat1,
    // and point the index's axis_12.fdt entry at that appended offset (dat #1).
    // The offset must be captured BEFORE the append — order matters here.
    byte[] repackedBuffer = glMappingFile.RepackData(origDat, newMapping);

    glMappingFile.UpdateOffset((int)new FileInfo(outputNewDatPath).Length, 1, index);

    using (FileStream fs = new FileStream(outputNewDatPath, FileMode.Append, FileAccess.Write))
    using (BinaryWriter bw = new BinaryWriter(fs))
    {
        bw.Write(repackedBuffer);
    }

    // Persist the patched index and fix up the dat's integrity hash.
    File.WriteAllBytes(outputIndexPath, index);

    UpdateDatHash(outputNewDatPath);

    /*
     * SqFile fontFile = glSqFiles[Hash.Compute("common/font")][Hash.Compute("font7.tex")];
     *
     * using (FileStream fs = File.OpenRead(fontFile.DatPath))
     * using (BinaryReader br = new BinaryReader(fs))
     * {
     * br.BaseStream.Position = fontFile.Offset;
     * int endOfHeader = br.ReadInt32();
     *
     * byte[] header = new byte[endOfHeader];
     * br.BaseStream.Position = fontFile.Offset;
     * br.Read(header, 0, endOfHeader);
     *
     * Console.WriteLine();
     * Console.WriteLine(BitConverter.ToInt32(header, 0x4));
     *
     * byte[] imageHeader = new byte[0x50];
     * br.Read(imageHeader, 0, 0x50);
     *
     * short imageFormat = BitConverter.ToInt16(imageHeader, 0x4);
     * short width = BitConverter.ToInt16(imageHeader, 0x8);
     * short height = BitConverter.ToInt16(imageHeader, 0xa);
     *
     * Console.WriteLine(imageFormat);
     *
     * if (!Enum.IsDefined(typeof(ImageFormat), (int)imageFormat)) throw new Exception();
     *
     * short blockCount = BitConverter.ToInt16(header, 0x14);
     * int lengthsStartOffset = 0x18 + blockCount * 0x14;
     *
     * List<ushort> lengths = new List<ushort>();
     *
     * for (int i = lengthsStartOffset; i + 1 < header.Length; i += 2)
     * {
     * ushort length = BitConverter.ToUInt16(header, i);
     * if (length == 0) break;
     *
     * lengths.Add(length);
     * }
     *
     * ushort[] lengthArray = lengths.ToArray();
     *
     * using (MemoryStream ms = new MemoryStream())
     * {
     * int blockOffset = 0;
     *
     * for (int i = 0; i < lengthArray.Length; i++)
     * {
     * byte[] blockHeader = new byte[0x10];
     * br.BaseStream.Position = fontFile.Offset + endOfHeader + 0x50 + blockOffset;
     * br.Read(blockHeader, 0, 0x10);
     *
     * int magic = BitConverter.ToInt32(blockHeader, 0);
     * if (magic != 0x10) throw new Exception();
     *
     * int sourceSize = BitConverter.ToInt32(blockHeader, 0x8);
     * int rawSize = BitConverter.ToInt32(blockHeader, 0xc);
     *
     * Console.WriteLine(sourceSize.ToString() + ", " + rawSize.ToString());
     *
     * bool isCompressed = sourceSize < 0x7d00;
     * int actualSize = isCompressed ? sourceSize : rawSize;
     *
     * int paddingLeftover = (actualSize + 0x10) % 0x80;
     * if (isCompressed && paddingLeftover != 0)
     * {
     * actualSize += 0x80 - paddingLeftover;
     * }
     *
     * byte[] blockBuffer = new byte[actualSize];
     * br.Read(blockBuffer, 0, actualSize);
     *
     * if (isCompressed)
     * {
     * using (MemoryStream _ms = new MemoryStream(blockBuffer))
     * using (DeflateStream ds = new DeflateStream(_ms, CompressionMode.Decompress))
     * {
     * ds.CopyTo(ms);
     * }
     * }
     * else
     * {
     * ms.Write(blockBuffer, 0, blockBuffer.Length);
     * }
     *
     * blockOffset += lengthArray[i];
     * }
     *
     * byte[] data = ms.ToArray();
     *
     * // A4R4G4B4
     * if (imageFormat == 0x1440)
     * {
     * byte[] argb = new byte[width * height * 4];
     *
     * for (int i = 0; (i + 2) <= 2 * width * height; i += 2)
     * {
     * ushort v = BitConverter.ToUInt16(data, i);
     *
     * for (int j = 0; j < 4; j++)
     * {
     * argb[i * 2 + j] = (byte)(((v >> (4 * j)) & 0xf) << 4);
     * }
     * }
     *
     * Image image;
     * fixed (byte* p = argb)
     * {
     * IntPtr ptr = (IntPtr)p;
     * using (Bitmap tempImage = new Bitmap(width, height, width * 4, PixelFormat.Format32bppArgb, ptr))
     * {
     * image = new Bitmap(tempImage);
     * }
     * }
     *
     * image.Save(@"C:\Users\serap\Desktop\test.png", System.Drawing.Imaging.ImageFormat.Png);
     * }
     * }
     *
     * } */
}
// Entry point for the "AllaganNode" ExH/ExD translation tool.
//
// Flow (as demonstrated by the code below):
//   1. parse input/0a0000.win32.index into a directory-hash -> file-hash -> SqFile map;
//   2. read exd/root.exl to enumerate all ExH sheet names (also dumped to output/);
//   3. decode every ExH header, keeping only variant-1 sheets that have string columns;
//   4. decode each ExH's per-language, per-range ExD data files and attach them;
//   5. dispatch on a console prompt: extract / apply translations / repack, plus several
//      hidden developer options (swap, compress, decompress, search, map_journal, test, write).
//
// Side effects: reads from <exe>/input/, writes under <exe>/output/, interactive console I/O.
static void Main(string[] args)
{
    Console.WriteLine(string.Format("AllaganNode v{0}", Assembly.GetExecutingAssembly().GetName().Version.ToString()));

    // TODO: make base path selectable.
    string baseDir = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
    string indexPath = Path.Combine(baseDir, "input", "0a0000.win32.index");
    string datPath = Path.Combine(baseDir, "input", "0a0000.win32.dat0");

    string outputDir = Path.Combine(baseDir, "output");
    if (!Directory.Exists(outputDir)) { Directory.CreateDirectory(outputDir); }

    // directory hash -> (file hash -> SqFile)
    Dictionary<uint, Dictionary<uint, SqFile>> sqFiles = new Dictionary<uint, Dictionary<uint, SqFile>>();

    // Read index and cache all available sqfiles.
    // Layout used here: int32 at 0xc = header offset; at header+0x8 an int32 file-table
    // offset followed by an int32 table byte length (0x10 bytes per entry); each entry is
    // key, directory key, wrapped offset, and one ignored int32.
    using (FileStream fs = File.OpenRead(indexPath))
    using (BinaryReader br = new BinaryReader(fs))
    {
        br.BaseStream.Position = 0xc;
        int headerOffset = br.ReadInt32();

        br.BaseStream.Position = headerOffset + 0x8;
        int fileOffset = br.ReadInt32();
        int fileCount = br.ReadInt32() / 0x10;

        br.BaseStream.Position = fileOffset;
        for (int i = 0; i < fileCount; i++)
        {
            SqFile sqFile = new SqFile();
            sqFile.Key = br.ReadUInt32();
            sqFile.DirectoryKey = br.ReadUInt32();
            sqFile.WrappedOffset = br.ReadInt32();
            sqFile.DatPath = datPath;

            br.ReadInt32(); // skip the entry's 4th int32 (unused here)

            if (!sqFiles.ContainsKey(sqFile.DirectoryKey)) { sqFiles.Add(sqFile.DirectoryKey, new Dictionary<uint, SqFile>()); }
            sqFiles[sqFile.DirectoryKey].Add(sqFile.Key, sqFile);

            Report(string.Format("{0} / {1}: {2}", i, fileCount, sqFile.Key));
        }
    }

    // find root file that lists all ExHs in 0a0000.
    // root file encoding is simple ASCII.
    SqFile rootFile = sqFiles[Hash.Compute("exd")][Hash.Compute("root.exl")];

    // Each useful line is "<name>,<number>"; only the name part is kept. The file is
    // also copied verbatim to output/root.exl. The first line is skipped (header).
    List<string> headerNames = new List<string>();

    using (MemoryStream ms = new MemoryStream(rootFile.ReadData()))
    using (StreamReader sr = new StreamReader(ms, Encoding.ASCII))
    using (StreamWriter sw = new StreamWriter(Path.Combine(outputDir, "root.exl")))
    {
        sr.ReadLine();

        while (!sr.EndOfStream)
        {
            string line = sr.ReadLine();
            if (string.IsNullOrWhiteSpace(line)) { continue; }

            Report(line);
            sw.WriteLine(line);

            string[] split = line.Split(',');
            if (split.Length != 2) { continue; }

            headerNames.Add(split[0]);
        }
    }

    List<ExHFile> exHeaderList = new List<ExHFile>();

    // for all ExHs, decode the cached data buffer as ExH.
    for (int i = 0; i < headerNames.Count; i++)
    {
        Report(string.Format("{0} / {1}: {2}", i, headerNames.Count, headerNames[i]));

        // Names may carry a sub-directory prefix ("sub/name"); split it off and
        // anchor everything under "exd".
        string headerDir = string.Empty;
        string headerName = headerNames[i];
        if (headerName.Contains("/"))
        {
            headerDir = string.Format("/{0}", headerName.Substring(0, headerName.LastIndexOf("/")));
            headerName = headerName.Substring(headerName.LastIndexOf("/") + 1);
        }
        headerDir = string.Format("exd{0}", headerDir);

        SqFile sqFile = sqFiles[Hash.Compute(headerDir)][Hash.Compute(string.Format("{0}.exh", headerName))];

        ExHFile exHFile = new ExHFile();
        exHFile.Copy(sqFile);
        exHFile.Name = headerName + ".exh";
        exHFile.Dir = headerDir;
        exHFile.HeaderName = headerName;
        exHFile.ReadExH();

        // only add ExHs with supported variant and string columns.
        if (exHFile.Variant == 1 && exHFile.Columns != null && exHFile.Columns.Length > 0) { exHeaderList.Add(exHFile); }
    }

    ExHFile[] exHeaders = exHeaderList.ToArray();

    // for all ExHs, decode child ExDs and link them to ExH.
    // ExD file name pattern: "<header>_<rangeStart>_<langCode>.exd"; missing
    // language/range combinations are silently skipped.
    for (int i = 0; i < exHeaders.Length; i++)
    {
        Report(string.Format("{0} / {1}: {2}", i, exHeaders.Length, exHeaders[i].Name));

        ExHFile exHeader = exHeaders[i];

        foreach (ExHLanguage lang in exHeader.Languages)
        {
            foreach (ExHRange range in exHeader.Ranges)
            {
                string datName = string.Format("{0}_{1}_{2}.exd", exHeader.HeaderName, range.Start, lang.Code);

                uint directoryKey = Hash.Compute(exHeader.Dir);
                uint key = Hash.Compute(datName);

                if (!sqFiles.ContainsKey(directoryKey)) { continue; }
                if (!sqFiles[directoryKey].ContainsKey(key)) { continue; }

                ExDFile exDat = new ExDFile();
                exDat.Copy(sqFiles[directoryKey][key]);

                exDat.Name = datName;
                exDat.Dir = exHeader.Dir;
                exDat.HeaderName = exHeader.HeaderName;

                exDat.PhysicalDir = string.Format("{0}/{1}/{2}", exHeader.Dir, exHeader.HeaderName, range.Start);
                exDat.LanguageCode = lang.Code;

                exDat.ExHeader = exHeader;

                exDat.ReadExD();
                exHeader.ExDats.Add(exDat);
            }
        }
    }

    Report(string.Empty);
    Console.Write("Enter an option (0 - extract, 1 - apply translations, 2 - repackage): ");

    // NOTE(review): Console.ReadLine() can return null on EOF, which would NRE on
    // ToLower(); acceptable for an interactive tool but worth knowing.
    switch (Console.ReadLine().ToLower())
    {
        case "0":
            ExtractExDs(exHeaders, outputDir);
            break;

        case "1":
            ApplyTranslations(exHeaders, outputDir, baseDir);
            break;

        case "2":
            RepackExDs(exHeaders, outputDir, indexPath, datPath);
            break;

        // this hidden option swaps language codes.
        // it tries to map entries based on string key if available. if not, it maps based on chunk keys.
        case "swap":
            SwapCodes(exHeaders, outputDir);
            break;

        // this hidden option is for translators.
        // this will compress all available translations that are written in exd.
        case "compress":
            CompressTranslations(exHeaders, outputDir);
            break;

        // this hidden option is for translators.
        // this will extract translations from compressed format and place them in exd directory in editable format.
        case "decompress":
            ExtractTranslations(exHeaders, outputDir, baseDir);
            break;

        // dev option to find certain string from original input dat...
        // Reads a language code then a keyword from the console, and prints the
        // physical path of every ExD whose raw UTF-8 field text contains the keyword.
        case "search":
            string languageCode = Console.ReadLine();
            string keyword = Console.ReadLine();

            for (int i = 0; i < exHeaders.Length; i++)
            {
                foreach (ExDFile exDat in exHeaders[i].ExDats)
                {
                    Report(string.Format("{0} / {1}: {2}", i, exHeaders.Length, exDat.Name));

                    if (exDat.LanguageCode != languageCode) { continue; }

                    foreach (ExDChunk chunk in exDat.Chunks.Values)
                    {
                        foreach (byte[] field in chunk.Fields.Values)
                        {
                            string test = new UTF8Encoding(false).GetString(field);
                            if (test.Contains(keyword))
                            {
                                Console.WriteLine();
                                Console.WriteLine(exDat.PhysicalDir + "/" + exDat.Name);
                            }
                        }
                    }
                }
            }

            Console.WriteLine();
            Console.WriteLine("DONE");
            Console.ReadLine();
            break;

        // dev option for mapping CompleteJournal...
        // Builds english-text -> korean-text maps from the Quest and
        // ContentFinderCondition sheets (english from the live data, korean from
        // previously extracted output/<dir>/ko files), then rewrites the english
        // CompleteJournal entries whose field 0 matches a mapped english text, and
        // extracts the result under the "trans" language code.
        case "map_journal":
            // mapping quest titles...
            Dictionary<int, string> englishQuest = new Dictionary<int, string>();
            Dictionary<int, string> koreanQuest = new Dictionary<int, string>();

            for (int i = 0; i < exHeaders.Length; i++)
            {
                foreach (ExDFile exDat in exHeaders[i].ExDats)
                {
                    Report(string.Format("{0} / {1}: {2}", i, exHeaders.Length, exDat.Name));

                    if (exDat.Dir.ToLower() != "exd" || exDat.HeaderName.ToLower() != "quest") { continue; }
                    if (exDat.LanguageCode != "en") { continue; }

                    // english titles: chunk key -> serialized field-0 entries.
                    foreach (ExDChunk chunk in exDat.Chunks.Values)
                    {
                        if (englishQuest.ContainsKey(chunk.Key)) { continue; }
                        if (chunk.Fields.Count == 0) { continue; }
                        if (!chunk.Fields.ContainsKey(0)) { continue; }

                        JObject jChunk = chunk.GetJObject();
                        JObject jField = (JObject)jChunk["Fields"].First(j => (ushort)j["FieldKey"] == 0);
                        JArray jEntries = (JArray)jField["FieldValue"];
                        if (jEntries.Count == 0) { continue; }

                        englishQuest.Add(chunk.Key, jEntries.ToString());
                    }

                    // korean titles come from the previously extracted "ko" JSON file.
                    string exDatOutDir = Path.Combine(outputDir, exDat.PhysicalDir);
                    if (!Directory.Exists(exDatOutDir)) { continue; }

                    string exDatKoPath = Path.Combine(exDatOutDir, "ko");
                    if (!File.Exists(exDatKoPath)) { continue; }

                    JObject[] jChunks;
                    using (StreamReader sr = new StreamReader(exDatKoPath))
                    {
                        jChunks = JArray.Parse(sr.ReadToEnd()).Select(j => (JObject)j).ToArray();
                    }

                    foreach (JObject jChunk in jChunks)
                    {
                        ExDChunk chunk = new ExDChunk();
                        chunk.LoadJObject(jChunk);

                        if (koreanQuest.ContainsKey(chunk.Key)) { continue; }
                        if (chunk.Fields.Count == 0) { continue; }
                        if (!chunk.Fields.ContainsKey(0)) { continue; }

                        JObject jField = (JObject)jChunk["Fields"].First(j => (ushort)j["FieldKey"] == 0);
                        JArray jEntries = (JArray)jField["FieldValue"];
                        if (jEntries.Count == 0) { continue; }

                        koreanQuest.Add(chunk.Key, jEntries.ToString());
                    }
                }
            }

            // Join english and korean quest titles on chunk key.
            Dictionary<string, string> englishToKorean = new Dictionary<string, string>();

            foreach (int key in englishQuest.Keys)
            {
                if (!koreanQuest.ContainsKey(key)) { continue; }
                if (englishToKorean.ContainsKey(englishQuest[key])) { continue; }

                englishToKorean.Add(englishQuest[key], koreanQuest[key]);
            }

            // mapping content finder titles...
            // Here the join key is field 44's single text entry rather than the chunk key.
            Dictionary<string, string> englishContents = new Dictionary<string, string>();
            Dictionary<string, string> koreanContents = new Dictionary<string, string>();

            for (int i = 0; i < exHeaders.Length; i++)
            {
                foreach (ExDFile exDat in exHeaders[i].ExDats)
                {
                    Report(string.Format("{0} / {1}: {2}", i, exHeaders.Length, exDat.Name));

                    if (exDat.Dir.ToLower() != "exd" || exDat.HeaderName.ToLower() != "contentfindercondition") { continue; }
                    if (exDat.LanguageCode != "en") { continue; }

                    foreach (ExDChunk chunk in exDat.Chunks.Values)
                    {
                        if (chunk.Fields.Count != 2) { continue; }
                        if (!chunk.Fields.ContainsKey(0) || !chunk.Fields.ContainsKey(44)) { continue; }

                        JObject jChunk = chunk.GetJObject();
                        JArray jFields = (JArray)jChunk["Fields"];

                        // NOTE(review): "jKeyFIeld" is a pre-existing capitalization typo
                        // (cf. "jKeyField" in the korean branch below); harmless, local only.
                        JObject jKeyFIeld = (JObject)jFields.First(j => (ushort)j["FieldKey"] == 44);
                        JArray jKeyEntries = (JArray)jKeyFIeld["FieldValue"];
                        if (jKeyEntries.Count != 1) { continue; }
                        if ((string)jKeyEntries[0]["EntryType"] != "text") { continue; }

                        string fieldKey = (string)jKeyEntries[0]["EntryValue"];
                        if (englishContents.ContainsKey(fieldKey)) { continue; }

                        JObject jValueField = (JObject)jFields.First(j => (ushort)j["FieldKey"] == 0);
                        JArray jValueEntries = (JArray)jValueField["FieldValue"];
                        if (jValueEntries.Count == 0) { continue; }

                        englishContents.Add(fieldKey, jValueEntries.ToString());
                    }

                    string exDatOutDir = Path.Combine(outputDir, exDat.PhysicalDir);
                    if (!Directory.Exists(exDatOutDir)) { continue; }

                    string exDatKoPath = Path.Combine(exDatOutDir, "ko");
                    if (!File.Exists(exDatKoPath)) { continue; }

                    JObject[] jChunks;
                    using (StreamReader sr = new StreamReader(exDatKoPath))
                    {
                        jChunks = JArray.Parse(sr.ReadToEnd()).Select(j => (JObject)j).ToArray();
                    }

                    foreach (JObject jChunk in jChunks)
                    {
                        ExDChunk chunk = new ExDChunk();
                        chunk.LoadJObject(jChunk);

                        if (chunk.Fields.Count != 2) { continue; }
                        if (!chunk.Fields.ContainsKey(0) || !chunk.Fields.ContainsKey(44)) { continue; }

                        JArray jFields = (JArray)jChunk["Fields"];

                        JObject jKeyField = (JObject)jFields.First(j => (ushort)j["FieldKey"] == 44);
                        JArray jKeyEntries = (JArray)jKeyField["FieldValue"];
                        if (jKeyEntries.Count != 1) { continue; }
                        if ((string)jKeyEntries[0]["EntryType"] != "text") { continue; }

                        string fieldKey = (string)jKeyEntries[0]["EntryValue"];
                        if (koreanContents.ContainsKey(fieldKey)) { continue; }

                        JObject jValueField = (JObject)jFields.First(j => (ushort)j["FieldKey"] == 0);
                        JArray jValueEntries = (JArray)jValueField["FieldValue"];
                        if (jValueEntries.Count == 0) { continue; }

                        koreanContents.Add(fieldKey, jValueEntries.ToString());
                    }
                }
            }

            // Merge content-finder pairs into the same english -> korean map.
            foreach (string key in englishContents.Keys)
            {
                if (!koreanContents.ContainsKey(key)) { continue; }
                if (englishToKorean.ContainsKey(englishContents[key])) { continue; }

                englishToKorean.Add(englishContents[key], koreanContents[key]);
            }

            // Rewrite CompleteJournal: replace field 0 with the mapped korean value
            // where available, then extract under the "trans" language code.
            for (int i = 0; i < exHeaders.Length; i++)
            {
                foreach (ExDFile exDat in exHeaders[i].ExDats)
                {
                    Report(string.Format("{0} / {1}: {2}", i, exHeaders.Length, exDat.Name));

                    if (exDat.Dir.ToLower() != "exd" || exDat.HeaderName.ToLower() != "completejournal") { continue; }
                    if (exDat.LanguageCode != "en") { continue; }

                    foreach (ExDChunk chunk in exDat.Chunks.Values)
                    {
                        if (chunk.Fields.Count != 1) { continue; }
                        if (!chunk.Fields.ContainsKey(0)) { continue; }

                        JObject jChunk = chunk.GetJObject();
                        JArray jFieldArray = (JArray)jChunk["Fields"];
                        JObject jField = (JObject)jFieldArray[0];
                        if ((ushort)jField["FieldKey"] != 0) { continue; }

                        JArray jEntries = (JArray)jField["FieldValue"];
                        if (jEntries.Count == 0) { continue; }

                        if (!englishToKorean.ContainsKey(jEntries.ToString())) { continue; }

                        jField["FieldValue"] = JArray.Parse(englishToKorean[jEntries.ToString()]);
                        chunk.LoadJObject(jChunk);
                    }

                    exDat.LanguageCode = "trans";
                    exDat.ExtractExD(outputDir);
                }
            }
            break;

        // testing
        // Verifies that each extracted file and its ".test" counterpart are
        // line-for-line identical; throws on the first mismatch or length difference.
        case "test":
            for (int i = 0; i < exHeaders.Length; i++)
            {
                foreach (ExDFile exDat in exHeaders[i].ExDats)
                {
                    Report(string.Format("{0} / {1}: {2}", i, exHeaders.Length, exDat.Name));

                    string exDatOutDir = Path.Combine(outputDir, exDat.PhysicalDir);
                    if (!Directory.Exists(exDatOutDir)) { continue; }

                    string exDatOutPath = Path.Combine(exDatOutDir, exDat.LanguageCode);
                    string exDatTestPath = exDatOutPath + ".test";

                    using (MemoryStream ms = new MemoryStream(File.ReadAllBytes(exDatOutPath)))
                    using (StreamReader sr = new StreamReader(ms))
                    using (MemoryStream ms2 = new MemoryStream(File.ReadAllBytes(exDatTestPath)))
                    using (StreamReader sr2 = new StreamReader(ms2))
                    {
                        while (sr.Peek() != -1)
                        {
                            if (sr2.Peek() == -1) { throw new Exception(); }
                            if (sr.ReadLine() != sr2.ReadLine()) { throw new Exception(); }
                        }

                        if (sr2.Peek() != -1) { throw new Exception(); }
                    }
                }
            }

            Console.WriteLine();
            Console.WriteLine("DONE");
            Console.ReadLine();
            break;

        // write out encoded bytes
        // Round-trips a console-typed tag string through a JSON property and dumps
        // the resulting byte[] conversion of the token to output/test.
        case "write":
            string test2 = Console.ReadLine();
            JObject test3 = JObject.Parse("{\"TagValue\":\"" + test2 + "\"}");
            File.WriteAllBytes(Path.Combine(outputDir, "test"), (byte[])test3["TagValue"]);
            break;
    }
}