/// <summary>
/// Constructs the DBC store: wires up all injected services, defaults the spell
/// service to the null implementation, and immediately kicks off Load().
/// </summary>
public DbcStore(IParameterFactory parameterFactory, ITaskRunner taskRunner, IDbcSettingsProvider settingsProvider, IMessageBoxService messageBoxService, IEventAggregator eventAggregator, ICurrentCoreVersion currentCoreVersion, NullSpellService nullSpellService, CataSpellService cataSpellService, WrathSpellService wrathSpellService, DBCD.DBCD dbcd)
{
    this.parameterFactory = parameterFactory;
    this.taskRunner = taskRunner;
    // Note: field name differs from the parameter name here (dbcSettingsProvider <- settingsProvider).
    dbcSettingsProvider = settingsProvider;
    this.messageBoxService = messageBoxService;
    this.eventAggregator = eventAggregator;
    this.currentCoreVersion = currentCoreVersion;
    this.nullSpellService = nullSpellService;
    this.cataSpellService = cataSpellService;
    this.wrathSpellService = wrathSpellService;
    this.dbcd = dbcd;
    // Start with the no-op spell service; presumably Load() swaps in the
    // cata/wrath implementation based on the current core version — TODO confirm.
    spellServiceImpl = nullSpellService;
    Load();
}
/// <summary>
/// Extracts emote slash-command names from Emotes.db2 and writes them to
/// emotes.txt as C-style enum lines ("EMOTE_NAME{padding} = id,").
/// </summary>
static void Main(string[] args)
{
    var dbcd = new DBCD.DBCD(new DBCProvider(), new DBDProvider());
    var emotes = dbcd.Load("Emotes.db2");

    using (var writer = new StreamWriter("emotes.txt"))
    {
        foreach (var id in emotes.Keys)
        {
            //under this ID emotes are new.
            if (id <= 621)
            {
                continue;
            }

            var emoteName = emotes.GetField<string>(id, "EmoteSlashCommand");
            if (string.IsNullOrWhiteSpace(emoteName))
            {
                continue;
            }

            emoteName = emoteName.ToUpper();

            // Strip a parenthesized suffix, e.g. "FOO (BAR)" -> "FOO".
            var idxOfOpen = emoteName.IndexOf("(");
            if (idxOfOpen >= 0)
            {
                // BUG FIX: search for ')' strictly AFTER '(' so a ')' that happens to
                // precede the '(' can no longer yield a negative substring length.
                var idxOfStop = emoteName.IndexOf(")", idxOfOpen + 1);
                if (idxOfStop > idxOfOpen)
                {
                    // Take the extra whitespace before '(' into account — but only when it
                    // actually exists. The old unconditional "- 1" threw
                    // ArgumentOutOfRangeException when the name started with '('.
                    var idxOfStart = (idxOfOpen > 0 && emoteName[idxOfOpen - 1] == ' ') ? idxOfOpen - 1 : idxOfOpen;
                    Console.WriteLine($"IndexOfStart: {idxOfStart} in {emoteName}");
                    Console.WriteLine($"IndexOfStop: {idxOfStop} in {emoteName}");
                    // BUG FIX: Remove() deletes exactly this span; the previous
                    // Replace(Substring(...), "") could also delete identical text
                    // appearing elsewhere in the name.
                    emoteName = emoteName.Remove(idxOfStart, idxOfStop - idxOfStart + 1);
                    Console.WriteLine($"New Name: {emoteName}\n");
                }
            }

            emoteName = emoteName.Replace(" ", "_");
            writer.WriteLine($"EMOTE_{emoteName,-64} = {id},");
        }
    }
}
/// <summary>
/// Loads the given DB2/DBC files via DBCD, prompting the user for a build
/// definition per file. Already-loaded files are still counted as loaded.
/// </summary>
/// <param name="files">Absolute or relative paths of .db2/.dbc files to load.</param>
/// <returns>The subset of <paramref name="files"/> that is loaded (or was already loaded) afterwards.</returns>
public string[] LoadFiles(string[] files)
{
    var loadedFiles = new List<string>();
    var dbcd = new DBCD.DBCD(dbcProvider, dbdProvider);

    foreach (string db2Path in files)
    {
        string db2Name = Path.GetFileName(db2Path);
        try
        {
            // Let the user pick which build definition/locale to parse the file with.
            DefinitionSelect definitionSelect = new DefinitionSelect();
            definitionSelect.SetDB2Name(db2Name);
            definitionSelect.SetDefinitionFromVersionDefinitions(GetVersionDefinitionsForDB2(db2Path));
            definitionSelect.ShowDialog();
            if (definitionSelect.IsCanceled)
            {
                continue;
            }

            // BUG FIX: the stopwatch was constructed but never started, so the
            // logged Elapsed time was always ~zero. StartNew() starts it running.
            var stopWatch = Stopwatch.StartNew();
            var storage = dbcd.Load(db2Path, definitionSelect.SelectedVersion, definitionSelect.SelectedLocale);

            // Count the file as loaded whether it was already cached or we just added it.
            if (LoadedDBFiles.ContainsKey(db2Path) || LoadedDBFiles.TryAdd(db2Path, storage))
            {
                loadedFiles.Add(db2Path);
            }

            stopWatch.Stop();
            Console.WriteLine($"Loading File: {db2Name} Elapsed Time: {stopWatch.Elapsed}");
        }
        catch (AggregateException)
        {
            // DBCD throws AggregateException when no definition matches the file name.
            MessageBox.Show(
                string.Format("Can't find definitions for {0}.\nCheck your Filename and note upper and lower case", db2Name),
                "WDBXEditor2", MessageBoxButton.OK, MessageBoxImage.Warning
            );
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex);
            MessageBox.Show(
                string.Format("Can't load {0}.\n{1}", db2Name, ex.Message),
                "WDBXEditor2", MessageBoxButton.OK, MessageBoxImage.Warning
            );
        }
    }

    return loadedFiles.ToArray();
}
/// <summary>
/// Exports DB2/DBC files to CSV. Arguments are individual .db2/.dbc paths
/// and/or (as the first argument) a folder whose top-level .db2/.dbc files
/// are all exported. Output is written next to the working directory as
/// "&lt;table&gt;.csv"; array columns expand to "name[0],name[1],...".
/// </summary>
static void Main(string[] args)
{
    if (args.Length == 0)
    {
        Console.WriteLine("Expected argument: db2filename or db2folder");
        return;
    }

    // Collect explicitly named files first; bail out if any is missing.
    var filesToExport = new List<string>();
    foreach (var arg in args)
    {
        if (arg.EndsWith(".db2") || arg.EndsWith(".dbc"))
        {
            if (!File.Exists(arg))
            {
                Console.WriteLine("Input DB2 file or folder could not be found: " + arg);
                return;
            }
            filesToExport.Add(arg);
        }
    }

    // The first argument decides the base directory; a directory argument
    // additionally pulls in every top-level .db2/.dbc inside it.
    var inputArg = args[0];
    var baseDir = "";
    FileAttributes attr = File.GetAttributes(inputArg);
    if (attr.HasFlag(FileAttributes.Directory))
    {
        filesToExport.AddRange(Directory.EnumerateFiles(inputArg, "*.db2", SearchOption.TopDirectoryOnly));
        filesToExport.AddRange(Directory.EnumerateFiles(inputArg, "*.dbc", SearchOption.TopDirectoryOnly));
        baseDir = inputArg;
    }
    else
    {
        baseDir = Path.GetDirectoryName(inputArg);
    }

    var newLinesInStrings = true;
    var dbcd = new DBCD.DBCD(new DBCProvider(baseDir), new DBDProvider());

    foreach (var fileToExport in filesToExport)
    {
        var tableName = Path.GetFileNameWithoutExtension(fileToExport);
        Console.WriteLine("Exporting DBC " + tableName);
        try
        {
            var storage = dbcd.Load(tableName);
            if (!storage.Values.Any())
            {
                throw new Exception("No rows found!");
            }

            var headerWritten = false;
            using (var exportStream = new MemoryStream())
            using (var exportWriter = new StreamWriter(exportStream))
            {
                foreach (DBCDRow item in storage.Values)
                {
                    // Write CSV header once, expanding array fields based on the
                    // first row's array lengths.
                    if (!headerWritten)
                    {
                        for (var j = 0; j < storage.AvailableColumns.Length; ++j)
                        {
                            string fieldname = storage.AvailableColumns[j];
                            var field = item[fieldname];
                            var isEndOfRecord = j == storage.AvailableColumns.Length - 1;
                            if (field is Array a)
                            {
                                for (var i = 0; i < a.Length; i++)
                                {
                                    exportWriter.Write($"{fieldname}[{i}]");
                                    if (i != a.Length - 1)
                                    {
                                        exportWriter.Write(",");
                                    }
                                }
                            }
                            else
                            {
                                exportWriter.Write(fieldname);
                            }
                            if (!isEndOfRecord)
                            {
                                exportWriter.Write(",");
                            }
                        }
                        headerWritten = true;
                        exportWriter.WriteLine();
                    }

                    // Write the row itself; strings are CSV-escaped.
                    for (var i = 0; i < storage.AvailableColumns.Length; ++i)
                    {
                        var field = item[storage.AvailableColumns[i]];
                        var isEndOfRecord = i == storage.AvailableColumns.Length - 1;
                        if (field is Array a)
                        {
                            for (var j = 0; j < a.Length; j++)
                            {
                                exportWriter.Write(a.GetValue(j));
                                if (j != a.Length - 1)
                                {
                                    exportWriter.Write(",");
                                }
                            }
                        }
                        else
                        {
                            // Idiom fix: pattern match instead of GetType() comparison + cast.
                            var value = field;
                            if (value is string s)
                            {
                                value = StringToCSVCell(s, newLinesInStrings);
                            }
                            exportWriter.Write(value);
                        }
                        if (!isEndOfRecord)
                        {
                            exportWriter.Write(",");
                        }
                    }
                    exportWriter.WriteLine();
                }

                // FIX: Flush() instead of an explicit Dispose() inside the using block.
                // Dispose() also closed the underlying MemoryStream (and relied on
                // ToArray() working on a closed stream plus a benign double-dispose);
                // Flush() pushes buffered text through and keeps the stream open.
                exportWriter.Flush();
                File.WriteAllBytes(tableName + ".csv", exportStream.ToArray());
            }
        }
        catch (Exception e)
        {
            Console.WriteLine("Failed to export DB2 " + tableName + ": " + e.Message);
        }
    }
}
/// <summary>
/// Exports one ADT terrain tile (tileX, tileY) of the given WDT to Wavefront OBJ,
/// plus (configurably) its MTL materials, a model-placement CSV for WMO/M2 objects,
/// and foliage doodad models. Output goes under appSettings["outdir"] mirroring the
/// in-game "world/maps/..." path.
/// </summary>
/// <param name="wdtFileDataID">File data ID of the map's WDT.</param>
/// <param name="tileX">Tile X coordinate (0-63).</param>
/// <param name="tileY">Tile Y coordinate (0-63).</param>
/// <param name="exportworker">Optional progress reporter; a local one is created when null.</param>
public static void ExportADT(uint wdtFileDataID, byte tileX, byte tileY, BackgroundWorker exportworker = null)
{
    // Allow standalone calls without a caller-supplied worker.
    if (exportworker == null)
    {
        exportworker = new BackgroundWorker();
        exportworker.WorkerReportsProgress = true;
    }

    var outdir = ConfigurationManager.AppSettings["outdir"];

    // Force "." as the decimal separator for the whole thread so the OBJ/MTL text
    // parses the same regardless of OS locale.
    var customCulture = (System.Globalization.CultureInfo)System.Threading.Thread.CurrentThread.CurrentCulture.Clone();
    customCulture.NumberFormat.NumberDecimalSeparator = ".";
    System.Threading.Thread.CurrentThread.CurrentCulture = customCulture;

    // World-coordinate constants: map half-extent, then tile / chunk / unit sizes
    // derived by the usual 32 / 16 / 8 subdivisions.
    var MaxSize = 51200 / 3.0;
    var TileSize = MaxSize / 32.0;
    var ChunkSize = TileSize / 16.0;
    var UnitSize = ChunkSize / 8.0;
    var UnitSizeHalf = UnitSize / 2.0;

    if (!Listfile.TryGetFilename(wdtFileDataID, out string wdtFilename))
    {
        Logger.WriteLine("ADT OBJ Exporter: WDT {0} has no known filename, skipping export!", wdtFileDataID);
        return;
    }

    var mapName = Path.GetFileNameWithoutExtension(wdtFilename);
    var file = "world/maps/" + mapName + "/" + mapName + "_" + tileX.ToString() + "_" + tileY.ToString() + ".adt";

    var reader = new ADTReader();
    reader.LoadADT(wdtFileDataID, tileX, tileY, true, wdtFilename);

    if (reader.adtfile.chunks == null)
    {
        Logger.WriteLine("ADT OBJ Exporter: File {0} has no chunks, skipping export!", file);
        return;
    }

    Logger.WriteLine("ADT OBJ Exporter: Starting export of {0}..", file);

    if (!Directory.Exists(Path.Combine(outdir, Path.GetDirectoryName(file))))
    {
        Directory.CreateDirectory(Path.Combine(outdir, Path.GetDirectoryName(file)));
    }

    exportworker.ReportProgress(0, "Loading ADT " + file);

    var renderBatches = new List<Structs.RenderBatch>();
    var verticelist = new List<Structs.Vertex>();
    var indicelist = new List<int>();
    var materials = new Dictionary<int, string>();

    // bakeQuality drives both UV mapping and material granularity below
    // ("high" = one material per chunk, otherwise one per tile; "none" = no MTL).
    ConfigurationManager.RefreshSection("appSettings");
    var bakeQuality = ConfigurationManager.AppSettings["bakeQuality"];

    // Calculate ADT offset in world coordinates
    var adtStartX = ((reader.adtfile.x - 32) * TileSize) * -1;
    var adtStartY = ((reader.adtfile.y - 32) * TileSize) * -1;

    // Calculate first chunk offset in world coordinates
    var initialChunkX = adtStartY + (reader.adtfile.chunks[0].header.indexX * ChunkSize) * -1;
    var initialChunkY = adtStartX + (reader.adtfile.chunks[0].header.indexY * ChunkSize) * -1;

    // Build terrain geometry: 16x16 chunks, 145 vertices each.
    uint ci = 0;
    for (var x = 0; x < 16; x++)
    {
        double xOfs = x / 16d; // NOTE(review): unused
        for (var y = 0; y < 16; y++)
        {
            double yOfs = y / 16d; // NOTE(review): unused

            var genx = (initialChunkX + (ChunkSize * x) * -1);
            var geny = (initialChunkY + (ChunkSize * y) * -1);

            var chunk = reader.adtfile.chunks[ci];

            // Base index of this chunk's vertices in the shared vertex list.
            var off = verticelist.Count();

            var batch = new Structs.RenderBatch();

            // 17 interleaved rows: odd rows ("small") have 8 vertices, even rows 9,
            // for the standard 145-vertex chunk layout.
            for (int i = 0, idx = 0; i < 17; i++)
            {
                bool isSmallRow = (i % 2) != 0;
                int rowLength = isSmallRow ? 8 : 9;
                for (var j = 0; j < rowLength; j++)
                {
                    var v = new Structs.Vertex();

                    // Normals are stored as signed bytes; note the Y/Z axis swap.
                    v.Normal = new Structs.Vector3D
                    {
                        X = (double)chunk.normals.normal_0[idx] / 127,
                        Y = (double)chunk.normals.normal_2[idx] / 127,
                        Z = (double)chunk.normals.normal_1[idx] / 127
                    };

                    var px = geny - (j * UnitSize);
                    var py = chunk.vertices.vertices[idx++] + chunk.header.position.Z;
                    var pz = genx - (i * UnitSizeHalf);

                    v.Position = new Structs.Vector3D
                    {
                        X = px,
                        Y = py,
                        Z = pz
                    };

                    // Small rows are shifted half a unit sideways.
                    if ((i % 2) != 0)
                    {
                        v.Position.X = (px - UnitSizeHalf);
                    }

                    double ofs = j;
                    if (isSmallRow)
                    {
                        ofs += 0.5;
                    }

                    if (bakeQuality == "high")
                    {
                        // Per-chunk UVs.
                        double tx = ofs / 8d;
                        double ty = 1 - (i / 16d);
                        v.TexCoord = new Structs.Vector2D { X = tx, Y = ty };
                    }
                    else
                    {
                        // Tile-wide UVs derived from world position.
                        double tx = -(v.Position.X - initialChunkY) / TileSize;
                        double ty = (v.Position.Z - initialChunkX) / TileSize;
                        v.TexCoord = new Structs.Vector2D { X = tx, Y = ty };
                    }

                    verticelist.Add(v);
                }
            }

            batch.firstFace = (uint)indicelist.Count();

            // Stupid C# and its structs
            // (copy the 8 high-res hole bytes out of the header struct into an array)
            var holesHighRes = new byte[8];
            holesHighRes[0] = chunk.header.holesHighRes_0;
            holesHighRes[1] = chunk.header.holesHighRes_1;
            holesHighRes[2] = chunk.header.holesHighRes_2;
            holesHighRes[3] = chunk.header.holesHighRes_3;
            holesHighRes[4] = chunk.header.holesHighRes_4;
            holesHighRes[5] = chunk.header.holesHighRes_5;
            holesHighRes[6] = chunk.header.holesHighRes_6;
            holesHighRes[7] = chunk.header.holesHighRes_7;

            // Emit 4 triangles around each inner (odd-row) vertex unless that cell is a hole.
            for (int j = 9, xx = 0, yy = 0; j < 145; j++, xx++)
            {
                if (xx >= 8)
                {
                    xx = 0;
                    ++yy;
                }

                var isHole = true;

                // Check if chunk is using low-res holes
                if ((chunk.header.flags & 0x10000) == 0)
                {
                    // Calculate current hole number
                    var currentHole = (int)Math.Pow(2, Math.Floor(xx / 2f) * 1f + Math.Floor(yy / 2f) * 4f);
                    // Check if current hole number should be a hole
                    if ((chunk.header.holesLowRes & currentHole) == 0)
                    {
                        isHole = false;
                    }
                }
                else
                {
                    // Check if current section is a hole
                    if (((holesHighRes[yy] >> xx) & 1) == 0)
                    {
                        isHole = false;
                    }
                }

                if (!isHole)
                {
                    indicelist.AddRange(new int[] { off + j + 8, off + j - 9, off + j });
                    indicelist.AddRange(new int[] { off + j - 9, off + j - 8, off + j });
                    indicelist.AddRange(new int[] { off + j - 8, off + j + 9, off + j });
                    indicelist.AddRange(new int[] { off + j + 9, off + j + 8, off + j });

                    // Generates quads instead of 4x triangles
                    //indicelist.AddRange(new int[] { off + j + 8, off + j - 9, off + j - 8 });
                    //indicelist.AddRange(new int[] { off + j - 8, off + j + 9, off + j + 8 });
                }

                // Skip over the next 9-vertex (outer) row.
                if ((j + 1) % (9 + 8) == 0)
                {
                    j += 9;
                }
            }

            if (bakeQuality == "high")
            {
                // One material per chunk.
                materials.Add((int)ci + 1, Path.GetFileNameWithoutExtension(file) + "_" + ci);
                batch.materialID = ci + 1;
            }
            else
            {
                // Single shared material for the whole tile.
                if (!materials.ContainsKey(1))
                {
                    materials.Add(1, Path.GetFileNameWithoutExtension(file));
                }
                batch.materialID = (uint)materials.Count();
            }

            batch.numFaces = (uint)(indicelist.Count()) - batch.firstFace;

            var layermats = new List<uint>(); // NOTE(review): unused

            renderBatches.Add(batch);
            ci++;
        }
    }

    ConfigurationManager.RefreshSection("appSettings");
    bool exportWMO = ConfigurationManager.AppSettings["exportWMO"] == "True";
    bool exportM2 = ConfigurationManager.AppSettings["exportM2"] == "True";
    bool exportFoliage = ConfigurationManager.AppSettings["exportFoliage"] == "True";

    if (exportFoliage)
    {
        exportworker.ReportProgress(65, "Exporting foliage");
        try
        {
            var build = WoWFormatLib.Utils.CASC.BuildName; // NOTE(review): unused

            // Resolve ground-effect doodads per texture layer via DBC data,
            // exporting each referenced M2 once into a "foliage" subfolder.
            var dbcd = new DBCD.DBCD(new DBC.CASCDBCProvider(), new GithubDBDProvider());
            var groundEffectTextureDB = dbcd.Load("GroundEffectTexture");
            var groundEffectDoodadDB = dbcd.Load("GroundEffectDoodad");
            for (var c = 0; c < reader.adtfile.texChunks.Length; c++)
            {
                for (var l = 0; l < reader.adtfile.texChunks[c].layers.Length; l++)
                {
                    var effectID = reader.adtfile.texChunks[c].layers[l].effectId;
                    if (effectID == 0)
                    {
                        continue;
                    }
                    if (!groundEffectTextureDB.ContainsKey(effectID))
                    {
                        Console.WriteLine("Could not find groundEffectTexture entry " + reader.adtfile.texChunks[c].layers[l].effectId);
                        continue;
                    }
                    dynamic textureEntry = groundEffectTextureDB[effectID];
                    foreach (int doodad in textureEntry.DoodadID)
                    {
                        if (!groundEffectDoodadDB.ContainsKey(doodad))
                        {
                            Console.WriteLine("Could not find groundEffectDoodad entry " + doodad);
                            continue;
                        }
                        dynamic doodadEntry = groundEffectDoodadDB[doodad];
                        var filedataid = (uint)doodadEntry.ModelFileID;
                        if (!Listfile.TryGetFilename(filedataid, out var filename))
                        {
                            Logger.WriteLine("Could not find filename for " + filedataid + ", setting filename to filedataid..");
                            filename = filedataid.ToString();
                        }
                        // NOTE(review): File.Exists on a directory path always returns
                        // false — likely meant Directory.Exists. Harmless in practice
                        // since CreateDirectory is a no-op if the folder exists.
                        if (!File.Exists(Path.Combine(outdir, Path.GetDirectoryName(file), "foliage")))
                        {
                            Directory.CreateDirectory(Path.Combine(outdir, Path.GetDirectoryName(file), "foliage"));
                        }
                        if (!File.Exists(Path.Combine(outdir, Path.GetDirectoryName(file), "foliage", Path.GetFileNameWithoutExtension(filename).ToLower() + ".obj")))
                        {
                            M2Exporter.ExportM2(filedataid, null, Path.Combine(outdir, Path.GetDirectoryName(file), "foliage"), filename);
                        }
                    }
                }
            }
        }
        catch (Exception e)
        {
            Logger.WriteLine("Error exporting GroundEffects: " + e.Message);
        }
    }

    if (exportWMO || exportM2)
    {
        // CSV of object placements so an importer can reassemble the scene.
        var doodadSW = new StreamWriter(Path.Combine(outdir, Path.GetDirectoryName(file), Path.GetFileNameWithoutExtension(file).Replace(" ", "") + "_ModelPlacementInformation.csv"));
        doodadSW.WriteLine("ModelFile;PositionX;PositionY;PositionZ;RotationX;RotationY;RotationZ;ScaleFactor;ModelId;Type");

        if (exportWMO)
        {
            exportworker.ReportProgress(25, "Exporting WMOs");
            for (var mi = 0; mi < reader.adtfile.objects.worldModels.entries.Count(); mi++)
            {
                var wmo = reader.adtfile.objects.worldModels.entries[mi];

                var filename = "";
                uint filedataid = 0;

                if (reader.adtfile.objects.wmoNames.filenames == null)
                {
                    // Modern client: MWID entries are file data IDs.
                    filedataid = wmo.mwidEntry;

                    if (!Listfile.TryGetFilename(filedataid, out filename))
                    {
                        Logger.WriteLine("Warning! Could not find filename for " + filedataid + ", setting filename to filedataid..");
                        filename = filedataid.ToString();
                    }
                }
                else
                {
                    // Legacy path: names are stored in the ADT itself.
                    // NOTE(review): this log uses filename/filedataid before they are
                    // assigned, so it always prints ""/0 here.
                    Logger.WriteLine("Warning!! File " + filename + " ID: " + filedataid + " still has filenames!");
                    filename = reader.adtfile.objects.wmoNames.filenames[wmo.mwidEntry];
                    if (!Listfile.TryGetFileDataID(filename, out filedataid))
                    {
                        Logger.WriteLine("Error! Could not find filedataid for " + filename + "!");
                        continue;
                    }
                }

                if (string.IsNullOrEmpty(filename))
                {
                    // No name known: export/reference by file data ID.
                    if (!File.Exists(Path.Combine(outdir, Path.GetDirectoryName(file), filedataid.ToString() + ".obj")))
                    {
                        WMOExporter.ExportWMO(filedataid, exportworker, Path.Combine(outdir, Path.GetDirectoryName(file)), wmo.doodadSet);
                    }
                    // Only write the CSV row if the export actually produced a file.
                    if (File.Exists(Path.Combine(outdir, Path.GetDirectoryName(file), filedataid.ToString() + ".obj")))
                    {
                        doodadSW.WriteLine(filedataid + ".obj;" + wmo.position.X + ";" + wmo.position.Y + ";" + wmo.position.Z + ";" + wmo.rotation.X + ";" + wmo.rotation.Y + ";" + wmo.rotation.Z + ";" + wmo.scale / 1024f + ";" + wmo.uniqueId + ";wmo");
                    }
                }
                else
                {
                    if (!File.Exists(Path.Combine(outdir, Path.GetDirectoryName(file), Path.GetFileNameWithoutExtension(filename).ToLower() + ".obj")))
                    {
                        WMOExporter.ExportWMO(filedataid, exportworker, Path.Combine(outdir, Path.GetDirectoryName(file)), wmo.doodadSet, filename);
                    }
                    if (File.Exists(Path.Combine(outdir, Path.GetDirectoryName(file), Path.GetFileNameWithoutExtension(filename).ToLower() + ".obj")))
                    {
                        doodadSW.WriteLine(Path.GetFileNameWithoutExtension(filename).ToLower() + ".obj;" + wmo.position.X + ";" + wmo.position.Y + ";" + wmo.position.Z + ";" + wmo.rotation.X + ";" + wmo.rotation.Y + ";" + wmo.rotation.Z + ";" + wmo.scale / 1024f + ";" + wmo.uniqueId + ";wmo");
                    }
                }
            }
        }

        if (exportM2)
        {
            exportworker.ReportProgress(50, "Exporting M2s");
            for (var mi = 0; mi < reader.adtfile.objects.models.entries.Count(); mi++)
            {
                var doodad = reader.adtfile.objects.models.entries[mi];

                string filename;
                uint filedataid;

                if (reader.adtfile.objects.wmoNames.filenames == null)
                {
                    // Modern client: MMID entries are file data IDs.
                    filedataid = doodad.mmidEntry;
                    if (!Listfile.TryGetFilename(filedataid, out filename))
                    {
                        Logger.WriteLine("Could not find filename for " + filedataid + ", setting filename to filedataid..");
                        filename = filedataid.ToString();
                    }
                }
                else
                {
                    filename = reader.adtfile.objects.wmoNames.filenames[doodad.mmidEntry];
                    if (!Listfile.TryGetFileDataID(filename, out filedataid))
                    {
                        Logger.WriteLine("Error! Could not find filedataid for " + filename + "!");
                        continue;
                    }
                }

                if (!File.Exists(Path.Combine(outdir, Path.GetDirectoryName(file), Path.GetFileNameWithoutExtension(filename).ToLower() + ".obj")))
                {
                    M2Exporter.ExportM2(filedataid, null, Path.Combine(outdir, Path.GetDirectoryName(file)), filename);
                }
                if (File.Exists(Path.Combine(outdir, Path.GetDirectoryName(file), Path.GetFileNameWithoutExtension(filename).ToLower() + ".obj")))
                {
                    doodadSW.WriteLine(Path.GetFileNameWithoutExtension(filename).ToLower() + ".obj;" + doodad.position.X + ";" + doodad.position.Y + ";" + doodad.position.Z + ";" + doodad.rotation.X + ";" + doodad.rotation.Y + ";" + doodad.rotation.Z + ";" + doodad.scale / 1024f + ";" + doodad.uniqueId + ";m2");
                }
            }
        }
        doodadSW.Close();
    }

    exportworker.ReportProgress(75, "Exporting terrain textures..");

    if (bakeQuality != "none")
    {
        var mtlsw = new StreamWriter(Path.Combine(outdir, Path.GetDirectoryName(file), Path.GetFileNameWithoutExtension(file).Replace(" ", "") + ".mtl"));

        //No idea how MTL files really work yet. Needs more investigation.
        foreach (var material in materials)
        {
            mtlsw.WriteLine("newmtl " + material.Value.Replace(" ", ""));
            mtlsw.WriteLine("Ka 1.000000 1.000000 1.000000");
            mtlsw.WriteLine("Kd 0.640000 0.640000 0.640000");
            mtlsw.WriteLine("map_Ka " + material.Value.Replace(" ", "") + ".png");
            mtlsw.WriteLine("map_Kd " + material.Value.Replace(" ", "") + ".png");
        }

        mtlsw.Close();
    }

    exportworker.ReportProgress(85, "Exporting terrain geometry..");

    var indices = indicelist.ToArray();
    var adtname = Path.GetFileNameWithoutExtension(file);

    var objsw = new StreamWriter(Path.Combine(outdir, Path.GetDirectoryName(file), Path.GetFileNameWithoutExtension(file).Replace(" ", "") + ".obj"));
    objsw.WriteLine("# Written by Marlamin's WoW OBJExporter. Original file: " + file);
    if (bakeQuality != "none")
    {
        objsw.WriteLine("mtllib " + Path.GetFileNameWithoutExtension(file).Replace(" ", "") + ".mtl");
    }
    objsw.WriteLine("g " + adtname.Replace(" ", ""));

    // Write v/vt/vn per vertex; "R" round-trip format preserves double precision.
    // Counters restart every 146th vertex = one 145-vertex chunk per comment group.
    var verticeCounter = 1;
    var chunkCounter = 1;
    foreach (var vertex in verticelist)
    {
        objsw.WriteLine("# C" + chunkCounter + ".V" + verticeCounter);
        objsw.WriteLine("v " + vertex.Position.X.ToString("R") + " " + vertex.Position.Y.ToString("R") + " " + vertex.Position.Z.ToString("R"));
        objsw.WriteLine("vt " + vertex.TexCoord.X + " " + vertex.TexCoord.Y);
        objsw.WriteLine("vn " + vertex.Normal.X.ToString("R") + " " + vertex.Normal.Y.ToString("R") + " " + vertex.Normal.Z.ToString("R"));
        verticeCounter++;
        if (verticeCounter == 146)
        {
            chunkCounter++;
            verticeCounter = 1;
        }
    }

    if (bakeQuality != "high")
    {
        objsw.WriteLine("usemtl " + materials[1]);
        objsw.WriteLine("s 1");
    }

    // Faces: OBJ indices are 1-based; winding is reversed (i+2, i+1, i).
    foreach (var renderBatch in renderBatches)
    {
        var i = renderBatch.firstFace;
        if (bakeQuality == "high" && materials.ContainsKey((int)renderBatch.materialID))
        {
            objsw.WriteLine("usemtl " + materials[(int)renderBatch.materialID]);
        }
        while (i < (renderBatch.firstFace + renderBatch.numFaces))
        {
            objsw.WriteLine("f " + (indices[i + 2] + 1) + "/" + (indices[i + 2] + 1) + "/" + (indices[i + 2] + 1) + " " + (indices[i + 1] + 1) + "/" + (indices[i + 1] + 1) + "/" + (indices[i + 1] + 1) + " " + (indices[i] + 1) + "/" + (indices[i] + 1) + "/" + (indices[i] + 1));
            i = i + 3;
        }
    }
    objsw.Close();
    Logger.WriteLine("ADT OBJ Exporter: Finished with export of {0}..", file);
}
static void Backup() { var dbcd = new DBCD.DBCD(new DBCProvider(), new DBDProvider()); ChrClasses = dbcd.Load($"{DB2Path}/ChrClasses.db2"); var classInfo = new List <ClassInformation>(); foreach (var id in ChrClasses.Keys) { var defaultSpec = ChrClasses.GetField <ushort>(id, "DefaultSpec"); var powerType = ChrClasses.GetField <byte>(id, "DisplayPower"); classInfo.Add(new ClassInformation { ClassID = id, DefaultSpec = defaultSpec, DisplayPower = powerType, }); } WriteRecords("ClassInformation.csv", classInfo); var raceDict = new Dictionary <uint, byte> { { 1, 1 }, { 2, 2 }, { 4, 3 }, { 8, 4 }, { 16, 5 }, { 32, 6 }, { 64, 7 }, { 128, 8 }, { 256, 9 }, { 512, 10 }, { 1024, 11 }, { 2097152, 22 }, { 8388608, 24 }, { 16777216, 25 }, { 33554432, 26 }, { 67108864, 27 }, { 134217728, 28 }, { 268435456, 29 }, { 536870912, 30 }, { 1073741824, 31 }, { 2147483648, 32 }, { 2048, 34 }, { 4096, 35 }, { 8192, 36 }, { 16384, 37 } }; CharacterLoadout = dbcd.Load($"{DB2Path}/CharacterLoadout.db2"); CharacterLoadoutItem = dbcd.Load($"{DB2Path}/CharacterLoadoutItem.db2"); var charLoadout = new Dictionary <(byte, int), List <uint> >(); foreach (var id in CharacterLoadoutItem.Keys) { var loadoutId = CharacterLoadoutItem.GetField <int>(id, "CharacterLoadoutID"); if (loadoutId == 0) { continue; } var purpose = CharacterLoadout.GetField <int>(loadoutId, "Purpose"); if (purpose != 9) { continue; } var raceMask = CharacterLoadout.GetField <uint>(loadoutId, "Racemask"); if (!raceDict.TryGetValue(raceMask, out var raceId)) { Console.WriteLine($"Unknown Racemask: {raceMask}"); continue; } var itemId = CharacterLoadoutItem.GetField <uint>(id, "ItemID"); if (itemId == 0) { continue; } var classId = CharacterLoadout.GetField <int>(loadoutId, "ChrClassID"); if (!charLoadout.ContainsKey((raceId, classId))) { charLoadout.Add((raceId, classId), new List <uint>()); } charLoadout[(raceId, classId)].Add(itemId);