// Populates the global CASCFile name table from FileDataComplete.db2, if the
// archive contains it. Each row supplies a directory (field 0) and file name
// (field 1); rows whose Jenkins96 hash is unknown to the storage are skipped.
public void LoadFileDataComplete(CASCHandler casc)
{
    if (!casc.FileExists("DBFilesClient\\FileDataComplete.db2"))
        return;

    Logger.WriteLine("WowRootHandler: loading file names from FileDataComplete.db2...");

    using (var db2Stream = casc.OpenFile("DBFilesClient\\FileDataComplete.db2"))
    {
        var table = new WDC1Reader(db2Stream);
        var jenkins = new Jenkins96();

        foreach (var row in table)
        {
            // Full path = directory part + file name part.
            string fullPath = row.Value.GetField<string>(0) + row.Value.GetField<string>(1);
            ulong nameHash = jenkins.ComputeHash(fullPath);

            // skip invalid names
            if (!casc.FileExists(nameHash))
            {
                //Logger.WriteLine("Invalid file name: {0}", fullPath);
                continue;
            }

            CASCFile.Files[nameHash] = new CASCFile(nameHash, fullPath);
        }
    }
}
// Loads the DB2 file at <paramref name="fileName"/> into this storage.
// The 4-byte magic decides which concrete reader parses the stream
// (WDC1/WDC2/WDC3 only); anything else is rejected.
public Storage(string fileName)
{
    var fileStream = File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.Read);

    DB2Reader reader;
    using (var magicReader = new BinaryReader(fileStream))
    {
        string magic = new string(magicReader.ReadChars(4));
        // Rewind so the concrete reader sees the whole file, header included.
        fileStream.Position = 0;

        switch (magic)
        {
            case "WDC3":
                reader = new WDC3Reader(fileStream);
                break;
            case "WDC2":
                reader = new WDC2Reader(fileStream);
                break;
            case "WDC1":
                reader = new WDC1Reader(fileStream);
                break;
            default:
                throw new Exception("DBC type " + magic + " is not supported!");
        }
    }

    // Cache per-field setters and index metadata for T up front.
    FieldInfo[] targetFields = typeof(T).GetFields();
    var caches = new FieldCache<T>[targetFields.Length];
    bool hasIndexTable = reader.Flags.HasFlagExt(DB2Flags.Index);
    for (int idx = 0; idx < targetFields.Length; ++idx)
    {
        FieldInfo fi = targetFields[idx];
        bool isIndexField = hasIndexTable && Attribute.IsDefined(fi, typeof(IndexAttribute));
        caches[idx] = new FieldCache<T>(fi, fi.FieldType.IsArray, fi.GetSetter<T>(), isIndexField);
    }

    // Materialize rows in parallel.
    // NOTE(review): assumes TryAdd is thread-safe (e.g. ConcurrentDictionary base) — confirm.
    Parallel.ForEach(reader.AsEnumerable(), row =>
    {
        T record = new T();
        row.Value.GetFields(caches, record);
        TryAdd(row.Key, record);
    });
}
// Loads a DBC/DB2 table from <paramref name="stream"/> into this storage.
// The 4-byte magic selects the matching reader (WDBC through WDC3).
// The stream is disposed before this constructor returns.
public Storage(Stream stream)
{
    DB2Reader reader;

    using (stream)
    using (var bin = new BinaryReader(stream))
    {
        var identifier = new string(bin.ReadChars(4));
        stream.Position = 0; // rewind so the reader parses the header itself
        switch (identifier)
        {
            case "WDC3":
                reader = new WDC3Reader(stream);
                break;
            case "WDC2":
            case "1SLC": // alternate magic handled by the WDC2 parser
                reader = new WDC2Reader(stream);
                break;
            case "WDC1":
                reader = new WDC1Reader(stream);
                break;
            case "WDB6":
                reader = new WDB6Reader(stream);
                break;
            case "WDB5":
                reader = new WDB5Reader(stream);
                break;
            case "WDB4":
                reader = new WDB4Reader(stream);
                break;
            case "WDB3":
                reader = new WDB3Reader(stream);
                break;
            case "WDB2":
                reader = new WDB2Reader(stream);
                break;
            case "WDBC":
                reader = new WDBCReader(stream);
                break;
            default:
                throw new Exception("DB type " + identifier + " is not supported!");
        }
    }

    // Cache reflection metadata for T's public fields once.
    FieldInfo[] fields = typeof(T).GetFields();
    FieldCache<T>[] fieldCache = new FieldCache<T>[fields.Length];
    for (int i = 0; i < fields.Length; ++i)
    {
        bool indexMapAttribute = reader.Flags.HasFlagExt(DB2Flags.Index) ? Attribute.IsDefined(fields[i], typeof(IndexAttribute)) : false;
        fieldCache[i] = new FieldCache<T>(fields[i], indexMapAttribute);
    }

    // BUG FIX: the original used Parallel.ForEach with MaxDegreeOfParallelism = 1
    // guarded by lock(this) — effectively a sequential loop with scheduling
    // overhead plus a lock-on-this anti-pattern. A plain foreach performs the
    // exact same sequential work without either problem.
    foreach (var row in reader.AsEnumerable())
    {
        T entry = new T();
        row.Value.GetFields(fieldCache, entry);
        Add(row.Value.Id, entry);
    }
}
// Attempts to name files under the "unknown" folder. For WoW storages it first
// builds a fileDataId -> candidate-name map from SoundEntries.db2 or the
// SoundKit/SoundKitEntry/SoundKitName.db2 trio; unmatched files fall back to
// content sniffing (FileScanner), and M2 models additionally get their real
// name read out of the model header. Progress is reported as 0-100 percent
// via <paramref name="progressCallback"/>.
public async Task AnalyzeUnknownFiles(Action<int> progressCallback)
{
    if (_casc == null)
    {
        return;
    }

    IProgress<int> progress = new Progress<int>(progressCallback);

    await Task.Run(() =>
    {
        FileScanner scanner = new FileScanner(_casc, _root);

        // fileDataId -> one or more proposed file names.
        Dictionary<uint, List<string>> idToName = new Dictionary<uint, List<string>>();

        if (_casc.Config.GameType == CASCGameType.WoW)
        {
            // Legacy layout: all sound names live in SoundEntries.db2.
            if (_casc.FileExists("DBFilesClient\\SoundEntries.db2"))
            {
                using (Stream stream = _casc.OpenFile("DBFilesClient\\SoundEntries.db2"))
                {
                    DB2Reader se = new DB2Reader(stream);

                    foreach (var row in se)
                    {
                        // NOTE(review): field indices (2 = name, 1 = type, 4 = file count,
                        // 3..22 = fileDataIds) match a specific SoundEntries layout — confirm
                        // against the client build this tool targets.
                        string name = row.Value.GetField<string>(2);
                        int type = row.Value.GetField<int>(1);
                        bool many = row.Value.GetField<int>(4) > 0;

                        for (int i = 3; i < 23; i++)
                        {
                            uint id = row.Value.GetField<uint>(i);

                            if (!idToName.ContainsKey(id))
                            {
                                idToName[id] = new List<string>();
                            }

                            // Type 28 appears to denote MP3 content; everything else gets .ogg.
                            idToName[id].Add("unknown\\sound\\" + name + (many ? "_" + (i - 2).ToString("D2") : "") + (type == 28 ? ".mp3" : ".ogg"));
                        }
                    }
                }
            }

            // Newer layout: names in SoundKitName, fileDataIds in SoundKitEntry,
            // joined via the SoundKit row id.
            if (_casc.FileExists("DBFilesClient\\SoundKit.db2") && _casc.FileExists("DBFilesClient\\SoundKitEntry.db2") && _casc.FileExists("DBFilesClient\\SoundKitName.db2"))
            {
                using (Stream skStream = _casc.OpenFile("DBFilesClient\\SoundKit.db2"))
                using (Stream skeStream = _casc.OpenFile("DBFilesClient\\SoundKitEntry.db2"))
                using (Stream sknStream = _casc.OpenFile("DBFilesClient\\SoundKitName.db2"))
                {
                    WDC1Reader sk = new WDC1Reader(skStream);
                    WDC1Reader ske = new WDC1Reader(skeStream);
                    WDC1Reader skn = new WDC1Reader(sknStream);

                    // soundKitId -> fileDataIds belonging to that kit.
                    Dictionary<uint, List<uint>> lookup = new Dictionary<uint, List<uint>>();

                    foreach (var row in ske)
                    {
                        uint soundKitId = row.Value.GetField<uint>(0);

                        if (!lookup.ContainsKey(soundKitId))
                        {
                            lookup[soundKitId] = new List<uint>();
                        }

                        lookup[soundKitId].Add(row.Value.GetField<uint>(1));
                    }

                    foreach (var row in sk)
                    {
                        // ':' is not valid in paths, so sanitize it out of kit names.
                        string name = skn.GetRow(row.Key).GetField<string>(0).Replace(':', '_');

                        int type = row.Value.GetField<byte>(6);

                        // Kits without entries produce no candidate names.
                        if (!lookup.TryGetValue(row.Key, out List<uint> ske_entries))
                        {
                            continue;
                        }

                        // Multi-entry kits get a _NN suffix to keep names unique.
                        bool many = ske_entries.Count > 1;

                        int i = 0;

                        foreach (var fid in ske_entries)
                        {
                            if (!idToName.ContainsKey(fid))
                            {
                                idToName[fid] = new List<string>();
                            }

                            idToName[fid].Add("unknown\\sound\\" + name + (many ? "_" + (i + 1).ToString("D2") : "") + "_" + fid + (type == 28 ? ".mp3" : ".ogg"));

                            i++;
                        }
                    }
                }
            }
        }

        CASCFolder unknownFolder = _root.GetEntry("unknown") as CASCFolder;

        if (unknownFolder == null)
        {
            return;
        }

        // Materialized once (ToList) so mutating unknownFolder.Entries below
        // does not invalidate the enumeration.
        IEnumerable<CASCFile> files = CASCFolder.GetFiles(unknownFolder.Entries.Select(kv => kv.Value), null, true).ToList();
        int numTotal = files.Count();
        int numDone = 0;

        // NOTE(review): wowRoot is dereferenced below without a null check —
        // this assumes Root is always a WowRootHandler when GameType is WoW; confirm.
        WowRootHandler wowRoot = _casc.Root as WowRootHandler;

        // NOTE(review): Hasher is never used in this method — candidate for removal.
        Jenkins96 Hasher = new Jenkins96();
        char[] PathDelimiters = new char[] { '/', '\\' };

        foreach (var unknownEntry in files)
        {
            CASCFile unknownFile = unknownEntry as CASCFile;

            if (idToName.TryGetValue((uint)wowRoot.GetFileDataIdByHash(unknownFile.Hash), out List<string> name))
            {
                if (name.Count == 1)
                {
                    unknownFile.FullName = name[0];
                }
                else
                {
                    // Several candidate names: replace the single unknown entry
                    // with one entry per candidate, all sharing the same hash.
                    unknownFolder.Entries.Remove(unknownFile.Name);
                    foreach (var file in name)
                    {
                        Logger.WriteLine(file);
                        string[] parts = file.Split(PathDelimiters);
                        string entryName = parts[parts.Length - 1];
                        ulong filehash = unknownFile.Hash;
                        CASCFile entry = new CASCFile(filehash, file);
                        CASCFile.Files[filehash] = entry;
                        unknownFolder.Entries[entryName] = entry;
                    }
                }
            }
            else
            {
                // No db2-derived name: sniff the content type and append the extension.
                string ext = scanner.GetFileExtension(unknownFile);
                unknownFile.FullName += ext;

                if (ext == ".m2")
                {
                    // M2 models embed their own name; read it from the header.
                    using (var m2file = _casc.OpenFile(unknownFile.Hash))
                    using (var br = new BinaryReader(m2file))
                    {
                        m2file.Position = 0x14;
                        int nameOffs = br.ReadInt32();
                        m2file.Position = nameOffs + 8; // + sizeof(MD21)
                        string m2name = br.ReadCString();
                        unknownFile.FullName = "unknown\\" + m2name + ".m2";
                    }
                }
            }

            progress.Report((int)(++numDone / (float)numTotal * 100));
        }

        _casc.Root.Dump();
    });
}
/// <summary>
/// DB2-to-CSV converter entry point. Usage: inputdb2 outputcsv (optional: build).
/// Locates the matching DBD definition, emits a row struct at runtime, loads the
/// file via <c>Storage&lt;T&gt;</c>, and writes every row as CSV.
/// </summary>
static void Main(string[] args)
{
    if (args.Length < 2)
    {
        Console.WriteLine("Not enough arguments: inputdb2 outputcsv (optional: build)");
        return;
    }

    var filename = args[0];
    var outputcsv = args[1];

    if (!File.Exists(filename))
    {
        throw new Exception("Input DB2 file does not exist!");
    }

    // BUG FIX: Path.GetDirectoryName returns "" for a bare file name (and null
    // for a root path); the original passed that straight to Directory.Exists/
    // CreateDirectory, and CreateDirectory("") throws. Only create a directory
    // when the output path actually has one.
    var outputDir = Path.GetDirectoryName(outputcsv);
    if (!string.IsNullOrEmpty(outputDir) && !Directory.Exists(outputDir))
    {
        Directory.CreateDirectory(outputDir);
    }

    // Optional build argument; extra trailing arguments are ignored
    // (the original required exactly 3, silently dropping the build otherwise).
    var build = "";
    if (args.Length >= 3)
    {
        build = args[2];
    }

    DB2Reader reader;

    var stream = File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.Read);
    using (var bin = new BinaryReader(stream))
    {
        var identifier = new string(bin.ReadChars(4));
        stream.Position = 0;
        switch (identifier)
        {
            case "WDC3":
                reader = new WDC3Reader(stream);
                break;
            case "WDC2":
                reader = new WDC2Reader(stream);
                break;
            case "WDC1":
                reader = new WDC1Reader(stream);
                break;
            default:
                throw new Exception("DBC type " + identifier + " is not supported!");
        }
    }

    // Find the DBD definition whose file name matches the input table name.
    // Explicit OrdinalIgnoreCase instead of the original ToLower() comparison.
    var defs = new Structs.DBDefinition();
    foreach (var file in Directory.GetFiles("definitions/"))
    {
        if (string.Equals(Path.GetFileNameWithoutExtension(file), Path.GetFileNameWithoutExtension(filename), StringComparison.OrdinalIgnoreCase))
        {
            defs = new DBDReader().Read(file);
        }
    }

    // Pick a version definition: by layout hash first, then by build if supplied.
    Structs.VersionDefinitions? versionToUse;
    if (!Utils.GetVersionDefinitionByLayoutHash(defs, reader.LayoutHash.ToString("X8"), out versionToUse))
    {
        if (!string.IsNullOrWhiteSpace(build))
        {
            if (!Utils.GetVersionDefinitionByBuild(defs, new Build(build), out versionToUse))
            {
                throw new Exception("No valid definition found for this layouthash or build!");
            }
        }
        else
        {
            throw new Exception("No valid definition found for this layouthash and was not able to search by build!");
        }
    }

    // Emit a row struct at runtime matching the chosen definition.
    var aName = new AssemblyName("DynamicAssemblyExample");
    var ab = AssemblyBuilder.DefineDynamicAssembly(aName, AssemblyBuilderAccess.Run);
    var mb = ab.DefineDynamicModule(aName.Name);
    var tb = mb.DefineType(Path.GetFileNameWithoutExtension(filename) + "Struct", TypeAttributes.Public);

    foreach (var field in versionToUse.Value.definitions)
    {
        var fbNumber = tb.DefineField(field.name, DBDefTypeToType(defs.columnDefinitions[field.name].type, field.size, field.isSigned, field.arrLength), FieldAttributes.Public);
        if (field.isID)
        {
            // Mark the ID column with [Index] so the reader maps it correctly.
            var constructorParameters = new Type[] { };
            var constructorInfo = typeof(IndexAttribute).GetConstructor(constructorParameters);
            var displayNameAttributeBuilder = new CustomAttributeBuilder(constructorInfo, new object[] { });
            fbNumber.SetCustomAttribute(displayNameAttributeBuilder);
        }
    }

    var type = tb.CreateType();
    var genericType = typeof(Storage<>).MakeGenericType(type);
    var storage = (IDictionary)Activator.CreateInstance(genericType, filename);

    if (storage.Values.Count == 0)
    {
        throw new Exception("No rows found!");
    }

    // BUG FIX: the original flag set repeated BindingFlags.NonPublic twice.
    var fields = type.GetFields(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);

    // BUG FIX: StreamWriter is now in a using block so the file is flushed and
    // closed even when a row fails to serialize (the original only disposed on
    // full success).
    using (var writer = new StreamWriter(outputcsv))
    {
        var headerWritten = false;

        foreach (var item in storage.Values)
        {
            // Write the CSV header once, sized by the first row's array lengths.
            if (!headerWritten)
            {
                for (var j = 0; j < fields.Length; ++j)
                {
                    var field = fields[j];
                    var isEndOfRecord = fields.Length - 1 == j;
                    if (field.FieldType.IsArray)
                    {
                        var a = (Array)field.GetValue(item);
                        for (var i = 0; i < a.Length; i++)
                        {
                            var isEndOfArray = a.Length - 1 == i;
                            writer.Write($"{field.Name}[{i}]");
                            if (!isEndOfArray)
                                writer.Write(",");
                        }
                    }
                    else
                    {
                        writer.Write(field.Name);
                    }

                    if (!isEndOfRecord)
                        writer.Write(",");
                }

                headerWritten = true;
                writer.WriteLine();
            }

            for (var i = 0; i < fields.Length; ++i)
            {
                var field = fields[i];
                var isEndOfRecord = fields.Length - 1 == i;
                if (field.FieldType.IsArray)
                {
                    var a = (Array)field.GetValue(item);
                    for (var j = 0; j < a.Length; j++)
                    {
                        var isEndOfArray = a.Length - 1 == j;
                        writer.Write(a.GetValue(j));
                        if (!isEndOfArray)
                            writer.Write(",");
                    }
                }
                else
                {
                    var value = field.GetValue(item);
                    // Quote/escape strings so embedded commas and quotes stay valid CSV.
                    // Pattern match also tolerates a null field value, which the
                    // original GetType() call would have crashed on.
                    if (value is string s)
                        value = StringToCSVCell(s);
                    writer.Write(value);
                }

                if (!isEndOfRecord)
                    writer.Write(",");
            }

            writer.WriteLine();
        }
    }

    Environment.Exit(0);
}
private const uint WDC1FmtSig = 0x31434457; // "WDC1" magic, little-endian

// Serializes <paramref name="storage"/> into the WDC1 on-disk format on
// <paramref name="stream"/>, reusing header metadata (table hash, layout hash,
// locale, packed-data offset) from the <paramref name="reader"/> the table was
// originally loaded with. Section order: header, field meta, record data,
// string table, sparse offsets, index table, copy table, column meta, pallet
// data, common data, reference data.
public WDC1Writer(WDC1Reader reader, IDictionary<int, T> storage, Stream stream) : base(reader)
{
    // always 2 empties
    StringTableSize++;

    WDC1RowSerializer<T> serializer = new WDC1RowSerializer<T>(this);
    serializer.Serialize(storage);
    serializer.GetCopyRows();

    // Copy rows reuse another row's record data, so they are excluded from the count.
    RecordsCount = serializer.Records.Count - CopyData.Count;

    var (commonDataSize, palletDataSize, referenceDataSize) = GetDataSizes();

    using (var writer = new BinaryWriter(stream))
    {
        // NOTE(review): Min()/Max() throw InvalidOperationException on an empty
        // storage, but the "storage.Count == 0" early-return only happens after
        // the header below is written — confirm callers never pass an empty table.
        int minIndex = storage.Keys.Min();
        int maxIndex = storage.Keys.Max();
        // Sparse tables carry no copy table; each copy entry is 8 bytes (two int32s).
        int copyTableSize = Flags.HasFlagExt(DB2Flags.Sparse) ? 0 : CopyData.Count * 8;

        // --- header ---
        writer.Write(WDC1FmtSig);
        writer.Write(RecordsCount);
        writer.Write(FieldsCount);
        writer.Write(RecordSize);
        writer.Write(StringTableSize);
        writer.Write(reader.TableHash);
        writer.Write(reader.LayoutHash);
        writer.Write(minIndex);
        writer.Write(maxIndex);
        writer.Write(reader.Locale);
        writer.Write(copyTableSize);
        writer.Write((ushort)Flags);
        writer.Write((ushort)IdFieldIndex);

        writer.Write(FieldsCount); // totalFieldCount
        writer.Write(reader.PackedDataOffset);
        writer.Write(ReferenceData.Count > 0 ? 1 : 0); // RelationshipColumnCount
        writer.Write(0); // sparseTableOffset — placeholder, patched below for sparse tables
        writer.Write(RecordsCount * 4); // indexTableSize
        writer.Write(ColumnMeta.Length * 24); // ColumnMetaDataSize
        writer.Write(commonDataSize);
        writer.Write(palletDataSize);
        writer.Write(referenceDataSize);

        // Header-only output for an empty table (unreachable in practice; see NOTE above).
        if (storage.Count == 0)
        {
            return;
        }

        // field meta
        writer.WriteArray(Meta);

        // record data
        uint recordsOffset = (uint)writer.BaseStream.Position;

        foreach (var record in serializer.Records)
        {
            // Rows present in CopyData are emitted via the copy table instead.
            // NOTE(review): "parent" is unused — TryGetValue serves only as a membership test.
            if (!CopyData.TryGetValue(record.Key, out int parent))
            {
                record.Value.CopyTo(writer.BaseStream);
            }
        }

        // string table (non-sparse only); leading "" provides the empty-string slot at offset 0
        if (!Flags.HasFlagExt(DB2Flags.Sparse))
        {
            writer.WriteCString("");
            foreach (var str in StringTable)
            {
                writer.WriteCString(str.Key);
            }
        }

        // sparse data
        if (Flags.HasFlagExt(DB2Flags.Sparse))
        {
            // set the sparseTableOffset: patch the header field at byte 60 now
            // that the offset is known, then restore the write position.
            long oldPos = writer.BaseStream.Position;
            writer.BaseStream.Position = 60;
            writer.Write((uint)oldPos);
            writer.BaseStream.Position = oldPos;

            WriteOffsetRecords(writer, serializer, recordsOffset, maxIndex - minIndex + 1);
        }

        // index table: ids of real (non-copy) records
        if (Flags.HasFlagExt(DB2Flags.Index))
        {
            writer.WriteArray(serializer.Records.Keys.Except(CopyData.Keys).ToArray());
        }

        // copy table: (new id, source id) pairs
        if (!Flags.HasFlagExt(DB2Flags.Sparse))
        {
            foreach (var copyRecord in CopyData)
            {
                writer.Write(copyRecord.Key);
                writer.Write(copyRecord.Value);
            }
        }

        // column meta data
        writer.WriteArray(ColumnMeta);

        // pallet data (per pallet-compressed column)
        for (int i = 0; i < ColumnMeta.Length; i++)
        {
            if (ColumnMeta[i].CompressionType == CompressionType.Pallet || ColumnMeta[i].CompressionType == CompressionType.PalletArray)
            {
                foreach (var palletData in PalletData[i])
                {
                    writer.WriteArray(palletData);
                }
            }
        }

        // common data: (record id, value) pairs per common-compressed column
        for (int i = 0; i < ColumnMeta.Length; i++)
        {
            if (ColumnMeta[i].CompressionType == CompressionType.Common)
            {
                foreach (var commondata in CommonData[i])
                {
                    writer.Write(commondata.Key);
                    writer.Write(commondata.Value.GetValue<int>());
                }
            }
        }

        // reference data: count, min, max, then (value, record index) pairs
        if (ReferenceData.Count > 0)
        {
            writer.Write(ReferenceData.Count);
            writer.Write(ReferenceData.Min());
            writer.Write(ReferenceData.Max());

            for (int i = 0; i < ReferenceData.Count; i++)
            {
                writer.Write(ReferenceData[i]);
                writer.Write(i);
            }
        }
    }
}