/*
 * Reads the db header from Bytes (unless header parsing is disabled) and
 * caches version, header length and expected entry count.
 * Also tries to find the best TypeInfo for the data: first by fully
 * decoding the file, falling back to the latest versioned info known
 * for the type name if the decode fails.
 */
void ParseHeader() {
    // only read header if we have any data at all,
    // and if we're not in list parsing mode
    if (Bytes == null || IgnoreHeader) {
        return;
    }
    using (BinaryReader reader = new BinaryReader(new MemoryStream(Bytes))) {
        // Bytes was already null-checked above and readHeader never returns null,
        // so the old "Bytes != null && header != null" re-check was redundant.
        DBFileHeader header = PackedFileDbCodec.readHeader(reader);
        version = header.Version;
        if (DBTypeMap.Instance.IsSupported(TypeName)) {
            try {
                DBFile decoded = new PackedFileDbCodec(TypeName).Decode(Bytes);
                CurrentTypeInfo = new TypeInfo(decoded.CurrentType);
#if DEBUG
                Console.WriteLine("Found decoding type version {0}", decoded.CurrentType.Version);
#endif
            } catch {
                // full decode failed; fall back to the last known info for this version.
                // NOTE(review): assumes GetVersionedInfos is non-empty for a supported type — confirm
                List<TypeInfo> infos = DBTypeMap.Instance.GetVersionedInfos(TypeName, header.Version);
                CurrentTypeInfo = infos[infos.Count - 1];
            }
        }
        HeaderLength = header.Length;
        ExpectedEntries = header.EntryCount;
    }
}
/*
 * Query if given packed file can be decoded.
 * Is not entirely reliable because it only reads the header and checks if a
 * type definition is available for the given GUID and/or type name and version.
 * The actual decode tries out all available type infos for that type name
 * but that is less efficient because it has to read the whole file at least once
 * if successful.
 */
public static bool CanDecode(PackedFile packedFile, out string display) {
    bool result = true;
    string key = DBFile.Typename(packedFile.FullPath);
    if (DBTypeMap.Instance.IsSupported(key)) {
        try {
            DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
            // look the max version up once instead of twice
            int maxVersion = DBTypeMap.Instance.MaxVersion(key);
            if (maxVersion != 0 && header.Version > maxVersion) {
                // file is newer than any definition we have
                display = string.Format("{0}: needs {1}, has {2}", key, header.Version, maxVersion);
                result = false;
            } else {
                display = string.Format("Version: {0}", header.Version);
            }
        } catch (Exception x) {
            // if even the header can't be read, the file certainly can't be decoded;
            // the old code left result == true here
            display = string.Format("{0}: {1}", key, x.Message);
            result = false;
        }
    } else {
        display = string.Format("{0}: no definition available", key);
        result = false;
    }
    return (result);
}
/*
 * This doesn't really belong here...
 * changes all strings in an existing table definition to string_asci.
 */
public void ConvertAllStringsToAscii(string packFile) {
    PackFile file = new PackFileCodec().Open(packFile);
    foreach (PackedFile packed in file) {
        // only db tables are of interest
        if (!packed.FullPath.StartsWith("db")) {
            continue;
        }
        string typename = DBFile.Typename(packed.FullPath);
        DBFileHeader header = PackedFileDbCodec.readHeader(packed);
        if (string.IsNullOrEmpty(header.GUID)) {
            continue;
        }
        // List<FieldInfo> infos = DBTypeMap.Instance.GetInfoByGuid(header.GUID);
        if (CanDecode(packed)) {
            Console.WriteLine("already have info for {0}", header.GUID);
            continue;
        }
        // we don't have an entry for this yet; try out the ones we have
        List<TypeInfo> knownInfos = DBTypeMap.Instance.GetAllInfos(typename);
        if (knownInfos.Count > 0) {
            // TryDecode(packed, header, allInfos);
        } else {
            Console.WriteLine("no info at all for {0}", typename);
        }
    }
}
/*
 * Create an optimized packed file from the given one.
 * Non-db files are returned unchanged; db files are reduced to the rows
 * differing from the game packs, or dropped (null) when nothing differs.
 */
PackedFile CreateOptimizedFile(PackedFile toOptimize) {
    // special handling for db files; leave all others as they are
    if (!toOptimize.FullPath.StartsWith("db")) {
        return toOptimize;
    }
    PackedFile result = toOptimize;
    try {
        DBFile modDbFile = FromPacked(toOptimize);
        if (modDbFile != null) {
            DBFile gameDbFile = FindInGamePacks(toOptimize);
            if (TypesCompatible(modDbFile, gameDbFile)) {
                DBFileHeader header = new DBFileHeader(modDbFile.Header);
                DBFile optimizedFile = new DBFile(header, modDbFile.CurrentType);
                optimizedFile.Entries.AddRange(GetDifferingRows(modDbFile, gameDbFile));
                if (optimizedFile.Entries.Count == 0) {
                    // nothing differs from the game data; drop the file entirely
                    result = null;
                } else {
                    result.Data = PackedFileDbCodec.GetCodec(toOptimize).Encode(optimizedFile);
                }
            }
        }
    } catch (Exception e) {
        Console.Error.WriteLine(e);
    }
    return result;
}
/*
 * Reads a db file from stream, using the version information
 * contained in the header read from it.
 * Tries every type info known for the type name/version in turn and
 * returns the first successful decode; returns null if none applies.
 */
public DBFile Decode(Stream stream) {
    BinaryReader reader = new BinaryReader(stream);
    reader.BaseStream.Position = 0;
    DBFileHeader header = readHeader(reader);
    List<TypeInfo> infos = DBTypeMap.Instance.GetVersionedInfos(typeName, header.Version);
    if (infos.Count == 0) {
        // no exact version match; try everything we have for the type name
        infos.AddRange(DBTypeMap.Instance.GetAllInfos(typeName));
    }
    foreach (TypeInfo realInfo in infos) {
        try {
#if DEBUG
            Console.WriteLine("Parsing version {1} with info {0}", string.Join(",", realInfo.Fields), header.Version);
#endif
            // ReadFile resets the stream position to the row data itself
            return ReadFile(reader, header, realInfo);
        } catch (Exception) {
            // deliberate swallow: this info didn't fit the data, try the next one
        }
    }
    return null;
    // throw new DBFileNotSupportedException(string.Format("No applicable type definition found"));
}
/*
 * Reads all rows following the header using the given type info.
 * Throws DBFileNotSupportedException if a row fails to decode, if the
 * row count differs from the header's entry count, or if data is left
 * over after reading.
 */
public DBFile ReadFile(BinaryReader reader, DBFileHeader header, TypeInfo info) {
    reader.BaseStream.Position = header.Length;
    DBFile file = new DBFile(header, info);
    int i = 0;
    while (reader.BaseStream.Position < reader.BaseStream.Length) {
        try {
            file.Entries.Add(ReadFields(reader, info));
            i++;
        } catch (Exception x) {
            string message = string.Format("{2} at entry {0}, db version {1}", i, file.Header.Version, x.Message);
            throw new DBFileNotSupportedException(message, x);
        }
    }
    if (file.Entries.Count != header.EntryCount) {
        throw new DBFileNotSupportedException(string.Format("Expected {0} entries, got {1}", header.EntryCount, file.Entries.Count));
    } else if (reader.BaseStream.Position != reader.BaseStream.Length) {
        // fixed: the expected byte count is the stream length, not the header length
        throw new DBFileNotSupportedException(string.Format("Expected {0} bytes, read {1}", reader.BaseStream.Length, reader.BaseStream.Position));
    }
    return (file);
}
/*
 * Check whether the db file's header declares a version below the
 * highest version the game's type map knows for that table.
 */
public static bool HeaderVersionObsolete(PackedFile packedFile) {
    DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
    string type = DBFile.Typename(packedFile.FullPath);
    int maxVersion = GameManager.Instance.GetMaxDbVersion(type);
    if (!DBTypeMap.Instance.IsSupported(type) || maxVersion == 0) {
        return false;
    }
    return header.Version < maxVersion;
}
/*
 * Full decode check: the file counts as decodable only when decoding
 * yields exactly as many entries as the header announces.
 */
public static bool CanDecode(PackedFile dbFile) {
    try {
        DBFileHeader header = PackedFileDbCodec.readHeader(dbFile);
        DBFile decoded = PackedFileDbCodec.Decode(dbFile);
        return decoded.Entries.Count == header.EntryCount;
    } catch (Exception) {
        // any failure while reading the header or decoding means: can't decode
        return false;
    }
}
/*
 * Overridden to adjust the color depending on whether we have DB type information.
 * Blue: empty db file; red: no usable definition; yellow: obsolete header version.
 */
public override void ChangeColor() {
    base.ChangeColor();
    PackedFile packedFile = Tag as PackedFile;
    // fixed: the old code called Path.GetFileName(packedFile.Name) before
    // this null check, throwing NullReferenceException when Tag was not a PackedFile
    if (packedFile == null) {
        return;
    }
    string text = Path.GetFileName(packedFile.Name);
    if (packedFile.FullPath.StartsWith("db")) {
        if (packedFile.Data.Length == 0) {
            text = string.Format("{0} (empty)", packedFile.Name);
        } else {
            string mouseover;
            try {
                DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
                // text = string.Format("{0} - version {1}", text, header.Version);
                if (header.EntryCount == 0) {
                    // empty db file
                    ForeColor = Color.Blue;
                    if (Parent != null) {
                        Parent.ForeColor = Color.Blue;
                    }
                } else if (!PackedFileDbCodec.CanDecode(packedFile, out mouseover)) {
                    if (Parent != null) {
                        Parent.ToolTipText = mouseover;
                        Parent.ForeColor = Color.Red;
                    }
                    ForeColor = Color.Red;
                } else if (HeaderVersionObsolete(packedFile)) {
                    if (Parent != null) {
                        Parent.BackColor = Color.Yellow;
                    }
                    BackColor = Color.Yellow;
                }
            } catch {
                // best effort: leave default colors if the header can't be read
            }
        }
    }
    Text = text;
}
/*
 * Decode the given raw db data for the given type name.
 * Returns null when data is null or no file could be decoded.
 */
public static DBFile Decode(string typeName, byte[] data) {
    // guard before constructing the stream; the old post-hoc "data != null"
    // check could never trigger because new MemoryStream(data) throws first
    if (data == null) {
        return null;
    }
    DBFile decoded = null;
    using (BinaryReader reader = new BinaryReader(new MemoryStream(data))) {
        DBFileHeader header = PackedFileDbCodec.readHeader(reader);
        if (header != null) {
            decoded = new PackedFileDbCodec(typeName).Decode(data);
        }
    }
    return (decoded);
}
/*
 * Prints table name and header GUID for every db file in the pack
 * whose table name appears in the given list.
 */
void DumpAllGuids(PackFile pack, List<string> tables) {
    foreach (PackedFile file in pack) {
        if (!file.FullPath.StartsWith("db")) {
            continue;
        }
        string table = DBFile.Typename(file.FullPath);
        if (!tables.Contains(table)) {
            continue;
        }
        DBFileHeader header = PackedFileDbCodec.readHeader(file);
        Console.WriteLine("{0} - {1}", table, header.GUID);
    }
}
/*
 * Writes the given header to the given writer.
 * GUID and version blocks are emitted only when present/non-zero,
 * followed by the terminating byte 1 and the entry count.
 */
public static void WriteHeader(BinaryWriter writer, DBFileHeader header) {
    bool hasGuid = !string.Equals(header.GUID, "");
    bool hasVersion = header.Version != 0;
    if (hasGuid) {
        writer.Write(GUID_MARKER);
        IOFunctions.WriteCAString(writer, header.GUID, Encoding.Unicode);
    }
    if (hasVersion) {
        writer.Write(VERSION_MARKER);
        writer.Write(header.Version);
    }
    // end-of-marker byte, then the row count
    writer.Write((byte)1);
    writer.Write(header.EntryCount);
}
/*
 * Reads the db table from the given packed file and returns one ModLine
 * per entry, carrying the table's primary key names and values.
 * Returns an empty list if the file doesn't exist on disk, the table
 * can't be decoded, it has no entries, or no primary key is defined.
 */
private List<ModLine> ReadPackedField(PackedFile file, string fileName) {
    // removed unused locals "key" and the unused readHeader call of the
    // original (readHeader only reads; CanDecode re-reads the header itself)
    if (!File.Exists(fileName)) {
        return new List<ModLine>();
    }
    string exp = null;
    if (!PackedFileDbCodec.CanDecode(file, out exp)) {
        return new List<ModLine>();
    }
    PackedFileDbCodec codec = PackedFileDbCodec.GetCodec(file);
    DBFile f = codec.Decode(file.Data);
    if (f == null || f.Entries.Count == 0) {
        return new List<ModLine>();
    }
    // find the primary key fields (original comment: 寻找主键字段)
    List<string> keys = f.CurrentType.Fields.Where(p => p.PrimaryKey).Select(p => p.Name).ToList();
    if (keys.Count == 0) {
        return new List<ModLine>();
    }
    return f.Entries.Select(line => {
        ModLine modLine = new ModLine();
        modLine.FileName = fileName;
        modLine.TableName = f.CurrentType.Name;
        modLine.FieldKeyName = new string[keys.Count];
        modLine.FieldKeyValue = new string[keys.Count];
        for (int i = 0; i < keys.Count; i++) {
            modLine.FieldKeyName[i] = keys[i];
            modLine.FieldKeyValue[i] = line[keys[i]].Value;
        }
        return modLine;
    }).ToList();
}
/*
 * Reads a db file header from the given reader.
 * A leading byte of 1 means no marker blocks follow. Bytes 0xFC/0xFD
 * introduce a 4-byte marker: GUID_MARKER is followed by a CA unicode
 * string (the table GUID), VERSION_MARKER by the int32 schema version.
 * The last value read is the uint32 entry count.
 * Malformed data is handled best-effort: the catch keeps whatever was
 * read so far, so a header (possibly with defaults) is always returned.
 */
public static DBFileHeader readHeader(BinaryReader reader) {
    byte index = reader.ReadByte();
    int version = 0;
    string guid = "";
    bool hasMarker = false;
    uint entryCount = 0;
    try {
        if (index != 1) {
            // I don't think those can actually occur more than once per file
            while (index == 0xFC || index == 0xFD) {
                // re-assemble the 4-byte marker from the byte already read plus the next three
                var bytes = new List<byte>(4);
                bytes.Add(index);
                bytes.AddRange(reader.ReadBytes(3));
                UInt32 marker = BitConverter.ToUInt32(bytes.ToArray(), 0);
                if (marker == GUID_MARKER) {
                    guid = IOFunctions.ReadCAString(reader, Encoding.Unicode);
                    index = reader.ReadByte();
                } else if (marker == VERSION_MARKER) {
                    hasMarker = true;
                    version = reader.ReadInt32();
                    index = reader.ReadByte();
                    // break;
                } else {
                    throw new DBFileNotSupportedException(string.Format("could not interpret {0}", marker));
                }
            }
        }
        entryCount = reader.ReadUInt32();
    } catch {
        // deliberate best-effort: fall through with whatever was parsed so far
    }
    DBFileHeader header = new DBFileHeader(guid, version, entryCount, hasMarker);
    return (header);
}
/*
 * Walks the node tree and colours db-file nodes by validation state:
 * blue = empty table, red = not decodable (tooltip explains why),
 * yellow = obsolete header version. Recurses into non-file nodes.
 */
public static void SetColourBasedOnValidation(Collection<Node> nodes) {
    foreach (var node in nodes) {
        var packedFile = node.Tag as PackedFile;
        if (packedFile == null) {
            // not a file node: descend into the children instead
            SetColourBasedOnValidation(node.Nodes);
            continue;
        }
        if (!packedFile.FullPath.StartsWith("db")) {
            continue;
        }
        var colourNode = node as TreeNode;
        DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
        string mouseover;
        if (header.EntryCount == 0) {
            // empty db file
            colourNode.Colour = Color.Blue;
            SetColourForParent(colourNode, colourNode.Colour.Value);
        } else if (!PackedFileDbCodec.CanDecode(packedFile, out mouseover)) {
            colourNode.Colour = Color.Red;
            SetColourForAllParents(colourNode, colourNode.Colour.Value);
            colourNode.ToolTipText = mouseover;
        } else if (HeaderVersionObsolete(packedFile)) {
            colourNode.Colour = Color.Yellow;
            SetColourForParent(colourNode, colourNode.Colour.Value);
        }
    }
}
/*
 * A packed file is editable when it is a db table and at least one
 * type info is known for its name and header version.
 */
public bool CanEdit(PackedFile file) {
    if (!file.FullPath.StartsWith("db")) {
        return false;
    }
    try {
        string type = DBFile.Typename(file.FullPath);
        DBFileHeader header = PackedFileDbCodec.readHeader(file);
        bool editable = DBTypeMap.Instance.GetVersionedInfos(type, header.Version).Count > 0;
#if DEBUG
        if (!editable) {
            Console.WriteLine("Can't edit that!");
        }
#endif
        return editable;
    } catch {
        // header unreadable: treat as not editable
        return false;
    }
}
/*
 * Decode the given stream to return its data as a DBFile.
 * Expects an XML document with one element per table row; an optional
 * "edit_uuid" element carries the table GUID. Field elements may carry
 * a requires_translation="true" attribute.
 * Returns null when the document contains no data rows.
 */
public DBFile Decode(Stream stream) {
    DBFile result = null;
    using (TextReader reader = new StreamReader(stream)) {
        XmlDocument doc = new XmlDocument();
        doc.Load(reader);
        foreach (XmlNode dataroot in doc.ChildNodes) {
            string guid = "";
            foreach (XmlNode entry in dataroot.ChildNodes) {
                // ignore uuid
                if ("edit_uuid".Equals(entry.Name)) {
                    guid = entry.InnerText;
                    continue;
                }
                // use cached type info or read from TWaD if none has been cached yet
                string recordName = entry.Name;
                TypeInfo typeinfo;
                if (!allInfos.TryGetValue(recordName, out typeinfo)) {
                    typeinfo = LoadTypeInfos(recordName);
                    allInfos[recordName] = typeinfo;
                }
                // create a new header upon the first data item
                if (result == null) {
                    DBFileHeader header = new DBFileHeader(guid, 0, 0, false);
                    result = new DBFile(header, typeinfo);
                }
                // get a field-to-value map and remember the fields requiring translation
                Dictionary<string, string> fieldValues = new Dictionary<string, string>();
                List<string> requireTranslation = new List<string>();
                foreach (XmlNode row in entry.ChildNodes) {
                    fieldValues[row.Name] = row.InnerText;
                    XmlAttribute at = row.Attributes["requires_translation"];
                    if (at != null && "true".Equals(at.Value)) {
                        requireTranslation.Add(row.Name);
                    }
                }
                // create entry from type info and fill with values
                List<FieldInstance> fields = result.GetNewEntry();
                foreach (FieldInstance field in fields) {
                    string val;
                    try {
                        if (fieldValues.TryGetValue(field.Name, out val)) {
                            if (field.Info.TypeName.Equals("boolean")) {
                                // booleans are stored as "1"/"0" in the xml
                                field.Value = "1".Equals(val) ? "true" : "false";
                            } else {
                                field.Value = val;
                            }
                            field.RequiresTranslation = requireTranslation.Contains(field.Name);
                        }
                    } catch (Exception) {
                        Console.WriteLine("Wait a minute!");
                        // fixed: "throw e;" reset the stack trace; "throw;" preserves it
                        throw;
                    }
                }
                result.Entries.Add(new DBRow(typeinfo, fields));
            }
        }
    }
    return (result);
}
// read from given stream
// Parses the TSV export format: line 1 is the type name (optionally
// followed by a GUID after GUID_SEPARATOR), line 2 the version ("1.0"
// and "1.2" map to versions 0 and 1 for legacy exports), line 3 the
// column headers (skipped), remaining lines one row each.
// Tries every TypeInfo known for the type/version until one parses all
// rows with the matching field count; returns null if none fits.
public DBFile Decode(StreamReader reader) {
    // another tool might have saved tabs and quotes around this
    // (at least open office does)
    string typeInfoName = reader.ReadLine().Replace("\t", "").Trim(QUOTES);
    string[] split = typeInfoName.Split(GUID_SEPARATOR, StringSplitOptions.RemoveEmptyEntries);
    if (split.Length == 2) {
        typeInfoName = split[0];
    }
    string versionStr = reader.ReadLine().Replace("\t", "").Trim(QUOTES);
    int version;
    switch (versionStr) {
    case "1.0":
        version = 0;
        break;
    case "1.2":
        version = 1;
        break;
    default:
        version = int.Parse(versionStr);
        break;
    }
    DBFile file = null;
    // skip table header
    reader.ReadLine();
    // slurp all data rows up front so each candidate info sees the same lines
    List<String> read = new List<String>();
    while (!reader.EndOfStream) {
        read.Add(reader.ReadLine());
    }
    List<TypeInfo> infos = DBTypeMap.Instance.GetVersionedInfos(typeInfoName, version);
    foreach (TypeInfo info in infos) {
        bool parseSuccessful = true;
        List<DBRow> entries = new List<DBRow>();
        foreach (String line in read) {
            try {
                String[] strArray = line.Split(TABS, StringSplitOptions.None);
                if (strArray.Length != info.Fields.Count) {
                    // column count mismatch: this info does not fit
                    parseSuccessful = false;
                    break;
                }
                List<FieldInstance> item = new List<FieldInstance>();
                for (int i = 0; i < strArray.Length; i++) {
                    FieldInstance field = info.Fields[i].CreateInstance();
                    string fieldValue = CsvUtil.Unformat(strArray[i]);
                    field.Value = fieldValue;
                    item.Add(field);
                }
                entries.Add(new DBRow(info, item));
#if DEBUG
            } catch (Exception x) {
                Console.WriteLine(x);
#else
            } catch {
#endif
                // any parse error: abandon this info and try the next one
                parseSuccessful = false;
                break;
            }
        }
        if (parseSuccessful) {
            String guid = "";
            DBFileHeader header = new DBFileHeader(guid, version, (uint)entries.Count, version != 0);
            file = new DBFile(header, info);
            file.Entries.AddRange(entries);
            break;
        }
    }
    return (file);
}
/*
 * dbcorrect tool entry point: opens the given pack file and re-encodes
 * every db table in it into up to three output packs:
 *  - <name>_corrected.pack: tables decodable with a known definition
 *    (entry counts corrected where the header disagreed with the data)
 *  - <name>_empty.pack: tables without any entries
 *  - <name>_unknown.pack: tables no available definition could decode
 */
public static void Main(string[] args) {
    if (args.Length == 0) {
        Console.Out.WriteLine("usage: dbcorrect [-cleanup] <packfile>");
        return;
    }
    bool cleanup = false;
    String inPackFileName = args[0];
    if (args.Length == 2) {
        cleanup = "-cleanup".Equals(args[0]);
        Console.WriteLine("Cleanup enabled (will not add empty db files)");
        inPackFileName = args[1];
    }
    Console.Out.WriteLine("opening {0}", inPackFileName);
    PackFile packFile = new PackFileCodec().Open(inPackFileName);
    String correctedFileName = inPackFileName.Replace(".pack", "_corrected.pack");
    String emptyFileName = inPackFileName.Replace(".pack", "_empty.pack");
    String missingFileName = inPackFileName.Replace(".pack", "_unknown.pack");
    PackFile correctedPack = new PackFile(correctedFileName, packFile.Header);
    PackFile emptyPack = new PackFile(emptyFileName, packFile.Header);
    PackFile missingPack = new PackFile(missingFileName, packFile.Header);
    // schema files are expected next to the executable
    DBTypeMap.Instance.InitializeTypeMap(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location));
    VirtualDirectory dbDir = packFile.Root.GetSubdirectory("db");
    foreach (PackedFile packedFile in dbDir.AllFiles) {
        PackFile targetPack = correctedPack;
        Console.Out.WriteLine(packedFile.FullPath);
        DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
        DBFileHeader newHeader = new DBFileHeader(header);
        if (header.EntryCount == 0) {
            // NOTE(review): the "cleanup" flag parsed above is never consulted;
            // empty tables always go to the empty pack — confirm intent
            emptyPack.Add(packedFile);
            continue;
        }
        String typeName = DBFile.Typename(packedFile.FullPath);
        byte[] fileData = packedFile.Data;
        // we only accept the exact type/version combination here
        // and only if we don't have to go around trying which one it is (yet)
        var infos = DBTypeMap.Instance.GetVersionedInfos(typeName, header.Version);
        bool added = false;
        foreach (Filetypes.TypeInfo typeInfo in infos) {
            Console.Out.WriteLine("trying {0}", typeInfo);
            DBFile newDbFile = new DBFile(newHeader, typeInfo);
            try {
                using (BinaryReader reader = new BinaryReader(new MemoryStream(fileData, 0, fileData.Length))) {
                    // skip past the header to the row data
                    reader.BaseStream.Position = header.Length;
                    while (reader.BaseStream.Position != fileData.Length) {
                        // try decoding a full row of fields and add it to the new file
                        DBRow newRow = new DBRow(typeInfo);
                        foreach (Filetypes.FieldInfo info in typeInfo.Fields) {
                            newRow[info.Name].Decode(reader);
                            //FieldInstance instance = info.CreateInstance();
                            //instance.Decode(reader);
                            //newRow.Add(instance);
                        }
                        newDbFile.Entries.Add(newRow);
                    }
                    // all data read successfully!
                    if (newDbFile.Entries.Count == header.EntryCount) {
                        Console.Out.WriteLine("{0}: entry count {1} is correct", packedFile.FullPath, newDbFile.Entries.Count);
#if DEBUG
                        // foreach(DBRow row in newDbFile.Entries) {
                        //     String line = "";
                        //     foreach(FieldInstance instance in row) {
                        //         line += String.Format("{0} - ", line);
                        //     }
                        //     Console.WriteLine(line);
                        // }
#endif
                    } else {
                        Console.Out.WriteLine("{0}: entry count {1} will be corrected to {2}", packedFile.FullPath, header.EntryCount, newDbFile.Entries.Count);
                    }
                    if (newDbFile.Entries.Count == 0) {
                        targetPack = emptyPack;
                    }
                    // re-encode with the (possibly corrected) entry count
                    PackedFile newPackedFile = new PackedFile(packedFile.FullPath, false);
                    PackedFileDbCodec codec = PackedFileDbCodec.FromFilename(packedFile.FullPath);
                    newPackedFile.Data = codec.Encode(newDbFile);
                    targetPack.Add(newPackedFile);
                    added = true;
                    Console.Out.WriteLine("stored file with {0} entries", newDbFile.Entries.Count);
                    break;
                }
            } catch (Exception e) {
                Console.Error.WriteLine("Will not add {0}: a problem occurred when reading it: {1} at entry {2}", packedFile.FullPath, e, newDbFile.Entries.Count);
            }
        }
        if (!added) {
            // no definition decoded this table; keep the original bytes aside
            missingPack.Add(packedFile);
        }
    }
    Console.Out.WriteLine("saving {0}", correctedPack.Filepath);
    PackFileCodec packCodec = new PackFileCodec();
    packCodec.Save(correctedPack);
    packCodec.Save(emptyPack);
    packCodec.Save(missingPack);
}
// test the given packed file as a database file
// tests PackedFileCodec and the db definitions we have
// Classifies the file into one of the result sets: emptyTables (no data),
// supported (decoded with the expected entry count), invalidDefForVersion
// (decode failed or entry count mismatched) or noDefinition.
public override void TestFile(PackedFile file) {
    if (!DBTypeMap.Instance.Initialized) {
        DBTypeMap.Instance.InitializeTypeMap(Directory.GetCurrentDirectory());
    }
    allTestedFiles.Add(file.FullPath);
    if (file.Size == 0) {
        // no bytes at all: record as empty with sentinel version -1
        emptyTables.Add(new Tuple<string, int>(DBFile.Typename(file.FullPath), -1));
        return;
    }
    // PackedFileDbCodec packedCodec = PackedFileDbCodec.FromFilename(file.FullPath);
    string type = DBFile.Typename(file.FullPath);
    DBFileHeader header = PackedFileDbCodec.readHeader(file);
    Tuple<string, int> tuple = new Tuple<string, int>(string.Format("{0} # {1}", type, header.GUID), header.Version);
    if (OutputTable) {
        Console.WriteLine("TABLE:{0}#{1}#{2}", type, header.Version, header.GUID);
    }
    Console.Out.Flush();
    if (header.EntryCount == 0) {
        // special case: we will never find out the structure of a file
        // if it contains no data
        emptyTables.Add(tuple);
    } else if (DBTypeMap.Instance.IsSupported(type)) {
        SortedSet<Tuple<string, int>> addTo = null;
        try {
#if DEBUG
            // breakpoint hook for a table name given in debug_at
            if (!string.IsNullOrEmpty(debug_at) && file.FullPath.Contains(debug_at)) {
                Console.WriteLine("stop right here");
            }
#endif
            // a wrong db definition might not cause errors,
            // but read less entries than there are
            DBFile dbFile = PackedFileDbCodec.Decode(file);
            if (dbFile.Entries.Count == dbFile.Header.EntryCount) {
                addTo = supported;
                //if (!string.IsNullOrEmpty(header.GUID)) {
                //    List<FieldInfo> fields = new List<FieldInfo>(DBTypeMap.Instance.GetVersionedInfo(header.GUID, type, header.Version).fields);
                //    DBTypeMap.Instance.SetByGuid(header.GUID, type, header.Version, fields);
                //}
                // only test tsv import/export if asked,
                // it takes some time more than just the read checks
                if (TestTsv) {
                    TestTsvExport(dbFile);
                }
            } else {
                // didn't get what we expect
                addTo = invalidDefForVersion;
#if DEBUG
                if (!string.IsNullOrEmpty(debug_at) && file.FullPath.EndsWith(debug_at)) {
                    Console.WriteLine("adding watched to invalid");
                }
#endif
            }
        } catch {
#if DEBUG
            if (!string.IsNullOrEmpty(debug_at) && file.FullPath.EndsWith(debug_at)) {
                Console.WriteLine("adding watched to invalid");
            }
#endif
            // decode threw: the definition does not fit this version
            addTo = invalidDefForVersion;
        }
        addTo.Add(tuple);
    } else {
        noDefinition.Add(tuple);
    }
}
/*
 * Schema-scan tool entry point.
 * With -g: shows the db table display dialog and exits.
 * Otherwise scans the db directories of every installed game's pack
 * files for tables that fail to decode with the current master schema.
 * With -x: collects the raw bytes of failing tables into
 * "undecoded.pack"; without it, opens the interactive decode tool
 * for each failure instead.
 */
public static void Main(string[] args) {
    PackFile exported = null;
    bool showManager = (args.Length > 0 && args[0].Equals("-g"));
    if (showManager) {
        DBTableDisplay display = new DBTableDisplay();
        display.ShowDialog();
    } else {
        bool export = (args.Length > 0 && args[0].Equals("-x"));
        Console.WriteLine("exporting undecoded to file");
        exported = new PackFile("undecoded.pack", new PFHeader("PFH4"));
        DBTypeMap.Instance.initializeFromFile("master_schema.xml");
        foreach (Game game in Game.Games) {
            LoadGameLocationFromFile(game);
            if (game.IsInstalled) {
                foreach (string packFileName in Directory.EnumerateFiles(game.DataDirectory, "*pack")) {
                    Console.WriteLine("checking {0}", packFileName);
                    PackFile packFile = new PackFileCodec().Open(packFileName);
                    foreach (VirtualDirectory dir in packFile.Root.Subdirectories.Values) {
                        if (dir.Name.Equals("db")) {
                            foreach (PackedFile dbFile in dir.AllFiles) {
                                // skip the naval model tables
                                if (dbFile.Name.Contains("models_naval")) {
                                    continue;
                                }
                                // DBFileHeader header = PackedFileDbCodec.readHeader(dbFile);
                                DBFile decoded = PackedFileDbCodec.Decode(dbFile);
                                DBFileHeader header = PackedFileDbCodec.readHeader(dbFile);
                                // only non-empty tables that failed to decode are interesting
                                if (decoded == null && header.EntryCount != 0) {
                                    Console.WriteLine("failed to read {0} in {1}", dbFile.FullPath, packFile);
                                    if (export) {
                                        String exportFileName = String.Format("db/{0}_{1}_{2}", game.Id, dbFile.Name, Path.GetFileName(packFileName)).ToLower();
                                        PackedFile exportedDbFile = new PackedFile(exportFileName, false) {
                                            Data = dbFile.Data
                                        };
                                        exported.Add(exportedDbFile);
                                    } else {
                                        string key = DBFile.Typename(dbFile.FullPath);
                                        // ETW/NTW/STW get unicode string handling — presumably
                                        // because those titles encode db strings differently; confirm
                                        bool unicode = false;
                                        if (game == Game.ETW || game == Game.NTW || game == Game.STW) {
                                            unicode = true;
                                        }
                                        DecodeTool.DecodeTool decoder = new DecodeTool.DecodeTool(unicode) {
                                            TypeName = key,
                                            Bytes = dbFile.Data
                                        };
                                        decoder.ShowDialog();
                                    }
                                }
                            }
                        }
                    }
                }
                // NOTE(review): this saves the export pack once per installed game
                // rather than once after the whole loop — confirm intent
                if (export) {
                    new PackFileCodec().Save(exported);
                }
            } else {
                Console.Error.WriteLine("Game {0} not installed in {1}", game, game.GameDirectory);
            }
        }
    }
}