/// <summary>
/// Returns the merged DataTable for the given game version and table name,
/// built from all packs of that version. Results are cached per
/// "version_name" key; a null result (table present in no pack) is cached too.
/// </summary>
public DataTable Table(string version, string name) {
    var key = $"{version}_{name}";
    lock (_dataPack) {
        // Fix: TryGetValue replaces the ContainsKey + indexer double lookup.
        if (_loadedTables.TryGetValue(key, out var cached)) {
            return cached;
        }
        // Collect the "db/<name>" subdirectory from every pack of this
        // version; packs that don't contain the table are skipped.
        var table_files = _dataPack[version]
            .Select(dp => dp.Root.GetSubdirectory("db")?.GetSubdirectory(name))
            .Where(tf => tf != null);
        DataTable table = null;
        foreach (var file in table_files.SelectMany(tf => tf.Files)) {
            var editedFile = PackedFileDbCodec.Decode(file);
            var table_piece = CreateTable(file, editedFile);
            if (table == null) {
                table = table_piece;
            } else {
                // Later packs are merged into the first piece found.
                table.Merge(table_piece);
            }
        }
        // Cache even a null result so missing tables aren't re-scanned.
        _loadedTables[key] = table;
        return table;
    }
}
// Reads the db file header from Bytes and primes version, CurrentTypeInfo,
// HeaderLength and ExpectedEntries. Does nothing when there is no data or
// when we're in list parsing mode (IgnoreHeader).
void ParseHeader() {
    // only read header if we have any data at all,
    // and if we're not in list parsing mode
    if (Bytes == null || IgnoreHeader) {
        return;
    }
    using (BinaryReader reader = new BinaryReader(new MemoryStream(Bytes))) {
        DBFileHeader header = PackedFileDbCodec.readHeader(reader);
        // Fix: the original guarded the block below with
        // "Bytes != null && header != null", but Bytes was already checked by
        // the early return and header is dereferenced right here — the guard
        // was always true and has been removed.
        version = header.Version;
        if (DBTypeMap.Instance.IsSupported(TypeName)) {
            try {
                // Prefer the exact type info obtained by a successful decode.
                DBFile decoded = new PackedFileDbCodec(TypeName).Decode(Bytes);
                CurrentTypeInfo = new TypeInfo(decoded.CurrentType);
#if DEBUG
                Console.WriteLine("Found decoding type version {0}", decoded.CurrentType.Version);
#endif
            } catch {
                // Decode failed: fall back to the last (newest) known
                // definition for this type/version.
                List<TypeInfo> infos = DBTypeMap.Instance.GetVersionedInfos(TypeName, header.Version);
                CurrentTypeInfo = infos[infos.Count - 1];
            }
        }
        HeaderLength = header.Length;
        ExpectedEntries = header.EntryCount;
    }
}
/*
 * This doesn't really belong here...
 * changes all strings in an existing table definition to string_asci.
 */
public void ConvertAllStringsToAscii(string packFile) {
    PackFile opened = new PackFileCodec().Open(packFile);
    foreach (PackedFile packed in opened) {
        // only db tables are of interest here
        if (!packed.FullPath.StartsWith("db")) {
            continue;
        }
        string typename = DBFile.Typename(packed.FullPath);
        DBFileHeader header = PackedFileDbCodec.readHeader(packed);
        // only GUID-tagged tables are considered
        if (string.IsNullOrEmpty(header.GUID)) {
            continue;
        }
        // List<FieldInfo> infos = DBTypeMap.Instance.GetInfoByGuid(header.GUID);
        if (CanDecode(packed)) {
            Console.WriteLine("already have info for {0}", header.GUID);
            continue;
        }
        // we don't have an entry for this yet; try out the ones we have
        List<TypeInfo> knownInfos = DBTypeMap.Instance.GetAllInfos(typename);
        if (knownInfos.Count > 0) {
            // TryDecode(packed, header, allInfos);
        } else {
            Console.WriteLine("no info at all for {0}", typename);
        }
    }
}
/*
 * Create an optimized packed file from the given one.
 * Only db files receive special handling; all other files are returned
 * unchanged. Returns null when the optimized table would be empty.
 */
PackedFile CreateOptimizedFile(PackedFile toOptimize) {
    if (!toOptimize.FullPath.StartsWith("db")) {
        return toOptimize;
    }
    PackedFile result = toOptimize;
    try {
        DBFile modTable = FromPacked(toOptimize);
        if (modTable != null) {
            DBFile gameTable = FindInGamePacks(toOptimize);
            if (TypesCompatible(modTable, gameTable)) {
                // keep only the rows that differ from the game's own data
                DBFileHeader headerCopy = new DBFileHeader(modTable.Header);
                DBFile optimized = new DBFile(headerCopy, modTable.CurrentType);
                optimized.Entries.AddRange(GetDifferingRows(modTable, gameTable));
                if (optimized.Entries.Count == 0) {
                    // nothing left to keep: drop the file entirely
                    result = null;
                } else {
                    result.Data = PackedFileDbCodec.GetCodec(toOptimize).Encode(optimized);
                }
            }
        }
    } catch (Exception e) {
        Console.Error.WriteLine(e);
    }
    return result;
}
// Builds the mapping tables for every db file found in the given pack,
// pairing values from the pack with values from the xml directory.
// NOTE(review): this definition appears truncated in the visible source
// (the #if/#else branch is not closed); code is reproduced as-is.
public FieldCorrespondencyFinder(string packFile, string xmlDir) {
    xmlDirectory = xmlDir;
    DBTypeMap.Instance.InitializeTypeMap(Directory.GetCurrentDirectory());
    // initialize patchFileValues from pack file
    PackFile pack = new PackFileCodec().Open(packFile);
    foreach (PackedFile contained in pack.Files) {
        if (contained.FullPath.StartsWith("db")) {
            // no need to resolve if it's done already...
            // strip the "_tables" suffix to get the plain table name
            string tableName = DBFile.Typename(contained.FullPath).Replace("_tables", "");
            try {
                PackedFileDbCodec codec = PackedFileDbCodec.GetCodec(contained);
                // keep the GUID exactly as stored in the pack
                codec.AutoadjustGuid = false;
                DBFile dbFile = codec.Decode(contained.Data);
                MappedDataTable table = new MappedDataTable(tableName);
                // fill the mapped table from both data sources
                ValuesFromPack(table, dbFile);
                ValuesFromXml(table);
                mappedTables[tableName] = table;
#if DEBUG
            } catch (Exception e) {
                Console.Error.WriteLine(e.Message);
            }
#else
            } catch { }
/**
 * <summary>Removes all entries identical between the <paramref name="unoptimizedFile"/> and the <paramref name="referenceFiles"/> from the <paramref name="unoptimizedFile"/>.</summary>
 * <remarks>This function was intended to be passed a <see cref="PackedFile"/> that contains DB tables. If it is passed a PackedFile without DB tables it will not work properly.</remarks>
 *
 * <param name="unoptimizedFile">The <see cref="PackedFile"/> to be optimized. It must contain a DB table for the method to work.</param>
 * <param name="referenceFiles">A <see cref="List{DBFile}"/> of <see cref="DBFile">DBFiles</see> that should be checked for identical table rows in the <paramref name="unoptimizedFile"/>.</param>
 *
 * <returns>A new <see cref="PackedFile"/> that contains the optimized data from the <paramref name="unoptimizedFile"/> or null if the resulting <see cref="PackedFile"/> would be empty.</returns>
 */
public PackedFile OptimizePackedDBFile(PackedFile unoptimizedFile, List<DBFile> referenceFiles) {
    DBFile tableToOptimize = FromPacked(unoptimizedFile);
    if (tableToOptimize == null) {
        // not decodable as a db table: hand the file back untouched
        return unoptimizedFile;
    }
    // drop every row that also appears in a type-compatible reference table
    foreach (DBFile reference in referenceFiles) {
        if (TypesCompatible(tableToOptimize, reference)) {
            tableToOptimize.Entries.RemoveAll(reference.ContainsRow);
        }
    }
    if (tableToOptimize.Entries.Count == 0) {
        // every row was redundant; signal the caller to drop the file
        return null;
    }
    unoptimizedFile.Data = PackedFileDbCodec.GetCodec(unoptimizedFile).Encode(tableToOptimize);
    return unoptimizedFile;
}
/*
 * Insert the previously given values into the db table.
 * A warning will be printed and no data added if the given data doesn't
 * fit the db file's structure.
 */
public override void Execute() {
    // insert always into packed files at the save to file
    foreach (PackedFile sourcePacked in PackedFiles) {
        // we'll read from the source pack, but the db file itself is fetched
        // from (or created in) the target pack
        DBFile target = GetTargetFile(sourcePacked);
        foreach (RowValues rowValues in Source.Values) {
            if (rowValues.Count != target.CurrentType.Fields.Count) {
                // shape mismatch: warn and skip this row
                Console.WriteLine("Cannot insert: was given {0} values, expecting {1} in {2}",
                                  rowValues.Count, target.CurrentType.Fields.Count, sourcePacked.FullPath);
                Console.WriteLine("Values: {0}", string.Join(",", rowValues));
                continue;
            }
            DBRow row = target.GetNewEntry();
            for (int fieldIndex = 0; fieldIndex < row.Count; fieldIndex++) {
                row[fieldIndex].Value = rowValues[fieldIndex];
            }
            target.Entries.Add(row);
        }
        // encode and store in target pack
        PackedFile encoded = new PackedFile(sourcePacked.FullPath, false);
        encoded.Data = PackedFileDbCodec.GetCodec(encoded).Encode(target);
        SaveTo.Add(encoded, true);
    }
}
// A db file's header is considered obsolete when its type is supported,
// the game defines a non-zero maximum version for it, and the file's
// version is below that maximum.
public static bool HeaderVersionObsolete(PackedFile packedFile) {
    DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
    string tableType = DBFile.Typename(packedFile.FullPath);
    int latestVersion = GameManager.Instance.GetMaxDbVersion(tableType);
    if (!DBTypeMap.Instance.IsSupported(tableType) || latestVersion == 0) {
        return false;
    }
    return header.Version < latestVersion;
}
/// <summary>
/// Converts the given packed db file to its TSV text representation and
/// returns the resulting bytes.
/// </summary>
public byte[] Process(PackedFile file) {
    // Fix: the original initialized the result with file.Data, but that
    // value was always overwritten before being returned — dead assignment
    // removed.
    using (MemoryStream stream = new MemoryStream()) {
        DBFile dbFile = PackedFileDbCodec.Decode(file);
        TextDbCodec.Instance.Encode(stream, dbFile);
        return stream.ToArray();
    }
}
// True when the file decodes without error and the decoded entry count
// matches the count announced in the header; false on any failure.
public static bool CanDecode(PackedFile dbFile) {
    try {
        DBFileHeader header = PackedFileDbCodec.readHeader(dbFile);
        DBFile decoded = PackedFileDbCodec.Decode(dbFile);
        return decoded.Entries.Count == header.EntryCount;
    } catch (Exception) {
        // any decode problem simply means "cannot decode"
        return false;
    }
}
/*
 * Create db file from the given packed file.
 * Will not throw an exception on error, but return null.
 */
DBFile FromPacked(PackedFile packed) {
    try {
        PackedFileDbCodec codec = PackedFileDbCodec.GetCodec(packed);
        return codec != null ? codec.Decode(packed.Data) : null;
    } catch {
        // decode failures are reported as null by design
        return null;
    }
}
/*
 * Overridden to adjust the color depending on whether we have DB type
 * information: blue = empty table, red = undecodable (tooltip carries the
 * reason), yellow = header version obsolete.
 */
public override void ChangeColor() {
    base.ChangeColor();
    PackedFile packedFile = Tag as PackedFile;
    // Fix: the original read packedFile.Name BEFORE this null check and
    // threw NullReferenceException whenever Tag was not a PackedFile.
    if (packedFile == null) {
        return;
    }
    string text = Path.GetFileName(packedFile.Name);
    if (packedFile.FullPath.StartsWith("db")) {
        if (packedFile.Data.Length == 0) {
            text = string.Format("{0} (empty)", packedFile.Name);
        } else {
            string mouseover;
            try {
                DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
                if (header.EntryCount == 0) {
                    // empty db file
                    ForeColor = Color.Blue;
                    if (Parent != null) {
                        Parent.ForeColor = Color.Blue;
                    }
                } else if (!PackedFileDbCodec.CanDecode(packedFile, out mouseover)) {
                    // undecodable: mark red and surface the reason as tooltip
                    if (Parent != null) {
                        Parent.ToolTipText = mouseover;
                        Parent.ForeColor = Color.Red;
                    }
                    ForeColor = Color.Red;
                } else if (HeaderVersionObsolete(packedFile)) {
                    if (Parent != null) {
                        Parent.BackColor = Color.Yellow;
                    }
                    BackColor = Color.Yellow;
                }
            } catch { }
        }
    }
    Text = text;
}
/// <summary>
/// Decodes raw db data of the given type. The header read doubles as a
/// sanity check: when no header can be produced, no decode is attempted.
/// </summary>
public static DBFile Decode(string typeName, byte[] data) {
    DBFile decoded = null;
    using (BinaryReader reader = new BinaryReader(new MemoryStream(data))) {
        DBFileHeader header = PackedFileDbCodec.readHeader(reader);
        // Fix: the original also checked "data != null" here; that was
        // always true because new MemoryStream(data) above would already
        // have thrown ArgumentNullException on a null argument.
        if (header != null) {
            decoded = new PackedFileDbCodec(typeName).Decode(data);
        }
    }
    return decoded;
}
// Prints "type - GUID" for every db table in the pack whose type name
// appears in the given list.
void DumpAllGuids(PackFile pack, List<string> tables) {
    foreach (PackedFile packedFile in pack) {
        if (!packedFile.FullPath.StartsWith("db")) {
            continue;
        }
        string tableType = DBFile.Typename(packedFile.FullPath);
        if (tables.Contains(tableType)) {
            DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
            Console.WriteLine("{0} - {1}", tableType, header.GUID);
        }
    }
}
/*
 * Select all rows matching the where clause (or all if none was given)
 * and set the given values to all corresponding fields.
 * Note: If the assignment list contains a non-existing field,
 * that assignment is ignored without warning.
 */
public override void Execute() {
    foreach (PackedFile packed in PackedFiles) {
        DBFile table = PackedFileDbCodec.Decode(packed);
        foreach (List<FieldInstance> row in table.Entries) {
            // no clause means every row matches
            bool matches = whereClause == null || whereClause.Accept(row);
            if (matches) {
                AdjustValues(row);
            }
        }
        // write the modified table back into the packed file
        packed.Data = PackedFileDbCodec.GetCodec(packed).Encode(table);
    }
}
/*
 * Open or create db file in the target pack file.
 */
DBFile GetTargetFile(PackedFile packed) {
    // reuse the table if the target pack already contains one at this path
    PackedFile existing = SaveTo[packed.FullPath] as PackedFile;
    DBFile target = existing != null ? PackedFileDbCodec.Decode(existing) : null;
    if (target == null) {
        // otherwise create an empty table with the source file's structure
        DBFile template = PackedFileDbCodec.Decode(packed);
        target = new DBFile(template.Header, template.CurrentType);
    }
    return target;
}
// Imports an existing pack file as a mod: registers the mod, then extracts
// every contained file — db tables as mod-tool xml into the raw_data
// locations, everything else verbatim into working_data.
public void ImportExistingPack(string packFileName) {
    string modName = PACK_FILE_RE.Replace(Path.GetFileName(packFileName), "");
    // trigger creation of backup folder
    new Mod(modName);
    MultiMods.Instance.AddMod(modName);
    PackFile pack = new PackFileCodec().Open(packFileName);
    foreach (PackedFile packed in pack) {
        List<string> targetPaths = new List<string>();
        byte[] exportData;
        // NOTE(review): lowercase DBFile.typename differs from the
        // DBFile.Typename used elsewhere in this codebase — presumably both
        // exist; verify.
        if (DBTypeMap.Instance.IsSupported(DBFile.typename(packed.FullPath))) {
            // supported db tables are exported as xml for the mod tools
            PackedFileDbCodec readCodec = PackedFileDbCodec.GetCodec(packed);
            Codec<DBFile> xmlCodec = new ModToolDBCodec(FieldMappingManager.Instance);
            DBFile decoded = readCodec.Decode(packed.Data);
            using (var buffer = new MemoryStream()) {
                xmlCodec.Encode(buffer, decoded);
                exportData = buffer.ToArray();
            }
            string xmlName = string.Format("{0}.xml", packed.Name);
            targetPaths.Add(Path.Combine(ModTools.Instance.InstallDirectory, "raw_data", "db", xmlName));
            targetPaths.Add(Path.Combine(ModTools.Instance.RawDataPath, "EmpireDesignData", "db", xmlName));
        } else {
            // everything else goes to working_data as-is
            targetPaths.Add(Path.Combine(ModTools.Instance.WorkingDataPath, packed.FullPath));
            exportData = packed.Data;
        }
        foreach (string targetPath in targetPaths) {
            string targetDir = Path.GetDirectoryName(targetPath);
            if (!Directory.Exists(targetDir)) {
                Directory.CreateDirectory(targetDir);
            }
            File.WriteAllBytes(targetPath, exportData);
        }
    }
}
// Builds one ModLine per table row of the given packed db file, recording
// the primary-key field names and values. Returns an empty list whenever
// the file is missing, undecodable, empty, or has no primary keys.
private List<ModLine> ReadPackedField(PackedFile file, string fileName) {
    if (!File.Exists(fileName)) {
        return new List<ModLine>();
    }
    string key = DBFile.Typename(file.FullPath);
    DBFileHeader header = PackedFileDbCodec.readHeader(file);
    string problem = null;
    if (!PackedFileDbCodec.CanDecode(file, out problem)) {
        return new List<ModLine>();
    }
    PackedFileDbCodec codec = PackedFileDbCodec.GetCodec(file);
    DBFile decoded = codec.Decode(file.Data);
    if (decoded == null || decoded.Entries.Count == 0) {
        return new List<ModLine>();
    }
    // find the primary key fields (translated from the original comment)
    List<string> primaryKeys = decoded.CurrentType.Fields
        .Where(f => f.PrimaryKey)
        .Select(f => f.Name)
        .ToList();
    if (primaryKeys.Count == 0) {
        return new List<ModLine>();
    }
    return decoded.Entries.Select(row => {
        ModLine modLine = new ModLine();
        modLine.FileName = fileName;
        modLine.TableName = decoded.CurrentType.Name;
        modLine.FieldKeyName = new string[primaryKeys.Count];
        modLine.FieldKeyValue = new string[primaryKeys.Count];
        for (int i = 0; i < primaryKeys.Count; i++) {
            modLine.FieldKeyName[i] = primaryKeys[i];
            modLine.FieldKeyValue[i] = row[primaryKeys[i]].Value;
        }
        return modLine;
    }).ToList();
}
// Collects every decodable db file from the "tablepathstring" subdirectory
// of DB, keyed by file name. Files that fail to decode are skipped silently.
public static Dictionary<string, DBFile> getDBFiles(this VirtualDirectory DB, string tablepathstring) {
    VirtualDirectory tableDir = DB.Subdirectories.FirstOrDefault(sub => sub.Name.Equals(tablepathstring));
    if (tableDir == null) {
        return new Dictionary<string, DBFile>(0);
    }
    var result = new Dictionary<string, DBFile>(tableDir.Count());
    foreach (var packedFile in tableDir) {
        DBFile decoded;
        try {
            decoded = PackedFileDbCodec.Decode(packedFile);
        } catch {
            // undecodable files are skipped
            continue;
        }
        result.Add(packedFile.Name, decoded);
    }
    return result;
}
// Tests a packed building-model file: records empty files, files whose
// decoded model count disagrees with the header, and successes.
public override void TestFile(PackedFile packed) {
    allTestedFiles.Add(packed.FullPath);
    if (packed.Data.Length == 0) {
        emptyFiles.Add(packed.FullPath);
        return;
    }
    // Fix: the original wrapped this in a MemoryStream over packed.Data that
    // was created, disposed and never used — the codec decodes directly from
    // the PackedFile.
    BuildingModelFile bmFile = new BuildingModelFile(PackedFileDbCodec.Decode(packed));
    if (bmFile.Header.EntryCount != bmFile.Models.Count) {
        countErrors.Add(string.Format("{0}: invalid count. Should be {1}, is {2}",
                                      packed.Name, bmFile.Header.EntryCount, bmFile.Models.Count));
    } else {
        successes.Add(string.Format("{0}", packed.FullPath));
    }
}
// Recursively colours tree nodes for db files: blue for empty tables,
// red (with tooltip) for undecodable ones, yellow for obsolete versions.
// Non-file nodes are descended into.
public static void SetColourBasedOnValidation(Collection<Node> nodes) {
    foreach (var current in nodes) {
        var packed = current.Tag as PackedFile;
        if (packed == null) {
            // not a file node: recurse into its children
            SetColourBasedOnValidation(current.Nodes);
            continue;
        }
        if (!packed.FullPath.StartsWith("db")) {
            continue;
        }
        var treeNode = current as TreeNode;
        DBFileHeader header = PackedFileDbCodec.readHeader(packed);
        string mouseover;
        if (header.EntryCount == 0) {
            // empty db file
            treeNode.Colour = Color.Blue;
            SetColourForParent(treeNode, treeNode.Colour.Value);
        } else if (!PackedFileDbCodec.CanDecode(packed, out mouseover)) {
            treeNode.Colour = Color.Red;
            SetColourForAllParents(treeNode, treeNode.Colour.Value);
            treeNode.ToolTipText = mouseover;
        } else if (HeaderVersionObsolete(packed)) {
            treeNode.Colour = Color.Yellow;
            SetColourForParent(treeNode, treeNode.Colour.Value);
        }
    }
}
// A file is editable when it is a db table and we have at least one
// definition matching its type and header version.
public bool CanEdit(PackedFile file) {
    if (!file.FullPath.StartsWith("db")) {
        return false;
    }
    try {
        string tableType = DBFile.Typename(file.FullPath);
        DBFileHeader header = PackedFileDbCodec.readHeader(file);
        bool editable = DBTypeMap.Instance.GetVersionedInfos(tableType, header.Version).Count > 0;
#if DEBUG
        if (!editable) {
            Console.WriteLine("Can't edit that!");
        }
#endif
        return editable;
    } catch {
        // unreadable header or similar: treat as not editable
        return false;
    }
}
/*
 * Delete all entries matching the where clause if any was given,
 * or all entries if none was given.
 */
public override void Execute() {
    if (SaveTo == null) {
        return;
    }
    foreach (PackedFile packed in PackedFiles) {
        DBFile source = PackedFileDbCodec.Decode(packed);
        // keep only the rows the where clause rejects; with no clause,
        // nothing survives (everything is deleted)
        List<DBRow> survivors = new List<DBRow>();
        if (whereClause != null) {
            foreach (DBRow row in source.Entries) {
                if (!whereClause.Accept(row)) {
                    survivors.Add(row);
                }
            }
        }
        DBFile replacement = new DBFile(source.Header, source.CurrentType);
        replacement.Entries.AddRange(survivors);
        PackedFile output = new PackedFile(packed.FullPath, false);
        output.Data = PackedFileDbCodec.GetCodec(packed).Encode(replacement);
        SaveTo.Add(output, true);
    }
}
// Entry point of the dbcorrect tool: reads every db table from the given
// pack, re-decodes it with each known definition for its type/version, and
// writes the results into three new packs: *_corrected (fixed entry counts),
// *_empty (no entries) and *_unknown (no definition could decode it).
public static void Main(string[] args) {
    if (args.Length == 0) {
        Console.Out.WriteLine("usage: dbcorrect [-cleanup] <packfile>");
        return;
    }
    // NOTE(review): "cleanup" is parsed here but never read in the visible
    // code, and the "enabled" message prints whenever two arguments are
    // given regardless of the flag's value — verify intent.
    bool cleanup = false;
    String inPackFileName = args[0];
    if (args.Length == 2) {
        cleanup = "-cleanup".Equals(args[0]);
        Console.WriteLine("Cleanup enabled (will not add empty db files)");
        inPackFileName = args[1];
    }
    Console.Out.WriteLine("opening {0}", inPackFileName);
    PackFile packFile = new PackFileCodec().Open(inPackFileName);
    // the three output packs share the input pack's header
    String correctedFileName = inPackFileName.Replace(".pack", "_corrected.pack");
    String emptyFileName = inPackFileName.Replace(".pack", "_empty.pack");
    String missingFileName = inPackFileName.Replace(".pack", "_unknown.pack");
    PackFile correctedPack = new PackFile(correctedFileName, packFile.Header);
    PackFile emptyPack = new PackFile(emptyFileName, packFile.Header);
    PackFile missingPack = new PackFile(missingFileName, packFile.Header);
    DBTypeMap.Instance.InitializeTypeMap(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location));
    VirtualDirectory dbDir = packFile.Root.GetSubdirectory("db");
    foreach (PackedFile packedFile in dbDir.AllFiles) {
        PackFile targetPack = correctedPack;
        Console.Out.WriteLine(packedFile.FullPath);
        DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
        DBFileHeader newHeader = new DBFileHeader(header);
        if (header.EntryCount == 0) {
            // tables with no entries go to the "empty" pack unchanged
            emptyPack.Add(packedFile);
            continue;
        }
        String typeName = DBFile.Typename(packedFile.FullPath);
        byte[] fileData = packedFile.Data;
        // we only accept the exact type/version combination here
        // and only if we don't have to go around trying which one it is (yet)
        var infos = DBTypeMap.Instance.GetVersionedInfos(typeName, header.Version);
        bool added = false;
        foreach (Filetypes.TypeInfo typeInfo in infos) {
            Console.Out.WriteLine("trying {0}", typeInfo);
            DBFile newDbFile = new DBFile(newHeader, typeInfo);
            try {
                using (BinaryReader reader = new BinaryReader(new MemoryStream(fileData, 0, fileData.Length))) {
                    // skip the header: the row data starts right after it
                    reader.BaseStream.Position = header.Length;
                    while (reader.BaseStream.Position != fileData.Length) {
                        // try decoding a full row of fields and add it to the new file
                        DBRow newRow = new DBRow(typeInfo);
                        foreach (Filetypes.FieldInfo info in typeInfo.Fields) {
                            newRow[info.Name].Decode(reader);
                            //FieldInstance instance = info.CreateInstance();
                            //instance.Decode(reader);
                            //newRow.Add(instance);
                        }
                        newDbFile.Entries.Add(newRow);
                    }
                    // all data read successfully!
                    if (newDbFile.Entries.Count == header.EntryCount) {
                        Console.Out.WriteLine("{0}: entry count {1} is correct", packedFile.FullPath, newDbFile.Entries.Count);
#if DEBUG
                        // foreach(DBRow row in newDbFile.Entries) {
                        //     String line = "";
                        //     foreach(FieldInstance instance in row) {
                        //         line += String.Format("{0} - ", line);
                        //     }
                        //     Console.WriteLine(line);
                        // }
#endif
                    } else {
                        // header lied about the entry count: new header gets the real one
                        Console.Out.WriteLine("{0}: entry count {1} will be corrected to {2}", packedFile.FullPath, header.EntryCount, newDbFile.Entries.Count);
                    }
                    if (newDbFile.Entries.Count == 0) {
                        targetPack = emptyPack;
                    }
                    PackedFile newPackedFile = new PackedFile(packedFile.FullPath, false);
                    PackedFileDbCodec codec = PackedFileDbCodec.FromFilename(packedFile.FullPath);
                    newPackedFile.Data = codec.Encode(newDbFile);
                    targetPack.Add(newPackedFile);
                    added = true;
                    Console.Out.WriteLine("stored file with {0} entries", newDbFile.Entries.Count);
                    // first definition that decodes cleanly wins
                    break;
                }
            } catch (Exception e) {
                Console.Error.WriteLine("Will not add {0}: a problem occurred when reading it: {1} at entry {2}", packedFile.FullPath, e, newDbFile.Entries.Count);
            }
        }
        if (!added) {
            // no definition could decode this file: keep it in the "unknown" pack
            missingPack.Add(packedFile);
        }
    }
    Console.Out.WriteLine("saving {0}", correctedPack.Filepath);
    PackFileCodec packCodec = new PackFileCodec();
    packCodec.Save(correctedPack);
    packCodec.Save(emptyPack);
    packCodec.Save(missingPack);
}
// Entry point of the schema-check tool. With -g it shows the table display
// GUI. Otherwise it scans the db directories of every installed game's
// packs for tables that fail to decode with the current schema; with -x
// the failing tables are collected into "undecoded.pack", otherwise each
// failure opens the interactive decode tool.
public static void Main(string[] args) {
    PackFile exported = null;
    bool showManager = (args.Length > 0 && args[0].Equals("-g"));
    if (showManager) {
        DBTableDisplay display = new DBTableDisplay();
        display.ShowDialog();
    } else {
        bool export = (args.Length > 0 && args[0].Equals("-x"));
        Console.WriteLine("exporting undecoded to file");
        exported = new PackFile("undecoded.pack", new PFHeader("PFH4"));
        DBTypeMap.Instance.initializeFromFile("master_schema.xml");
        foreach (Game game in Game.Games) {
            LoadGameLocationFromFile(game);
            if (game.IsInstalled) {
                foreach (string packFileName in Directory.EnumerateFiles(game.DataDirectory, "*pack")) {
                    Console.WriteLine("checking {0}", packFileName);
                    PackFile packFile = new PackFileCodec().Open(packFileName);
                    foreach (VirtualDirectory dir in packFile.Root.Subdirectories.Values) {
                        if (dir.Name.Equals("db")) {
                            foreach (PackedFile dbFile in dir.AllFiles) {
                                // naval model tables are deliberately skipped
                                if (dbFile.Name.Contains("models_naval")) {
                                    continue;
                                }
                                // DBFileHeader header = PackedFileDbCodec.readHeader(dbFile);
                                DBFile decoded = PackedFileDbCodec.Decode(dbFile);
                                DBFileHeader header = PackedFileDbCodec.readHeader(dbFile);
                                // a null decode of a non-empty table means the schema failed
                                if (decoded == null && header.EntryCount != 0) {
                                    Console.WriteLine("failed to read {0} in {1}", dbFile.FullPath, packFile);
                                    if (export) {
                                        // collect the raw data under a game-qualified name
                                        String exportFileName = String.Format("db/{0}_{1}_{2}", game.Id, dbFile.Name, Path.GetFileName(packFileName)).ToLower();
                                        PackedFile exportedDbFile = new PackedFile(exportFileName, false) {
                                            Data = dbFile.Data
                                        };
                                        exported.Add(exportedDbFile);
                                    } else {
                                        string key = DBFile.Typename(dbFile.FullPath);
                                        // ETW/NTW/STW get the unicode decode tool variant
                                        bool unicode = false;
                                        if (game == Game.ETW || game == Game.NTW || game == Game.STW) {
                                            unicode = true;
                                        }
                                        DecodeTool.DecodeTool decoder = new DecodeTool.DecodeTool(unicode) {
                                            TypeName = key,
                                            Bytes = dbFile.Data
                                        };
                                        decoder.ShowDialog();
                                    }
                                }
                            }
                        }
                    }
                }
                // NOTE(review): the export pack is saved once per installed
                // game, inside this loop — presumably intended once after all
                // games; verify before changing.
                if (export) {
                    new PackFileCodec().Save(exported);
                }
            } else {
                Console.Error.WriteLine("Game {0} not installed in {1}", game, game.GameDirectory);
            }
        }
    }
}
// test the given packed file as a database file
// tests PackedFileCodec and the db definitions we have
// Files are sorted into: emptyTables (no data / no entries), supported
// (decoded entry count matches the header), invalidDefForVersion (decode
// failed or count mismatch) and noDefinition (type unknown to the map).
public override void TestFile(PackedFile file) {
    if (!DBTypeMap.Instance.Initialized) {
        DBTypeMap.Instance.InitializeTypeMap(Directory.GetCurrentDirectory());
    }
    allTestedFiles.Add(file.FullPath);
    if (file.Size == 0) {
        // no data at all: record as empty with unknown (-1) version
        emptyTables.Add(new Tuple<string, int>(DBFile.Typename(file.FullPath), -1));
        return;
    }
    // PackedFileDbCodec packedCodec = PackedFileDbCodec.FromFilename(file.FullPath);
    string type = DBFile.Typename(file.FullPath);
    DBFileHeader header = PackedFileDbCodec.readHeader(file);
    // result key combines type name and GUID; value is the header version
    Tuple<string, int> tuple = new Tuple<string, int>(string.Format("{0} # {1}", type, header.GUID), header.Version);
    if (OutputTable) {
        Console.WriteLine("TABLE:{0}#{1}#{2}", type, header.Version, header.GUID);
    }
    Console.Out.Flush();
    if (header.EntryCount == 0) {
        // special case: we will never find out the structure of a file
        // if it contains no data
        emptyTables.Add(tuple);
    } else if (DBTypeMap.Instance.IsSupported(type)) {
        SortedSet<Tuple<string, int>> addTo = null;
        try {
#if DEBUG
            // breakpoint hook for a table name given via debug_at
            if (!string.IsNullOrEmpty(debug_at) && file.FullPath.Contains(debug_at)) {
                Console.WriteLine("stop right here");
            }
#endif
            // a wrong db definition might not cause errors,
            // but read less entries than there are
            DBFile dbFile = PackedFileDbCodec.Decode(file);
            if (dbFile.Entries.Count == dbFile.Header.EntryCount) {
                addTo = supported;
                //if (!string.IsNullOrEmpty(header.GUID)) {
                //    List<FieldInfo> fields = new List<FieldInfo>(DBTypeMap.Instance.GetVersionedInfo(header.GUID, type, header.Version).fields);
                //    DBTypeMap.Instance.SetByGuid(header.GUID, type, header.Version, fields);
                //}
                // only test tsv import/export if asked,
                // it takes some time more than just the read checks
                if (TestTsv) {
                    TestTsvExport(dbFile);
                }
            } else {
                // didn't get what we expect
                addTo = invalidDefForVersion;
#if DEBUG
                if (!string.IsNullOrEmpty(debug_at) && file.FullPath.EndsWith(debug_at)) {
                    Console.WriteLine("adding watched to invalid");
                }
#endif
            }
        } catch {
#if DEBUG
            if (!string.IsNullOrEmpty(debug_at) && file.FullPath.EndsWith(debug_at)) {
                Console.WriteLine("adding watched to invalid");
            }
#endif
            // decode threw: the definition is wrong for this version
            addTo = invalidDefForVersion;
        }
        addTo.Add(tuple);
    } else {
        noDefinition.Add(tuple);
    }
}
// Opens the currently selected packed db file in the grid editor.
// If the type is unsupported or decoding fails, optionally (per settings)
// offers the interactive decode tool and retries before giving up.
public void Open() {
#if DEBUG
    Console.WriteLine("Opening {0}", CurrentPackedFile.FullPath);
#endif
    string key = DBFile.Typename(CurrentPackedFile.FullPath);
    if (!DBTypeMap.Instance.IsSupported(key)) {
        ShowDBFileNotSupportedMessage("Sorry, this db file isn't supported yet.\r\n\r\nCurrently supported files:\r\n");
        if (Settings.Default.ShowDecodeToolOnError) {
            var decoder = new DecodeTool.DecodeTool {
                TypeName = key,
                Bytes = CurrentPackedFile.Data
            };
            decoder.ShowDialog();
            // the decode tool may have added a definition; re-check support
            if (!DBTypeMap.Instance.IsSupported(key)) {
                return;
            }
        } else {
            return;
        }
    }
    try {
        EditedFile = PackedFileDbCodec.Decode(CurrentPackedFile);
    } catch {
        if (Settings.Default.ShowDecodeToolOnError) {
            var decoder = new DecodeTool.DecodeTool {
                TypeName = key,
                Bytes = CurrentPackedFile.Data
            };
            decoder.ShowDialog();
            // second decode attempt after the user worked on the definition
            try {
                EditedFile = PackedFileDbCodec.Decode(CurrentPackedFile);
            } catch {
                return;
            }
        } else {
            return;
        }
    }
    if (EditedFile == null) {
        return;
    }
    Codec = PackedFileDbCodec.FromFilename(CurrentPackedFile.FullPath);
    TypeInfo info = EditedFile.CurrentType;
    // rebuild the grid from scratch for the newly decoded table
    dataGridView.EndEdit();
    dataGridView.Columns.Clear();
    if (currentDataTable != null) {
        currentDataTable.DataSet.Clear();
        currentDataTable.Clear();
    }
    CreateDataTable();
    for (int i = 0; i < EditedFile.CurrentType.Fields.Count; i++) {
        dataGridView.Columns.Add(CreateColumn(i));
    }
    DataSet currentDataSet = new DataSet(info.Name + "_DataSet");
    currentDataSet.Tables.Add(currentDataTable);
    dataGridView.DataSource = new BindingSource(currentDataSet, info.Name + "_DataTable");
    FirstColumnAsRowHeader = Settings.Default.UseFirstColumnAsRowHeader;
    FillRowHeaders();
    addNewRowButton.Enabled = true;
    importButton.Enabled = true;
    exportButton.Enabled = true;
    dataGridView.Visible = true;
    unsupportedDBErrorTextBox.Visible = false;
    // cannot edit contained complex types
    foreach (FieldInfo f in EditedFile.CurrentType.Fields) {
        if (f is ListType) {
            Console.WriteLine("cannot edit this");
            ReadOnly = true;
            break;
        }
    }
}