/*
 * Query user for a pack to open to import data from, then open pack browser to
 * let user select the packed files he wants to start off with.
 * Selected packed files will be added to given pack.
 */
private void ImportDataFromGame(PackFile newFile) {
    // Nothing to import from if the current game's install directory is unknown.
    string gamePath = GameManager.Instance.CurrentGame.GameDirectory;
    if (gamePath == null || !Directory.Exists(gamePath)) {
        return;
    }
    // Dialogs are IDisposable; dispose deterministically instead of leaking handles.
    using (OpenFileDialog packOpenFileDialog = new OpenFileDialog {
        InitialDirectory = Path.Combine(gamePath, "data"),
        Filter = IOFunctions.PACKAGE_FILTER,
        Title = "Open pack to extract basic data from"
    }) {
        if (packOpenFileDialog.ShowDialog() != DialogResult.OK) {
            return;
        }
        try {
            using (PackBrowseDialog browser = new PackBrowseDialog {
                PackFile = new PackFileCodec().Open(packOpenFileDialog.FileName)
            }) {
                if (browser.ShowDialog() == DialogResult.OK) {
                    // Second argument false — presumably "don't replace existing
                    // entries"; confirm against PackFile.Add.
                    foreach (PackedFile packed in browser.SelectedFiles) {
                        newFile.Add(packed, false);
                    }
                }
            }
        } catch (Exception e) {
            MessageBox.Show(string.Format("Failed to import data: {0}", e));
        }
    }
}
/**
 * <summary>Removes all elements identical between the selected game's <see cref="PackFile">PackFiles</see> and passed PackFile.</summary>
 * <remarks>
 * Does not alter non-DB files in the passed <see cref="PackFile"/>.
 * DB files in the passed PackFile have all identical rows removed from the DB file.
 * For rows to be identical the schemas must be the same.
 * <see cref="PackedFile">PackedFiles</see> that have no unique rows are removed from the final <see cref="PackFile"/>.
 * </remarks>
 *
 * <param name="toOptimize">The <see cref="PackFile"/> to be optimized</param>
 * <returns>A new <see cref="PackFile"/> that contains the optimized data of <paramref name="toOptimize"/>.</returns>
 *
 * XXX This could be optimized better by multi-threading the foreach loop if adding <see cref="PackedFile">PackedFiles</see> to a <see cref="PackFile"/> was thread-safe.
 */
public PackFile CreateOptimizedFile(PackFile toOptimize) {
    PFHeader header = new PFHeader(toOptimize.Header);
    // Output pack is written next to the input, with an "optimized_" name prefix.
    string newPackName = Path.Combine(Path.GetDirectoryName(toOptimize.Filepath),
                                      string.Format("optimized_{0}", Path.GetFileName(toOptimize.Filepath)));
    PackFile result = new PackFile(newPackName, header);
    // Reference DB data from the game's own packs, keyed by DB table type name.
    ConcurrentDictionary<string, List<DBFile>> gameDBFiles = FindGameDBFiles();
    foreach (PackedFile file in toOptimize) {
        // Non-DB files are passed through unchanged.
        if (!file.FullPath.StartsWith("db" + Path.DirectorySeparatorChar)) {
            result.Add(file);
            continue;
        }
        // No reference data for this table type: keep the file as-is.
        List<DBFile> referenceFiles;
        if (!gameDBFiles.TryGetValue(DBFile.Typename(file.FullPath), out referenceFiles)) {
            result.Add(file);
            continue;
        }
        // A null result from the optimizer means no unique rows remained; drop the file.
        PackedFile optimized = OptimizePackedDBFile(file, referenceFiles);
        if (optimized != null) {
            result.Add(optimized);
        }
    }
    return result;
}
/*
 * Main interface method: builds a new pack next to the given one (name prefixed
 * with "optimized_") containing the per-file optimization results.
 */
public PackFile CreateOptimizedFile(PackFile toOptimize) {
    string targetDirectory = Path.GetDirectoryName(toOptimize.Filepath);
    string targetName = string.Format("optimized_{0}", Path.GetFileName(toOptimize.Filepath));
    PFHeader clonedHeader = new PFHeader(toOptimize.Header);
    PackFile result = new PackFile(Path.Combine(targetDirectory, targetName), clonedHeader);
    foreach (PackedFile candidate in toOptimize) {
        PackedFile optimized = CreateOptimizedFile(candidate);
        // Files for which the optimizer yields null are omitted from the output pack.
        if (optimized == null) {
            continue;
        }
        result.Add(optimized);
    }
    return result;
}
/*
 * Create a new pack containing the given files.
 * Files that fail to add are reported and skipped; a failure to write the
 * pack itself is reported as well.
 */
void CreatePack(string packFileName, List<string> containedFiles) {
    try {
        // New mod pack with a PFH4 header.
        PFHeader header = new PFHeader("PFH4") {
            Version = 0,
            Type = PackType.Mod
        };
        PackFile packFile = new PackFile(packFileName, header);
        foreach (string entry in containedFiles) {
            try {
                HandlingFile(entry);
                // true — presumably "replace if already present"; matches original call.
                packFile.Add(new PackedFile(entry), true);
            } catch (Exception e) {
                Console.Error.WriteLine("Failed to add {0}: {1}", entry, e.Message);
            }
        }
        new PackFileCodec().WriteToFile(packFileName, packFile);
    } catch (Exception e) {
        Console.Error.WriteLine("Failed to write {0}: {1}", packFileName, e.Message);
    }
}
/*
 * Scans every installed game's data directory for DB tables that the current
 * schema set fails to decode. With -g, shows the DB table manager GUI instead.
 * With -x, collects the raw bytes of undecodable tables into "undecoded.pack";
 * otherwise each failing table is opened in the interactive DecodeTool.
 */
public static void Main(string[] args) {
    PackFile exported = null;
    // -g: open the interactive table display instead of scanning packs.
    bool showManager = (args.Length > 0 && args[0].Equals("-g"));
    if (showManager) {
        DBTableDisplay display = new DBTableDisplay();
        display.ShowDialog();
    } else {
        // -x: export undecoded tables to a pack instead of opening the decode tool.
        bool export = (args.Length > 0 && args[0].Equals("-x"));
        Console.WriteLine("exporting undecoded to file");
        exported = new PackFile("undecoded.pack", new PFHeader("PFH4"));
        DBTypeMap.Instance.initializeFromFile("master_schema.xml");
        foreach (Game game in Game.Games) {
            LoadGameLocationFromFile(game);
            if (game.IsInstalled) {
                // NOTE(review): the pattern "*pack" matches any name merely ending in
                // "pack", not only "*.pack" — confirm this is intended.
                foreach (string packFileName in Directory.EnumerateFiles(game.DataDirectory, "*pack")) {
                    Console.WriteLine("checking {0}", packFileName);
                    PackFile packFile = new PackFileCodec().Open(packFileName);
                    // Only the top-level "db" directory of each pack is examined.
                    foreach (VirtualDirectory dir in packFile.Root.Subdirectories.Values) {
                        if (dir.Name.Equals("db")) {
                            foreach (PackedFile dbFile in dir.AllFiles) {
                                // models_naval tables are skipped — TODO confirm why (known-bad tables?).
                                if (dbFile.Name.Contains("models_naval")) {
                                    continue;
                                }
                                // DBFileHeader header = PackedFileDbCodec.readHeader(dbFile);
                                DBFile decoded = PackedFileDbCodec.Decode(dbFile);
                                DBFileHeader header = PackedFileDbCodec.readHeader(dbFile);
                                // Decode returning null for a table the header says is
                                // non-empty means the schema failed on it.
                                if (decoded == null && header.EntryCount != 0) {
                                    Console.WriteLine("failed to read {0} in {1}", dbFile.FullPath, packFile);
                                    if (export) {
                                        // Store raw bytes under db/<game>_<table>_<pack> for later schema work.
                                        String exportFileName = String.Format("db/{0}_{1}_{2}", game.Id, dbFile.Name, Path.GetFileName(packFileName)).ToLower();
                                        PackedFile exportedDbFile = new PackedFile(exportFileName, false) {
                                            Data = dbFile.Data
                                        };
                                        exported.Add(exportedDbFile);
                                    } else {
                                        string key = DBFile.Typename(dbFile.FullPath);
                                        // ETW/NTW/STW get unicode handling in the decode tool
                                        // — presumably those titles use unicode strings; confirm.
                                        bool unicode = false;
                                        if (game == Game.ETW || game == Game.NTW || game == Game.STW) {
                                            unicode = true;
                                        }
                                        DecodeTool.DecodeTool decoder = new DecodeTool.DecodeTool(unicode) {
                                            TypeName = key,
                                            Bytes = dbFile.Data
                                        };
                                        decoder.ShowDialog();
                                    }
                                }
                            }
                        }
                    }
                }
                // NOTE(review): Save runs once per installed game, rewriting the same
                // "undecoded.pack" each time; likely intended to run once after the
                // game loop — confirm before changing.
                if (export) {
                    new PackFileCodec().Save(exported);
                }
            } else {
                Console.Error.WriteLine("Game {0} not installed in {1}", game, game.GameDirectory);
            }
        }
    }
}
/*
 * Command line tool: re-reads every DB table in the given pack using the exact
 * schema registered for its type/version, and writes three packs next to the
 * input: "<name>_corrected.pack" (tables re-encoded, with entry counts fixed
 * to match the actual decoded rows), "<name>_empty.pack" (tables with zero
 * entries) and "<name>_unknown.pack" (tables no candidate schema could decode).
 */
public static void Main(string[] args) {
    if (args.Length == 0) {
        Console.Out.WriteLine("usage: dbcorrect [-cleanup] <packfile>");
        return;
    }
    bool cleanup = false;
    String inPackFileName = args[0];
    if (args.Length == 2) {
        cleanup = "-cleanup".Equals(args[0]);
        // NOTE(review): this message prints even when args[0] is not "-cleanup",
        // and the 'cleanup' flag is never read afterwards — confirm intent.
        Console.WriteLine("Cleanup enabled (will not add empty db files)");
        inPackFileName = args[1];
    }
    Console.Out.WriteLine("opening {0}", inPackFileName);
    PackFile packFile = new PackFileCodec().Open(inPackFileName);
    // All three output packs reuse the input pack's header.
    String correctedFileName = inPackFileName.Replace(".pack", "_corrected.pack");
    String emptyFileName = inPackFileName.Replace(".pack", "_empty.pack");
    String missingFileName = inPackFileName.Replace(".pack", "_unknown.pack");
    PackFile correctedPack = new PackFile(correctedFileName, packFile.Header);
    PackFile emptyPack = new PackFile(emptyFileName, packFile.Header);
    PackFile missingPack = new PackFile(missingFileName, packFile.Header);
    // Load schema definitions from the directory the executable lives in.
    DBTypeMap.Instance.InitializeTypeMap(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location));
    VirtualDirectory dbDir = packFile.Root.GetSubdirectory("db");
    foreach (PackedFile packedFile in dbDir.AllFiles) {
        PackFile targetPack = correctedPack;
        Console.Out.WriteLine(packedFile.FullPath);
        DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
        DBFileHeader newHeader = new DBFileHeader(header);
        // Tables whose header already says zero entries go to the "empty" pack untouched.
        if (header.EntryCount == 0) {
            emptyPack.Add(packedFile);
            continue;
        }
        String typeName = DBFile.Typename(packedFile.FullPath);
        byte[] fileData = packedFile.Data;
        // we only accept the exact type/version combination here
        // and only if we don't have to go around trying which one it is (yet)
        var infos = DBTypeMap.Instance.GetVersionedInfos(typeName, header.Version);
        bool added = false;
        // Try each candidate schema in turn; the first one that decodes the whole
        // byte stream wins.
        foreach (Filetypes.TypeInfo typeInfo in infos) {
            Console.Out.WriteLine("trying {0}", typeInfo);
            DBFile newDbFile = new DBFile(newHeader, typeInfo);
            try {
                using (BinaryReader reader = new BinaryReader(new MemoryStream(fileData, 0, fileData.Length))) {
                    // Skip past the header, then decode rows until the raw data is exhausted.
                    reader.BaseStream.Position = header.Length;
                    while (reader.BaseStream.Position != fileData.Length) {
                        // try decoding a full row of fields and add it to the new file
                        DBRow newRow = new DBRow(typeInfo);
                        foreach (Filetypes.FieldInfo info in typeInfo.Fields) {
                            newRow[info.Name].Decode(reader);
                            //FieldInstance instance = info.CreateInstance();
                            //instance.Decode(reader);
                            //newRow.Add(instance);
                        }
                        newDbFile.Entries.Add(newRow);
                    }
                    // all data read successfully!
                    if (newDbFile.Entries.Count == header.EntryCount) {
                        Console.Out.WriteLine("{0}: entry count {1} is correct", packedFile.FullPath, newDbFile.Entries.Count);
#if DEBUG
                        // foreach(DBRow row in newDbFile.Entries) {
                        // String line = "";
                        // foreach(FieldInstance instance in row) {
                        // line += String.Format("{0} - ", line);
                        // }
                        // Console.WriteLine(line);
                        // }
#endif
                    } else {
                        // Decoded row count disagrees with the header; the re-encode
                        // below writes the actual count.
                        Console.Out.WriteLine("{0}: entry count {1} will be corrected to {2}", packedFile.FullPath, header.EntryCount, newDbFile.Entries.Count);
                    }
                    // A table that decoded to zero rows is redirected to the "empty" pack.
                    if (newDbFile.Entries.Count == 0) {
                        targetPack = emptyPack;
                    }
                    // Re-encode with the decoded rows and store in the chosen output pack.
                    PackedFile newPackedFile = new PackedFile(packedFile.FullPath, false);
                    PackedFileDbCodec codec = PackedFileDbCodec.FromFilename(packedFile.FullPath);
                    newPackedFile.Data = codec.Encode(newDbFile);
                    targetPack.Add(newPackedFile);
                    added = true;
                    Console.Out.WriteLine("stored file with {0} entries", newDbFile.Entries.Count);
                    break;
                }
            } catch (Exception e) {
                // Decoding failed with this schema; report and try the next candidate.
                Console.Error.WriteLine("Will not add {0}: a problem occurred when reading it: {1} at entry {2}", packedFile.FullPath, e, newDbFile.Entries.Count);
            }
        }
        // No schema decoded the table cleanly: keep the original bytes in the
        // "unknown" pack for later inspection.
        if (!added) {
            missingPack.Add(packedFile);
        }
    }
    Console.Out.WriteLine("saving {0}", correctedPack.Filepath);
    PackFileCodec packCodec = new PackFileCodec();
    packCodec.Save(correctedPack);
    packCodec.Save(emptyPack);
    packCodec.Save(missingPack);
}