// Packs the contents of sourceFolder into a Spore DBPF (v2.0) package written
// to output. Layout: placeholder header, file data, index, then the header is
// rewritten with the real index offset/size. Closes output when done.
// progress may be null (no progress reporting).
public PackagePack(Stream output, string sourceFolder, PleaseWait progress)
{
    NameRegistry.Files.UsedHashes = new List<UInt32>();

    var group_dirs = Directory.GetDirectories(sourceFolder);
    // BUG FIX: Directory.GetDirectories returns paths WITHOUT a trailing
    // separator, so the original comparison against "...\\sporemaster\\"
    // never matched and the sporemaster directory was enumerated even though
    // its names.txt is added explicitly below. Compare without the trailing
    // backslash so the directory is actually excluded.
    var file_query =
        from d in group_dirs
        where d != sourceFolder + "\\sporemaster"
        from f in Directory.GetFileSystemEntries(d)
        where !f.EndsWith(".search_index") // < these might appear in group directories if there are indexable files in subdirectories
        select f;
    var files = file_query.ToList();
    files.Add(sourceFolder + "\\sporemaster\\names.txt");

    if (progress != null)
    {
        progress.beginTask(1.0, files.Count);
    }

    DatabasePackedFile dbf = new DatabasePackedFile();
    dbf.Version = new Version(2, 0);
    dbf.WriteHeader(output, 0, 0); // placeholder; rewritten once sizes are known

    var rw4_hash = NameRegistry.Types.toHash("rw4");
    uint size, start = (uint)output.Position;

    foreach (var f in files)
    {
        string relativePath = f.Substring(sourceFolder.Length + 1);
        bool additionalOutputFiles;
        byte[] autoLocale = null;
        do
        {
            additionalOutputFiles = false;
            var parts = relativePath.Split(new char[] { '\\' });
            // Not a "group\instance.ext" path: skip. continue jumps to the
            // loop condition, which is false here, so the do/while exits.
            if (parts.Length != 2)
            {
                continue;
            }
            var group = parts[0];
            parts = parts[1].Split(new char[] { '.' }, 2);
            var instance = parts[0];
            var extension = parts[1];

            var index = new DatabaseIndex();
            index.GroupId = NameRegistry.Groups.toHash(group);
            index.InstanceId = NameRegistry.Files.toHash(instance);
            try
            {
                if (relativePath == "sporemaster\\names.txt")
                {
                    writeNamesFile(output);
                }
                else if (autoLocale != null)
                {
                    // Second pass of the do/while: emit the auto-generated
                    // locale produced by the prop file on the first pass.
                    output.Write(autoLocale, 0, autoLocale.Length);
                }
                else if (extension == "prop.xml")
                {
                    extension = "prop";
                    writePropFile(group, instance, f, output, out autoLocale);
                    if (autoLocale.Length != 0)
                    {
                        additionalOutputFiles = true;
                        relativePath = "locale~\\auto_" + group + "_" + instance + ".locale";
                    }
                }
                else if (NameRegistry.Types.toHash(extension) == rw4_hash && Directory.Exists(f))
                {
                    writeRW4File(f, output);
                }
                else
                {
                    writeBinaryFile(f, output);
                }
            }
            catch (Exception e)
            {
                throw new Exception("Error packing file '" + relativePath + "'.", e);
            }

            size = (uint)output.Position - start;
            index.TypeId = NameRegistry.Types.toHash(extension);
            index.Compressed = false;
            index.Flags = 1;
            index.DecompressedSize = size;
            index.CompressedSize = size | 0x80000000;
            index.Offset = start;
            dbf.Indices.Add(index);
            start += size;
        } while (additionalOutputFiles);

        // BUG FIX: progress is optional (null-checked for beginTask/endTask)
        // but was dereferenced unconditionally here, throwing
        // NullReferenceException mid-pack when no progress UI was supplied.
        if (progress != null)
        {
            progress.addProgress(1.0);
        }
    }

    dbf.WriteIndex(output);
    size = (uint)output.Position - start;
    output.Seek(0, SeekOrigin.Begin);
    dbf.WriteHeader(output, (int)start, (int)size);
    output.Close();

    if (progress != null)
    {
        progress.endTask();
    }
}
// Entry point: reads a files.xml manifest describing resource keys and file
// paths, then packs those files into a DBPF v3.0 .package archive, optionally
// RefPack-compressing each entry (falling back to stored/uncompressed when
// compression fails).
public static void Main(string[] args)
{
    bool showHelp = false;
    bool verbose = false;
    bool shouldCompress = false;

    var options = new OptionSet()
    {
        { "c|compress", "enable compression", v => shouldCompress = v != null },
        { "v|verbose", "be verbose", v => verbose = v != null },
        { "h|help", "show this message and exit", v => showHelp = v != null },
    };

    List<string> extras;
    try
    {
        extras = options.Parse(args);
    }
    catch (OptionException e)
    {
        Console.Write("{0}: ", GetExecutableName());
        Console.WriteLine(e.Message);
        Console.WriteLine("Try `{0} --help' for more information.", GetExecutableName());
        return;
    }

    if (extras.Count < 1 || extras.Count > 2 || showHelp == true)
    {
        Console.WriteLine("Usage: {0} [OPTIONS]+ input_dir [output_package]", GetExecutableName());
        Console.WriteLine();
        Console.WriteLine("Options:");
        options.WriteOptionDescriptions(Console.Out);
        return;
    }

    string filesPath = extras[0];
    string filesBasePath;
    if (Directory.Exists(filesPath) == true)
    {
        // A directory was given: expect the manifest at <dir>\files.xml.
        filesBasePath = filesPath;
        filesPath = Path.Combine(filesBasePath, "files.xml");
    }
    else
    {
        filesBasePath = Path.GetDirectoryName(filesPath);
        filesBasePath = filesBasePath ?? "";
    }
    string outputPath = extras.Count > 1 ? extras[1] : Path.ChangeExtension(filesBasePath, ".package");

    var document = new XPathDocument(filesPath);
    var navigator = document.CreateNavigator();
    var nodes = navigator.Select("/files/file");

    var filePaths = new Dictionary<ResourceKey, string>();
    if (verbose == true)
    {
        Console.WriteLine("Discovering files...");
    }

    while (nodes.MoveNext())
    {
        var groupText = nodes.Current.GetAttribute("group", "");
        var instanceText = nodes.Current.GetAttribute("instance", "");
        var typeText = nodes.Current.GetAttribute("type", "");
        if (groupText == null || instanceText == null || typeText == null)
        {
            throw new InvalidDataException("file missing attributes");
        }

        // Key attributes are hex strings, e.g. group="40404000".
        uint groupId;
        ulong instanceId;
        uint typeId;
        if (uint.TryParse(groupText, NumberStyles.HexNumber, CultureInfo.InvariantCulture, out groupId) == false ||
            ulong.TryParse(instanceText, NumberStyles.HexNumber, CultureInfo.InvariantCulture, out instanceId) == false ||
            uint.TryParse(typeText, NumberStyles.HexNumber, CultureInfo.InvariantCulture, out typeId) == false)
        {
            Console.WriteLine("Failed to parse resource key [{0}, {1}, {2}]!", groupText, instanceText, typeText);
            continue;
        }

        var key = new ResourceKey(instanceId, typeId, groupId);

        string inputPath;
        if (Path.IsPathRooted(nodes.Current.Value) == false)
        {
            // relative path, it should be relative to the XML file
            inputPath = Path.Combine(filesBasePath, nodes.Current.Value);
        }
        else
        {
            inputPath = nodes.Current.Value;
        }

        if (File.Exists(inputPath) == false)
        {
            Console.WriteLine(inputPath + " does not exist!");
            continue;
        }

        // ROBUSTNESS FIX: Dictionary.Add throws ArgumentException on a
        // duplicate resource key, aborting the whole pack for one bad
        // manifest entry. Other bad entries (unparseable key, missing file)
        // are warned and skipped, so handle duplicates explicitly too:
        // warn and let the last occurrence win.
        if (filePaths.ContainsKey(key) == true)
        {
            Console.WriteLine("Duplicate resource key for " + inputPath + ", overriding previous entry!");
        }
        filePaths[key] = inputPath;
    }

    if (verbose == true)
    {
        Console.WriteLine("Writing files...");
    }

    using (var output = File.Create(outputPath))
    {
        var dbpf = new DatabasePackedFile()
        {
            IsBig = false,
            Version = new Version(3, 0),
        };
        dbpf.WriteHeader(output, 0, 0); // placeholder; rewritten once the index is written

        foreach (var kv in filePaths)
        {
            var key = kv.Key;
            var filePath = kv.Value;
            if (verbose == true)
            {
                Console.WriteLine("{0}", filePath);
            }

            using (var input = File.OpenRead(filePath))
            {
                if (shouldCompress == false)
                {
                    long offset = output.Position;
                    output.WriteFromStream(input, (uint)input.Length);
                    dbpf.Entries.Add(new DatabasePackedFile.Entry()
                    {
                        Key = key,
                        CompressedSize = (uint)input.Length | 0x80000000,
                        UncompressedSize = (uint)input.Length,
                        CompressionScheme = DatabasePackedFile.CompressionScheme.None,
                        Flags = 1,
                        Offset = offset,
                    });
                }
                else
                {
                    byte[] compressed;
                    var success = input.RefPackCompress((int)input.Length, out compressed);
                    if (success == true)
                    {
                        long offset = output.Position;
                        output.WriteBytes(compressed);
                        dbpf.Entries.Add(new DatabasePackedFile.Entry()
                        {
                            Key = key,
                            CompressedSize = (uint)compressed.Length | 0x80000000,
                            UncompressedSize = (uint)input.Length,
                            CompressionScheme = DatabasePackedFile.CompressionScheme.RefPack,
                            Flags = 1,
                            Offset = offset,
                        });
                    }
                    else
                    {
                        // Compression failed: store the file uncompressed.
                        input.Position = 0;
                        long offset = output.Position;
                        output.WriteFromStream(input, (uint)input.Length);
                        dbpf.Entries.Add(new DatabasePackedFile.Entry()
                        {
                            Key = key,
                            CompressedSize = (uint)input.Length | 0x80000000,
                            UncompressedSize = (uint)input.Length,
                            // CONSISTENCY FIX: was the raw literal 0 — use the
                            // named enum value like the branches above.
                            CompressionScheme = DatabasePackedFile.CompressionScheme.None,
                            Flags = 1,
                            Offset = offset,
                        });
                    }
                }
            }
        }

        var endOfData = output.Position;
        dbpf.WriteIndex(output);
        var indexSize = (uint)(output.Position - endOfData);
        output.Position = 0;
        dbpf.WriteHeader(output, endOfData, indexSize);
    }
}
// Flushes all pending entries and rewrites the DBPF (v2.0) header and index.
//
// cleanCommit == false: commits in place — new/changed MemoryEntry data is
// written over its old slot when it fits, otherwise appended; untouched
// StreamEntry data is left where it is (replaced data leaves holes).
//
// cleanCommit == true: rebuilds the entire archive into a scratch stream
// (a temp file for packages >= 5 MB, memory otherwise), copies it back over
// the real stream, and truncates — reclaiming any holes.
//
// Afterwards every committed entry is re-registered as a StreamEntry.
// Throws NotSupportedException if the underlying stream is not writable.
public void Commit(bool cleanCommit)
{
    if (this.Stream.CanWrite == false)
    {
        throw new NotSupportedException();
    }

    DatabasePackedFile dbpf = new DatabasePackedFile();
    dbpf.Version = new Version(2, 0);

    if (cleanCommit == false)
    {
        if (this.EndOfDataOffset == 0)
        {
            // new archive
            this.Stream.Seek(this.BaseOffset, SeekOrigin.Begin);
            dbpf.WriteHeader(this.Stream, 0, 0);
            this.EndOfDataOffset = this.Stream.Position - this.BaseOffset;
        }

        foreach (KeyValuePair<ResourceKey, Entry> kvp in this._Entries)
        {
            DatabasePackedFile.Entry entry = new DatabasePackedFile.Entry();
            entry.Key = kvp.Key;

            if (kvp.Value is MemoryEntry)
            {
                MemoryEntry memory = (MemoryEntry)kvp.Value;
                entry.DecompressedSize = memory.DecompressedSize;
                entry.CompressedSize = memory.CompressedSize | 0x80000000;
                entry.Flags = 1;
                entry.Compressed = false;

                // Is this replacing old data?
                if (this.OriginalEntries.ContainsKey(kvp.Key) == true)
                {
                    StreamEntry stream = this.OriginalEntries[kvp.Key];

                    // Let's see if the new data can fit where the old data was
                    if (memory.CompressedSize <= stream.CompressedSize)
                    {
                        entry.Offset = stream.Offset;
                        this.Stream.Seek(this.BaseOffset + stream.Offset, SeekOrigin.Begin);
                        this.Stream.Write(memory.Data, 0, (int)memory.CompressedSize);
                    }
                    else
                    {
                        entry.Offset = this.EndOfDataOffset;
                        this.Stream.Seek(this.BaseOffset + this.EndOfDataOffset, SeekOrigin.Begin);
                        this.Stream.Write(memory.Data, 0, (int)memory.CompressedSize);
                        this.EndOfDataOffset += memory.CompressedSize;
                    }
                }
                // New data
                else
                {
                    entry.Offset = this.EndOfDataOffset;
                    this.Stream.Seek(this.BaseOffset + this.EndOfDataOffset, SeekOrigin.Begin);
                    this.Stream.Write(memory.Data, 0, (int)memory.CompressedSize);
                    this.EndOfDataOffset += memory.CompressedSize;
                }
            }
            else if (kvp.Value is StreamEntry)
            {
                // Untouched entry: its data already lives in the stream.
                StreamEntry stream = (StreamEntry)kvp.Value;
                entry.Compressed = stream.Compressed;
                entry.CompressedSize = stream.CompressedSize | 0x80000000;
                entry.DecompressedSize = stream.DecompressedSize;
                entry.Offset = stream.Offset;
                entry.CompressionFlags = stream.CompressedFlags;
                entry.Flags = stream.Flags;
            }
            else
            {
                throw new InvalidOperationException();
            }

            dbpf.Entries.Add(entry);
        }

        this.Stream.Seek(this.BaseOffset + this.EndOfDataOffset, SeekOrigin.Begin);
        dbpf.WriteIndex(this.Stream);
        long indexSize = (this.Stream.Position - (this.BaseOffset + this.EndOfDataOffset));
        this.Stream.Seek(this.BaseOffset, SeekOrigin.Begin);
        dbpf.WriteHeader(this.Stream, this.EndOfDataOffset, indexSize);
    }
    else
    {
        Stream clean;
        string tempFileName = null;

        // Packages greater than five mb will be cleaned with a file supported stream
        if (this.Stream.Length >= (1024 * 1024) * 5)
        {
            tempFileName = Path.GetTempFileName();
            clean = File.Open(tempFileName, FileMode.Create, FileAccess.ReadWrite, FileShare.Read);
        }
        else
        {
            clean = new MemoryStream();
        }

        // BUG FIX: the scratch stream was never disposed, leaking the temp
        // file handle and making the File.Delete below throw IOException
        // (the file was still open). Dispose it before deleting.
        using (clean)
        {
            dbpf.WriteHeader(clean, 0, 0);
            this.EndOfDataOffset = clean.Position;

            foreach (KeyValuePair<ResourceKey, Entry> kvp in this._Entries)
            {
                DatabasePackedFile.Entry entry = new DatabasePackedFile.Entry();
                entry.Key = kvp.Key;

                if (kvp.Value is MemoryEntry)
                {
                    MemoryEntry memory = (MemoryEntry)kvp.Value;
                    entry.DecompressedSize = memory.DecompressedSize;
                    entry.CompressedSize = memory.CompressedSize | 0x80000000;
                    entry.Flags = 1;
                    entry.Compressed = false;
                    entry.Offset = this.EndOfDataOffset;
                    clean.Write(memory.Data, 0, (int)memory.CompressedSize);
                    this.EndOfDataOffset += memory.CompressedSize;
                }
                else if (kvp.Value is StreamEntry)
                {
                    StreamEntry stream = (StreamEntry)kvp.Value;
                    entry.Compressed = stream.Compressed;
                    entry.CompressedSize = stream.CompressedSize | 0x80000000;
                    entry.DecompressedSize = stream.DecompressedSize;
                    entry.CompressionFlags = stream.CompressedFlags;
                    entry.Flags = stream.Flags;
                    entry.Offset = this.EndOfDataOffset;

                    // Copy data from the old location into the scratch stream.
                    this.Stream.Seek(this.BaseOffset + stream.Offset, SeekOrigin.Begin);
                    byte[] data = new byte[4096];
                    int left = (int)stream.CompressedSize;
                    while (left > 0)
                    {
                        int block = Math.Min(left, (int)data.Length);
                        // BUG FIX: Stream.Read may return fewer bytes than
                        // requested; the original ignored its return value
                        // and assumed a full block, silently corrupting the
                        // copy on short reads.
                        int read = this.Stream.Read(data, 0, block);
                        if (read <= 0)
                        {
                            throw new EndOfStreamException();
                        }
                        clean.Write(data, 0, read);
                        left -= read;
                    }

                    this.EndOfDataOffset += stream.CompressedSize;
                }
                else
                {
                    throw new InvalidOperationException();
                }

                dbpf.Entries.Add(entry);
            }

            dbpf.WriteIndex(clean);
            long indexSize = clean.Position - this.EndOfDataOffset;
            clean.Seek(0, SeekOrigin.Begin);
            dbpf.WriteHeader(clean, this.EndOfDataOffset, indexSize);

            // copy clean to real stream
            {
                this.Stream.Seek(this.BaseOffset, SeekOrigin.Begin);
                clean.Seek(0, SeekOrigin.Begin);
                byte[] data = new byte[4096];
                long left = clean.Length;
                while (left > 0)
                {
                    int block = (int)Math.Min(left, data.Length);
                    // BUG FIX: honor the actual number of bytes read (see above).
                    int read = clean.Read(data, 0, block);
                    if (read <= 0)
                    {
                        throw new EndOfStreamException();
                    }
                    this.Stream.Write(data, 0, read);
                    left -= read;
                }
            }

            // Drop any trailing bytes left over from the old, larger layout.
            this.Stream.SetLength(this.BaseOffset + this.EndOfDataOffset + indexSize);
        }

        if (tempFileName != null)
        {
            File.Delete(tempFileName);
        }
    }

    this._Entries.Clear();
    this.OriginalEntries.Clear();

    // Re-register every committed entry as a StreamEntry pointing at its
    // final location in the stream.
    foreach (DatabasePackedFile.Entry entry in dbpf.Entries)
    {
        this._Entries.Add(entry.Key, new StreamEntry()
        {
            Compressed = entry.Compressed,
            Offset = entry.Offset,
            CompressedSize = entry.CompressedSize,
            DecompressedSize = entry.DecompressedSize,
            CompressedFlags = entry.CompressionFlags,
            Flags = entry.Flags,
        });
    }
}