// Saves every file in the list whose instance id is NOT present in the
// known-name lookup ("unknown" files) into the folder chosen by the user.
private void OnSaveTheseUnknown(object sender, EventArgs e)
{
    if (this.saveAllFolderDialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    // using guarantees the archive stream is released even if the
    // save-progress dialog throws part-way through (the original only
    // closed it on the success path).
    using (Stream input = this.openDialog.OpenFile())
    {
        if (input == null)
        {
            return;
        }

        string basePath = this.saveAllFolderDialog.SelectedPath;

        // Collect only entries whose instance id has no known file name.
        List<DatabasePackedFile.Entry> files = new List<DatabasePackedFile.Entry>();
        for (int i = 0; i < this.fileList.Items.Count; i++)
        {
            DatabasePackedFile.Entry index = (DatabasePackedFile.Entry)(this.fileList.Items[i].Tag);
            if (Lookup.Files.ContainsKey(index.Key.InstanceId) == false)
            {
                files.Add(index);
            }
        }

        SaveAllProgress progress = new SaveAllProgress();
        progress.ShowSaveProgress(this, input, files.ToArray(), basePath);
    }
}
// Saves every file currently shown in the list view into the folder
// chosen by the user.
private void OnSaveTheseAll(object sender, EventArgs e)
{
    if (this.saveAllFolderDialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    // using guarantees the archive stream is released even if the
    // save-progress dialog throws part-way through (the original only
    // closed it on the success path).
    using (Stream input = this.openDialog.OpenFile())
    {
        if (input == null)
        {
            return;
        }

        string basePath = this.saveAllFolderDialog.SelectedPath;

        // Snapshot every entry tagged on the list items.
        DatabasePackedFile.Entry[] files = new DatabasePackedFile.Entry[this.fileList.Items.Count];
        for (int i = 0; i < this.fileList.Items.Count; i++)
        {
            files[i] = (DatabasePackedFile.Entry)(this.fileList.Items[i].Tag);
        }

        SaveAllProgress progress = new SaveAllProgress();
        progress.ShowSaveProgress(this, input, files, basePath);
    }
}
// Classifies a package entry: 0 = valid, 2 = corrupted "BlueLot" TXTC.
private int checkValidEntry(DatabasePackedFile.Entry entry, Database db)
{
    // Only the TXTC layout check exists today; a failing check maps to
    // code 2, everything else is considered valid (0).
    return checkTXTCEntry(entry, db) ? 0 : 2;
}
// Validates a TXTC (0x033A1435) resource: reads the TGI-block offset
// stored at byte 4 and checks that the block (1 count byte + 16 bytes per
// TGI, starting at offset + 8) ends exactly at the end of the stream.
// Returns true for non-TXTC entries and well-formed TXTCs, false when the
// layout math does not line up with the stream length.
private bool checkTXTCEntry(DatabasePackedFile.Entry entry, Database db)
{
    if (entry.Key.typeId != 0x033A1435)
    {
        return true;
    }

    // Quick and dirty layout check (same math as fixTXTR).
    Stream TXTC = null;
    try
    {
        TXTC = db.GetResourceStream(entry.Key);

        // Skip the 4-byte ID, then read the TGI block offset.
        StreamHelpers.ReadValueU32(TXTC);
        uint offset = StreamHelpers.ReadValueU32(TXTC);

        // The single byte at offset + 8 holds the TGI count.
        TXTC.Seek(offset + 8, SeekOrigin.Begin);
        uint numTGIs = StreamHelpers.ReadValueU8(TXTC);

        // Each TGI is 16 bytes; a valid block ends exactly at EOF.
        uint tgiSize = numTGIs * 16;
        uint tgiOffsetEnd = offset + 8 + 1 + tgiSize;
        return tgiOffsetEnd == TXTC.Length;
    }
    finally
    {
        // The original only nulled the reference; actually release the
        // resource stream.
        if (TXTC != null)
        {
            TXTC.Close();
        }
    }
}
// Scans for null-key (type 0, group 0, instance 0) entries whose payload
// is itself a DBPF/DBBF package (a recursively-embedded package) and
// deletes them. Returns true (after committing) if anything was removed,
// false if the file appears OK.
public static bool fixRecursive(Database db)
{
    uint numFixed = 0;

    for (int i = 0; i < db.dbpf.Entries.Count; i++)
    {
        DatabasePackedFile.Entry entry = db.dbpf.Entries[i];

        // Embedded packages show up as entries with an all-zero key.
        if ((entry.Key.typeId == 0x0) && (entry.Key.groupId == 0x0) && (entry.Key.instanceId == 0x0))
        {
            // Check the first 4 bytes of the stream for a package magic.
            // using ensures the stream is closed even if DeleteResource
            // throws (the original skipped Close() in that case).
            using (Stream checkDbpf = db.GetResourceStream(entry.Key))
            {
                string magic = MadScience.StreamHelpers.ReadStringASCII(checkDbpf, 4);
                if (magic == "DBPF" || magic == "DBBF") // DBPF & DBBF
                {
                    db.DeleteResource(entry.Key);
                    numFixed++;
                }
            }
        }
    }

    if (numFixed == 0)
    {
        return false;
    }

    // Something was deleted; rewrite the package cleanly.
    db.Commit(true);
    return true;
}
// Copies the sorted list of known (named) files in the list view to the
// clipboard, one name per line, as Unicode text.
private void OnCopyKnownList(object sender, EventArgs e)
{
    List<string> names = new List<string>();
    for (int i = 0; i < this.fileList.Items.Count; i++)
    {
        DatabasePackedFile.Entry index = (DatabasePackedFile.Entry)(this.fileList.Items[i].Tag);

        // TryGetValue avoids the ContainsKey + indexer double lookup.
        string name;
        if (Lookup.Files.TryGetValue(index.Key.InstanceId, out name))
        {
            names.Add(name);
        }
    }
    names.Sort();

    StringBuilder output = new StringBuilder();
    foreach (string name in names)
    {
        output.AppendLine(name);
    }
    Clipboard.SetText(output.ToString(), TextDataFormat.UnicodeText);
}
// Applies the key fields edited in the form's text boxes (instance id and
// group id as hex, offset as decimal) to the index entry selected in
// listView1, moves the resource to the new key, and commits the package.
//
// NOTE(review): indexEntry and oldEntry are both read from
// indexEntries[destIndexId]. If DatabasePackedFile.Entry (or its Key) is a
// reference type, both names alias the SAME object, so the mutations of
// indexEntry.Key below also change oldEntry.Key and MoveResource ends up
// called with identical old/new keys — confirm Entry/ResourceKey are value
// types before relying on this working. The two `new
// DatabasePackedFile.Entry()` allocations are immediately overwritten and
// are dead.
//
// NOTE(review): `input` is not closed if MoveResource/Commit throw —
// consider a using/finally. Also, MessageBox.Show("bad file: {0}", ...)
// never formats the placeholder: the second argument is the dialog
// *caption*, not a format argument.
private void button1_Click(object sender, EventArgs e) { if (listView1.SelectedItems.Count == 1) { ListViewItem item = listView1.SelectedItems[0]; int destIndexId = Convert.ToInt32(item.SubItems[5].Text); DatabasePackedFile.Entry indexEntry = new DatabasePackedFile.Entry(); DatabasePackedFile.Entry oldEntry = new DatabasePackedFile.Entry(); indexEntry = (DatabasePackedFile.Entry)indexEntries[destIndexId]; oldEntry = (DatabasePackedFile.Entry)indexEntries[destIndexId]; indexEntry.Key.InstanceId = UInt64.Parse(txtInstanceID.Text, System.Globalization.NumberStyles.AllowHexSpecifier); indexEntry.Key.GroupId = UInt32.Parse(txtGroupId.Text, System.Globalization.NumberStyles.AllowHexSpecifier); indexEntry.Offset = Convert.ToInt64(txtOffset.Text); //indexEntry.Key.InstanceId = 0; // Open DBPF Stream input = File.Open(this.currentFile.FullName, FileMode.Open, FileAccess.ReadWrite); /* DatabasePackedFile dbpf = new DatabasePackedFile(); try { dbpf.Read(input); } catch (NotAPackageException) { MessageBox.Show("bad file: {0}", openFileDialog1.FileName); input.Close(); return; } input.Seek(0, SeekOrigin.Begin); */ Database db; try { db = new Database(input, true); } catch (NotAPackageException) { MessageBox.Show("bad file: {0}", this.currentFile.FullName); input.Close(); return; } db.MoveResource(oldEntry.Key, indexEntry.Key); indexEntries[destIndexId] = indexEntry; item.SubItems[3].Text = indexEntry.Key.InstanceId.ToString("X16"); db.Commit(true); input.Close(); } }
// Worker entry point: extracts every entry in info.Files into a folder
// tree (category/type-directory/group for known types, unknown/#typeid/
// group otherwise) under info.BasePath, and writes a files.xml manifest
// mapping each key to its extracted path. Known instance ids get their
// looked-up names (stripped of invalid filename characters); unknown ones
// get "#<hex instance id>".
public void SaveAll(object oinfo)
{
    SaveAllInformation info = (SaveAllInformation)oinfo;

    // using ensures the manifest writer is closed even if extraction
    // throws part-way through.
    using (XmlTextWriter writer = new XmlTextWriter(Path.Combine(info.BasePath, "files.xml"), Encoding.UTF8))
    {
        writer.Formatting = Formatting.Indented;
        writer.WriteStartDocument();
        writer.WriteStartElement("files");

        for (int i = 0; i < info.Files.Length; i++)
        {
            DatabasePackedFile.Entry index = info.Files[i];

            // Resolve a file name: the known lookup name (minus characters
            // illegal in file names) or the hex instance id.
            string fileName;
            if (Lookup.Files.ContainsKey(index.Key.InstanceId))
            {
                fileName = Lookup.Files[index.Key.InstanceId];
                char[] invalids = Path.GetInvalidFileNameChars();
                for (int j = 0; j < invalids.Length; j++)
                {
                    fileName = fileName.Replace(invalids[j].ToString(), "");
                }
            }
            else
            {
                fileName = "#" + index.Key.InstanceId.ToString("X16");
            }

            // Resolve the group folder name the same way.
            string groupName;
            if (Lookup.Groups.ContainsKey(index.Key.GroupId))
            {
                groupName = Lookup.Groups[index.Key.GroupId];
            }
            else
            {
                groupName = "#" + index.Key.GroupId.ToString("X8");
            }

            // Known types get a category/directory path and an extension;
            // unknown types land under unknown/#typeid.
            string fragmentPath;
            if (Lookup.Types.ContainsKey(index.Key.TypeId) == true)
            {
                TypeLookup type = Lookup.Types[index.Key.TypeId];
                fragmentPath = Path.Combine(type.Category, type.Directory);
                fragmentPath = Path.Combine(fragmentPath, groupName);
                fileName += "." + type.Extension;
            }
            else
            {
                fragmentPath = Path.Combine("unknown", "#" + index.Key.TypeId.ToString("X8"));
                fragmentPath = Path.Combine(fragmentPath, groupName);
            }

            Directory.CreateDirectory(Path.Combine(info.BasePath, fragmentPath));
            string path = Path.Combine(fragmentPath, fileName);
            this.SetStatus(path, i);

            // Manifest record for this entry.
            writer.WriteStartElement("file");
            writer.WriteAttributeString("groupid", "0x" + index.Key.GroupId.ToString("X8"));
            writer.WriteAttributeString("instanceid", "0x" + index.Key.InstanceId.ToString("X16"));
            writer.WriteAttributeString("typeid", "0x" + index.Key.TypeId.ToString("X8"));
            writer.WriteValue(path);
            writer.WriteEndElement();

            path = Path.Combine(info.BasePath, path);

            if (index.Compressed)
            {
                // Compressed payload: RefPack-decompress then write out.
                info.Archive.Seek(index.Offset, SeekOrigin.Begin);
                byte[] d = info.Archive.RefPackDecompress();
                using (FileStream output = new FileStream(path, FileMode.Create))
                {
                    output.Write(d, 0, d.Length);
                }
            }
            else
            {
                // Uncompressed payload: raw copy of DecompressedSize bytes.
                info.Archive.Seek(index.Offset, SeekOrigin.Begin);
                byte[] d = new byte[index.DecompressedSize];
                info.Archive.Read(d, 0, d.Length);
                using (FileStream output = new FileStream(path, FileMode.Create))
                {
                    output.Write(d, 0, d.Length);
                }
            }
        }

        writer.WriteEndElement();
        writer.WriteEndDocument();
        writer.Flush();
    }

    this.SaveDone();
}
// Classifies a package by inspecting its entries. Flow:
//   1. Short-circuit on package types already flagged by the DBPF reader
//      (corrupt variants, store/Sims2/PNG thumbnails).
//   2. Walk every entry, detecting: recursively-embedded DBPF/DBBF
//      packages under all-zero keys, corrupt TXTC layouts (via
//      checkValidEntry), PTRN patterns, and counting each recognized
//      ResourceTypes name into `rc`.
//   3. Use the accumulated resource counts to pick a MainType/SubType
//      (neighbourhood, lot, sim, object, core mod, CAS part with
//      hair/makeup/clothing/accessory subtyping and bad-age corruption
//      checks, slider, texture replacement, XML tuning mod).
// When loopAll is false the method returns at the first decisive result;
// when true it keeps scanning so all corruptions are recorded.
//
// NOTE(review): the WLOT/UNKW1 (neighbourhood) branch sets
// this.isCorrupt = true unconditionally — presumably neighbourhood
// packages are treated as invalid custom content; confirm intent.
// NOTE(review): streams from Search.getStream (OBJD/CASP) are not
// explicitly closed here — TODO confirm ownership.
public PackageType getType(Database db, bool loopAll) { // Do some quick sanity checks switch (db.dbpf.packageType) { case PackageTypes.genericPackage: break; case PackageTypes.corruptBadDownload: case PackageTypes.corruptChaavik: case PackageTypes.corruptIndex: case PackageTypes.corruptPeggy: case PackageTypes.corruptNotADBPF: case PackageTypes.corruptTXTC: this.isCorrupt = true; this.pType.SubType = ""; this.pType.MainType = db.dbpf.packageType; return(this.pType); case PackageTypes.sims3Store: case PackageTypes.sims2Package: case PackageTypes.pngThumbnail: this.pType.SubType = ""; this.pType.MainType = db.dbpf.packageType; return(this.pType); } rc.Clear(); this.pType = new PackageType(); //print(db.dbpf.Entries.Count + " entries found"); for (int i = 0; i < db.dbpf.Entries.Count; i++) { DatabasePackedFile.Entry entry = db.dbpf.Entries[i]; if ((entry.Key.typeId == (uint)ResourceTypes.NULL) && (entry.Key.groupId == (uint)ResourceTypes.NULL) && (entry.Key.instanceId == (uint)ResourceTypes.NULL)) { // Check the first 4 bytes of the stream Stream checkDbpf = db.GetResourceStream(entry.Key); string magic = MadScience.StreamHelpers.ReadStringASCII(checkDbpf, 4); if (magic == "DBPF" || magic == "DBBF") // DBPF & DBBF { this.isCorrupt = true; this.pType.MainType = PackageTypes.corruptRecursive; this.pType.SubType = "This package contains another package inside it."; if (!loopAll) { return(this.pType); } } checkDbpf.Close(); } if (entry.Key.typeId == (uint)ResourceTypes.TXTC) { int isValid = checkValidEntry(entry, db); if (isValid > 0) { if (isValid == 2) { this.isCorrupt = true; this.pType.MainType = PackageTypes.corruptTXTC; if (!loopAll) { return(this.pType); } } } } if (entry.Key.typeId == (uint)ResourceTypes.PTRN) { if (this.pType.MainType == PackageTypes.genericPackage) { this.pType.MainType = PackageTypes.patternGeneric; } if (!loopAll) { return(this.pType); } } if (Enum.IsDefined(typeof(ResourceTypes), entry.Key.typeId)) { if 
(rc.ContainsKey(Enum.GetName(typeof(ResourceTypes), entry.Key.typeId))) { rc[Enum.GetName(typeof(ResourceTypes), entry.Key.typeId)]++; } else { rc.Add(Enum.GetName(typeof(ResourceTypes), entry.Key.typeId), 1); } } } //print("Done"); if (rc.ContainsKey("WLOT") && rc.ContainsKey("UNKW1")) { if (this.pType.MainType == PackageTypes.genericPackage) { this.pType.MainType = PackageTypes.neighbourhood; } this.isCorrupt = true; return(this.pType); } if (rc.ContainsKey("WLTL") && rc.ContainsKey("ARY2")) { this.pType.MainType = PackageTypes.lot; return(this.pType); } if (rc.ContainsKey("SIMO") && rc.ContainsKey("SIME") && rc.ContainsKey("SNAP") && rc.ContainsKey("SNAPL")) { this.pType.MainType = PackageTypes.sim; return(this.pType); } //this.pType.MainType = PackageTypes.genericPackage; // Check Objects if (rc.ContainsKey("OBJD")) { if (this.pType.MainType == PackageTypes.genericPackage) { this.pType.MainType = PackageTypes.objectGeneric; } Stream objStream = MadScience.Package.Search.getStream(db, 0x319E4F1D, -1, -1); if (StreamHelpers.isValidStream(objStream)) { OBJD objd = new OBJD(objStream); this.pType.SubType = objd.ToString(); objd = null; } return(this.pType); } if (rc.ContainsKey("S3SA")) { if (this.pType.MainType == PackageTypes.genericPackage) { this.pType.MainType = PackageTypes.coremod; } } if (rc.ContainsKey("CASP")) { if (this.pType.MainType == PackageTypes.genericPackage) { this.pType.MainType = PackageTypes.casPartGeneric; } Stream casStream = MadScience.Package.Search.getStream(db, 0x034AEECB, -1, -1); if (StreamHelpers.isValidStream(casStream)) { casPartFile cFile = new casPartFile(); cFile.Load(casStream); this.pType.SubType = cFile.clothingType(); switch (cFile.casType()) { case "Hair": this.pType.MainType = PackageTypes.casPartHair; break; case "Scalp": break; case "Face Overlay": switch (cFile.clothingType()) { case "Lipstick": case "Eyeshadow": case "Eyeliner": case "Blush": case "Makeup": case "Mascara": this.pType.MainType = 
PackageTypes.casPartMakeup; break; default: this.pType.MainType = PackageTypes.casPartFaceOverlay; break; } break; case "Body": this.pType.MainType = PackageTypes.casPartClothing; this.pType.SubType = cFile.clothingCategory(); // Check the TYPE of clothing we have switch (cFile.clothingType()) { case "Body": case "Top": case "Bottom": case "Shoes": // Check the age too // If we have Toddler OR Child OR Teen, plus other ages bool ageCorrupt = false; //if ((cFile.cFile.ageGender.baby || cFile.cFile.ageGender.toddler || cFile.cFile.ageGender.child || cFile.cFile.ageGender.teen) && (cFile.cFile.ageGender.youngAdult || cFile.cFile.ageGender.adult || cFile.cFile.ageGender.elder)) //{ // ageCorrupt = true; //} // If we have Baby AND any other age... if (cFile.cFile.ageGender.baby && (cFile.cFile.ageGender.toddler || cFile.cFile.ageGender.child || cFile.cFile.ageGender.teen || cFile.cFile.ageGender.youngAdult || cFile.cFile.ageGender.adult || cFile.cFile.ageGender.elder)) { ageCorrupt = true; } // If we have Toddler AND any other age... 
if (cFile.cFile.ageGender.toddler && (cFile.cFile.ageGender.child || cFile.cFile.ageGender.teen || cFile.cFile.ageGender.youngAdult || cFile.cFile.ageGender.adult || cFile.cFile.ageGender.elder)) { ageCorrupt = true; } // If we have Child AND any other age if (cFile.cFile.ageGender.child && (cFile.cFile.ageGender.teen || cFile.cFile.ageGender.youngAdult || cFile.cFile.ageGender.adult || cFile.cFile.ageGender.elder)) { ageCorrupt = true; } // If we have Teen AND any other age if (cFile.cFile.ageGender.teen && (cFile.cFile.ageGender.youngAdult || cFile.cFile.ageGender.adult || cFile.cFile.ageGender.elder)) { ageCorrupt = true; } if (ageCorrupt) { this.isCorrupt = true; this.pType.MainType = PackageTypes.corruptBadAges; if (!loopAll) { return(this.pType); } } break; default: break; } break; case "Accessory": this.pType.MainType = PackageTypes.casPartAccessory; break; } this.pType.SubType += " (" + cFile.ageGender() + ")"; } return(this.pType); } if (rc.ContainsKey("FBLN") && rc.ContainsKey("FACE") && rc.ContainsKey("BOND")) { if (this.pType.MainType == PackageTypes.genericPackage) { this.pType.MainType = PackageTypes.casSlider; } return(this.pType); } if (rc.ContainsKey("_IMG") && rc.Count == 1) { if (this.pType.MainType == PackageTypes.genericPackage) { this.pType.MainType = PackageTypes.textureReplacement; } } if (rc.Count == 1 && (rc.ContainsKey("_XML") || rc.ContainsKey("_XML2"))) { if (this.pType.MainType == PackageTypes.genericPackage) { this.pType.MainType = PackageTypes.xmltuningmod; } } return(this.pType); }
// Searches FullBuild2.package for the resource named by keyString
// ("key:type:group:instance", hex parts) and shows a DDS preview dialog
// if found. Returns true when a matching entry was found.
private bool searchForKey(string keyString)
{
    // Validate keystring
    if (validateKey(keyString) == false)
    {
        return false;
    }

    string sims3root = MadScience.Helpers.findSims3Root();
    if (String.IsNullOrEmpty(sims3root))
    {
        return false;
    }

    toolStripProgressBar1.Minimum = 0;
    toolStripProgressBar1.Value = 0;
    toolStripStatusLabel1.Text = "Searching for image... please wait";
    statusStrip1.Refresh();

    string packagePath = Path.Combine(sims3root, MadScience.Helpers.getGameSubPath("\\GameData\\Shared\\Packages\\FullBuild2.package"));
    Stream input = File.OpenRead(packagePath);
    try
    {
        Database db = new Database(input, true);
        input.Seek(0, SeekOrigin.Begin);

        DatabasePackedFile dbpf = new DatabasePackedFile();
        try
        {
            dbpf.Read(input);
        }
        catch (MadScience.Exceptions.NotAPackageException)
        {
            // The original passed the path as the MessageBox *caption*, so
            // the "{0}" placeholder was never substituted (and it reported
            // a different path than the one actually opened). Format the
            // real path into the message.
            MessageBox.Show(string.Format("bad file: {0}", packagePath));
            return false;
        }

        // Split the "key:type:group:instance" string into its hex parts.
        keyString = keyString.Replace("key:", "");
        string[] temp = keyString.Split(":".ToCharArray());
        uint typeID = MadScience.StringHelpers.ParseHex32("0x" + temp[0]);
        uint groupID = MadScience.StringHelpers.ParseHex32("0x" + temp[1]);
        ulong instanceID = MadScience.StringHelpers.ParseHex64("0x" + temp[2]);

        toolStripProgressBar1.Maximum = dbpf.Entries.Count;

        // Linear scan for the exact key; preview the first match.
        bool foundMatch = false;
        for (int i = 0; i < dbpf.Entries.Count; i++)
        {
            DatabasePackedFile.Entry entry = dbpf.Entries[i];
            toolStripProgressBar1.Value++;
            if (entry.Key.typeId == typeID && entry.Key.groupId == groupID && entry.Key.instanceId == instanceID)
            {
                foundMatch = true;
                MadScience.DDSPreview ddsP = new MadScience.DDSPreview();
                ddsP.loadDDS(db.GetResourceStream(entry.Key));
                ddsP.ShowDialog();
                break;
            }
        }

        toolStripProgressBar1.Value = 0;
        toolStripStatusLabel1.Text = "";
        statusStrip1.Refresh();
        return foundMatch;
    }
    finally
    {
        // Close the package stream on every exit path (the original
        // leaked it when dbpf.Read threw after the Database was created).
        input.Close();
    }
}
// Scans every TXTC (0x033A1435) entry and repairs a common corruption in
// which the stored TGI-block offset (at byte 4) is one byte too large.
// The check: the byte at offset + 8 is the TGI count, each TGI is 16
// bytes, and a valid block must end exactly at the stream's end. Returns
// true (after committing) if any entry was fixed, false otherwise.
public static bool fixTXTR(Database db)
{
    uint numFixed = 0;

    for (int i = 0; i < db.dbpf.Entries.Count; i++)
    {
        DatabasePackedFile.Entry entry = db.dbpf.Entries[i];
        if (entry.Key.typeId != 0x033A1435)
        {
            continue;
        }

        // Quick and dirty way: read the TGI block offset (first 4 bytes
        // after the ID) and verify the block layout against EOF.
        // using closes the resource stream (the original never did).
        using (Stream TXTC = db.GetResourceStream(entry.Key))
        {
            StreamHelpers.ReadValueU32(TXTC);
            uint offset = StreamHelpers.ReadValueU32(TXTC);

            TXTC.Seek(offset + 8, SeekOrigin.Begin);
            uint numTGIs = StreamHelpers.ReadValueU8(TXTC);
            uint tgiSize = numTGIs * 16;
            uint tgiOffsetEnd = offset + 8 + 1 + tgiSize;

            if (tgiOffsetEnd == TXTC.Length)
            {
                // This entry is fine; keep scanning the remaining entries.
                // (The original returned false here, which skipped every
                // later entry and abandoned any fixes already applied
                // in-memory without committing them.)
                continue;
            }

            // Try offset - 1 and re-run the same layout check.
            offset = offset - 1;
            TXTC.Seek(offset + 8, SeekOrigin.Begin);
            numTGIs = StreamHelpers.ReadValueU8(TXTC);
            tgiSize = numTGIs * 16;
            tgiOffsetEnd = offset + 8 + 1 + tgiSize;

            if (tgiOffsetEnd == TXTC.Length)
            {
                // Offset was one too large: write the corrected value back
                // at byte 4 and replace the resource with the fixed copy.
                TXTC.Seek(4, SeekOrigin.Begin);
                StreamHelpers.WriteValueU32(TXTC, offset);

                MemoryStream newTXTC = new MemoryStream();
                StreamHelpers.CopyStream(TXTC, newTXTC, true);
                db.SetResourceStream(entry.Key, newTXTC);
                numFixed++;
                newTXTC.Close();
            }
            // else: neither offset works; leave the entry untouched.
        }
    }

    if (numFixed == 0)
    {
        return false;
    }

    // Persist the repaired resources.
    db.Commit(true);
    return true;
}
// Writes all pending entries back to the underlying package stream.
//
// cleanCommit == false: in-place commit. Memory-backed entries are written
// either over their old slot (when the new data fits) or appended at
// EndOfDataOffset; stream-backed entries keep their existing offsets. The
// index is then written after the data and the header rewritten.
//
// cleanCommit == true: full rewrite. A scratch stream (a temp file for
// packages >= 5 MB, otherwise a MemoryStream) receives a fresh header,
// every entry's data (memory entries written directly, stream entries
// block-copied 4 KB at a time from this.Stream), the index, and the final
// header; the scratch is then copied back over this.Stream and the stream
// truncated to the new length.
//
// Afterwards _Entries is rebuilt as StreamEntry records matching what was
// written, and OriginalEntries is cleared.
//
// NOTE(review): CompressedSize is stored OR'd with 0x80000000 in the index
// entry; the in-memory write uses the un-masked memory.CompressedSize, so
// the high bit is presumably an index flag — confirm against the DBPF
// format before touching this math.
// NOTE(review): in the >= 5 MB path the temp file is deleted but `clean`
// is never explicitly closed/disposed — the File.Delete call may rely on
// FileShare semantics; verify on Windows. this.Stream.Read's return value
// is also unchecked (assumes full reads).
public void Commit(bool cleanCommit) { if (this.Stream.CanWrite == false) { throw new NotSupportedException(); } DatabasePackedFile dbpf = new DatabasePackedFile(); dbpf.Version = new Version(2, 0); if (cleanCommit == false) { if (this.EndOfDataOffset == 0) { // new archive this.Stream.Seek(this.BaseOffset, SeekOrigin.Begin); dbpf.WriteHeader(this.Stream, 0, 0); this.EndOfDataOffset = this.Stream.Position - this.BaseOffset; } foreach (KeyValuePair<ResourceKey, Entry> kvp in this._Entries) { DatabasePackedFile.Entry entry = new DatabasePackedFile.Entry(); entry.Key = kvp.Key; if (kvp.Value is MemoryEntry) { MemoryEntry memory = (MemoryEntry)kvp.Value; entry.DecompressedSize = memory.DecompressedSize; entry.CompressedSize = memory.CompressedSize | 0x80000000; entry.Flags = 1; entry.Compressed = false; // Is this replacing old data? if (this.OriginalEntries.ContainsKey(kvp.Key) == true) { StreamEntry stream = this.OriginalEntries[kvp.Key]; // Let's see if the new data can fit where the old data was if (memory.CompressedSize <= stream.CompressedSize) { entry.Offset = stream.Offset; this.Stream.Seek(this.BaseOffset + stream.Offset, SeekOrigin.Begin); this.Stream.Write(memory.Data, 0, (int)memory.CompressedSize); } else { entry.Offset = this.EndOfDataOffset; this.Stream.Seek(this.BaseOffset + this.EndOfDataOffset, SeekOrigin.Begin); this.Stream.Write(memory.Data, 0, (int)memory.CompressedSize); this.EndOfDataOffset += memory.CompressedSize; } } // New data else { entry.Offset = this.EndOfDataOffset; this.Stream.Seek(this.BaseOffset + this.EndOfDataOffset, SeekOrigin.Begin); this.Stream.Write(memory.Data, 0, (int)memory.CompressedSize); this.EndOfDataOffset += memory.CompressedSize; } } else if (kvp.Value is StreamEntry) { StreamEntry stream = (StreamEntry)kvp.Value; entry.Compressed = stream.Compressed; entry.CompressedSize = stream.CompressedSize | 0x80000000; entry.DecompressedSize = stream.DecompressedSize; entry.Offset = stream.Offset; entry.CompressionFlags = 
stream.CompressedFlags; entry.Flags = stream.Flags; } else { throw new InvalidOperationException(); } dbpf.Entries.Add(entry); } this.Stream.Seek(this.BaseOffset + this.EndOfDataOffset, SeekOrigin.Begin); dbpf.WriteIndex(this.Stream); long indexSize = (this.Stream.Position - (this.BaseOffset + this.EndOfDataOffset)); this.Stream.Seek(this.BaseOffset, SeekOrigin.Begin); dbpf.WriteHeader(this.Stream, this.EndOfDataOffset, indexSize); } else { Stream clean; string tempFileName = null; // Packages greater than five mb will be cleaned with a file supported stream if (this.Stream.Length >= (1024 * 1024) * 5) { tempFileName = Path.GetTempFileName(); clean = File.Open(tempFileName, FileMode.Create, FileAccess.ReadWrite, FileShare.Read); } else { clean = new MemoryStream(); } dbpf.WriteHeader(clean, 0, 0); this.EndOfDataOffset = clean.Position; foreach (KeyValuePair<ResourceKey, Entry> kvp in this._Entries) { DatabasePackedFile.Entry entry = new DatabasePackedFile.Entry(); entry.Key = kvp.Key; if (kvp.Value is MemoryEntry) { MemoryEntry memory = (MemoryEntry)kvp.Value; entry.DecompressedSize = memory.DecompressedSize; entry.CompressedSize = memory.CompressedSize | 0x80000000; entry.Flags = 1; entry.Compressed = false; entry.Offset = this.EndOfDataOffset; clean.Write(memory.Data, 0, (int)memory.CompressedSize); this.EndOfDataOffset += memory.CompressedSize; } else if (kvp.Value is StreamEntry) { StreamEntry stream = (StreamEntry)kvp.Value; entry.Compressed = stream.Compressed; entry.CompressedSize = stream.CompressedSize | 0x80000000; entry.DecompressedSize = stream.DecompressedSize; entry.CompressionFlags = stream.CompressedFlags; entry.Flags = stream.Flags; entry.Offset = this.EndOfDataOffset; // Copy data this.Stream.Seek(this.BaseOffset + stream.Offset, SeekOrigin.Begin); byte[] data = new byte[4096]; int left = (int)stream.CompressedSize; while (left > 0) { int block = Math.Min(left, (int)data.Length); this.Stream.Read(data, 0, block); clean.Write(data, 0, block); left 
-= block; } this.EndOfDataOffset += stream.CompressedSize; } else { throw new InvalidOperationException(); } dbpf.Entries.Add(entry); } dbpf.WriteIndex(clean); long indexSize = clean.Position - this.EndOfDataOffset; clean.Seek(0, SeekOrigin.Begin); dbpf.WriteHeader(clean, this.EndOfDataOffset, indexSize); // copy clean to real stream { this.Stream.Seek(this.BaseOffset, SeekOrigin.Begin); clean.Seek(0, SeekOrigin.Begin); byte[] data = new byte[4096]; long left = clean.Length; while (left > 0) { int block = (int)Math.Min(left, data.Length); clean.Read(data, 0, block); this.Stream.Write(data, 0, block); left -= block; } } this.Stream.SetLength(this.BaseOffset + this.EndOfDataOffset + indexSize); if (tempFileName != null) { File.Delete(tempFileName); } } this._Entries.Clear(); this.OriginalEntries.Clear(); foreach (DatabasePackedFile.Entry entry in dbpf.Entries) { this._Entries.Add(entry.Key, new StreamEntry() { Compressed = entry.Compressed, Offset = entry.Offset, CompressedSize = entry.CompressedSize, DecompressedSize = entry.DecompressedSize, CompressedFlags = entry.CompressionFlags, Flags = entry.Flags, }); } }
// Handles File > Open: reads the selected package's index, then rebuilds
// the type tree — known types grouped under category/type nodes from the
// lookup tables, unknown types listed by raw hex type id — tagging each
// type node with the list of entries of that type.
private void OnOpen(object sender, EventArgs e)
{
    if (this.openDialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    if (this.openDialog.InitialDirectory != null)
    {
        this.openDialog.InitialDirectory = null;
    }

    DatabasePackedFile dbpf = new DatabasePackedFile();

    // The stream is only needed while reading the index and map names;
    // the original never closed it.
    using (Stream input = this.openDialog.OpenFile())
    {
        dbpf.Read(input);
        this.AddMapNames(dbpf, input);
    }

    this.DatabaseFiles = dbpf.Entries.ToArray();

    Dictionary<string, TreeNode> categoryNodes = new Dictionary<string, TreeNode>();
    Dictionary<uint, TreeNode> unknownTypeNodes = new Dictionary<uint, TreeNode>();

    this.typeList.Nodes.Clear();
    this.typeList.BeginUpdate();

    TreeNode knownNode = new TreeNode("Known");
    TreeNode unknownNode = new TreeNode("Unknown");

    for (int i = 0; i < this.DatabaseFiles.Length; i++)
    {
        DatabasePackedFile.Entry index = this.DatabaseFiles[i];
        TreeNode typeNode = null;

        if (Lookup.Types.ContainsKey(index.Key.TypeId) == true)
        {
            TypeLookup type = Lookup.Types[index.Key.TypeId];

            // Find or create the category node; its Tag maps type id to
            // the type node within that category.
            TreeNode categoryNode;
            if (categoryNodes.ContainsKey(type.Category) == false)
            {
                categoryNode = new TreeNode();
                categoryNode.Text = type.Category;
                categoryNode.Tag = new Dictionary<uint, TreeNode>();
                knownNode.Nodes.Add(categoryNode);
                categoryNodes[type.Category] = categoryNode;
            }
            else
            {
                categoryNode = categoryNodes[type.Category];
            }

            Dictionary<uint, TreeNode> typeNodes = categoryNode.Tag as Dictionary<uint, TreeNode>;
            if (typeNodes.ContainsKey(index.Key.TypeId) == false)
            {
                typeNode = new TreeNode();
                typeNode.Text = type.Description == null ? type.Directory : type.Description;
                typeNode.Tag = new List<DatabasePackedFile.Entry>();
                typeNode.NodeFont = this.MonospaceFont;
                categoryNode.Nodes.Add(typeNode);
                typeNodes[index.Key.TypeId] = typeNode;
            }
            else
            {
                typeNode = typeNodes[index.Key.TypeId];
            }
        }
        else
        {
            // Unknown type: group directly under the Unknown root by id.
            if (unknownTypeNodes.ContainsKey(index.Key.TypeId) == false)
            {
                typeNode = new TreeNode();
                typeNode.Text = "#" + index.Key.TypeId.ToString("X8");
                typeNode.Tag = new List<DatabasePackedFile.Entry>();
                typeNode.NodeFont = this.MonospaceFont;
                unknownNode.Nodes.Add(typeNode);
                unknownTypeNodes[index.Key.TypeId] = typeNode;
            }
            else
            {
                typeNode = unknownTypeNodes[index.Key.TypeId];
            }
        }

        // Every type node accumulates the entries of that type.
        List<DatabasePackedFile.Entry> files = typeNode.Tag as List<DatabasePackedFile.Entry>;
        files.Add(index);
    }

    if (knownNode.Nodes.Count > 0)
    {
        this.typeList.Nodes.Add(knownNode);
    }
    if (unknownNode.Nodes.Count > 0)
    {
        this.typeList.Nodes.Add(unknownNode);
    }

    this.typeList.Sort();
    this.typeList.EndUpdate();

    // Expand whichever root actually has content.
    if (knownNode.Nodes.Count > 0)
    {
        knownNode.Expand();
    }
    else if (unknownNode.Nodes.Count > 0)
    {
        unknownNode.Expand();
    }
}