public LevelRulesEditor(FileManager fileManager, PackFileEntry fileEntry)
{
    InitializeComponent();
    DoubleBuffered = true;

    _fileManager = fileManager;

    byte[] fileBytes = fileManager.GetFileBytes(fileEntry);
    _levelRules = new LevelRulesFile(fileEntry.Path, null);
    _levelRules.ParseFileBytes(fileBytes);

    foreach (LevelRulesFile.LevelRule levelRule in _levelRules.LevelRules)
    {
        if (levelRule.StaticRooms != null)
        {
            _LoadRooms(levelRule.StaticRooms, fileEntry.Path);
        }
        else
        {
            foreach (LevelRulesFile.Room[] ruleRooms in levelRule.Rules)
            {
                _LoadRooms(ruleRooms, fileEntry.Path);
            }
        }
    }
}
/// <summary>
/// Extracts all Excel files to their \data\ locations.
/// Primarily a debug function.
/// </summary>
public void ExtractAllExcel(String root = null, bool doCSVAsWell = false)
{
    if (root == null) root = HellgatePath;

    foreach (PackFileEntry fileEntry in FileEntries.Values)
    {
        if (!fileEntry.Name.EndsWith(ExcelFile.Extension)) continue;

        PackFileEntry extractFileEntry = fileEntry;
        //if (fileEntry.Index.ToString().Contains("4580") && fileEntry.Siblings != null)
        //{
        //    extractFileEntry = (from fi in fileEntry.Siblings
        //                        where fi.Index.ToString().Contains("4256")
        //                        select fi).FirstOrDefault();

        //    if (extractFileEntry == null)
        //    {
        //        extractFileEntry = (from fi in fileEntry.Siblings
        //                            where fi.Index.ToString().Contains("000")
        //                            select fi).FirstOrDefault();
        //    }

        //    Debug.Assert(extractFileEntry != null);
        //}

        byte[] fileBytes = GetFileBytes(extractFileEntry, true);
        String filePath = Path.Combine(root, extractFileEntry.Path);
        Directory.CreateDirectory(Path.GetDirectoryName(filePath));
        File.WriteAllBytes(filePath, fileBytes);

        if (!doCSVAsWell) continue;

        byte[] csvBytes;
        try
        {
            ExcelFile excelFile = new ExcelFile(fileBytes, filePath, ClientVersion);
            csvBytes = excelFile.ExportCSV(this);
        }
        catch (Exception)
        {
            continue;
        }

        File.WriteAllBytes(filePath.Replace(ExcelFile.Extension, ExcelFile.ExtensionDeserialised), csvBytes);
    }
}
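// Example usage (illustrative only; assumes a FileManager instance whose index files have
// already been loaded, and a scratch output path of the caller's choosing):
//
//     fileManager.ExtractAllExcel(@"C:\hellgate_dump", doCSVAsWell: true);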
/// <summary>
/// Gets file byte data from the most principal location, considering file times and backup status.
/// The user must manually call EndAllDatAccess to close access to any .dat files opened during the process.
/// </summary>
/// <param name="fileEntry">The file entry details to read.</param>
/// <param name="ignorePatchedOut">If true, ignores the file's patched-out state, effectively forcing file reading from .dats as if it was never patched out.</param>
/// <returns>The file byte array, or null on error.</returns>
public byte[] GetFileBytes(PackFileEntry fileEntry, bool ignorePatchedOut = false)
{
    if (fileEntry == null) return null;
    byte[] fileBytes = null;

    // if the file is backed up (patched out), check for an unpacked copy on disk first
    String filePath = fileEntry.Path;
    if (fileEntry.IsPatchedOut && !ignorePatchedOut)
    {
        filePath = filePath.Replace(@"backup\", "");
        String fullPath = Path.Combine(HellgatePath, filePath);
        if (File.Exists(fullPath))
        {
            try
            {
                fileBytes = File.ReadAllBytes(fullPath);
            }
            catch (Exception)
            {
                Console.WriteLine("Warning: Reading from Backup - Failed to read from file: " + fullPath);
            }
        }
    }

    // if not backed up, or backed up but the loose file was not found/readable, read from the .dat
    if (fileBytes == null)
    {
        try
        {
            fileBytes = fileEntry.Pack.GetFileBytes(fileEntry);
        }
        catch (Exception e)
        {
            ExceptionLogger.LogException(e);
            Console.WriteLine("Warning: Failed to read file from .dat: " + fileEntry.Name);
            return null;
        }
    }

    return fileBytes;
}
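// A minimal caller-side sketch (assumed code, not part of this class) showing the pattern the
// summary above describes: read entries in a loop, then close any opened .dat handles afterwards
// via EndAllDatAccess. The outputRoot variable is hypothetical.
//
//     foreach (PackFileEntry fileEntry in fileManager.FileEntries.Values)
//     {
//         byte[] bytes = fileManager.GetFileBytes(fileEntry);
//         if (bytes == null) continue; // unreadable entry; skip it
//
//         String outPath = Path.Combine(outputRoot, fileEntry.Path);
//         Directory.CreateDirectory(Path.GetDirectoryName(outPath));
//         File.WriteAllBytes(outPath, bytes);
//     }
//     fileManager.EndAllDatAccess(); // close the .dat files opened while reading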
/// <summary>
/// Reads (and optionally decompresses) a file's bytes from the open .dat file.
/// </summary>
/// <param name="file">The file entry to read.</param>
/// <param name="decompress">If true, compressed entries are decompressed before being returned.</param>
/// <returns>The file byte array, or null on error.</returns>
private byte[] _GetFileBytes(PackFileEntry file, bool decompress)
{
    Debug.Assert(DatFile != null);
    int result;

    // we shouldn't load huge files into memory like this
    // todo: write progressive extraction/copy progress (cinematic files aren't compressed anyway)
    byte[] destBuffer = new byte[file.SizeUncompressed];
    DatFile.Seek(file.Offset, SeekOrigin.Begin);

    if (file.SizeCompressed > 0 && decompress)
    {
        byte[] srcBuffer = new byte[file.SizeCompressed];
        DatFile.Read(srcBuffer, 0, srcBuffer.Length);

        if (IntPtr.Size == 4) // x86
        {
            uint len = (uint)file.SizeUncompressed;
            result = uncompress(destBuffer, ref len, srcBuffer, (uint)file.SizeCompressed);
        }
        else // x64
        {
            ulong len = (ulong)file.SizeUncompressed;
            result = uncompress(destBuffer, ref len, srcBuffer, (uint)file.SizeCompressed);
        }

        if (result != 0) return null;
    }
    else
    {
        // if NOT decompressing and the file IS compressed (SizeCompressed > 0), read only the compressed length
        int readLength = !decompress && file.SizeCompressed > 0 ? file.SizeCompressed : file.SizeUncompressed;
        result = DatFile.Read(destBuffer, 0, readLength);
        if (result != readLength) return null; // compare against what was actually requested, not SizeUncompressed
    }

    return destBuffer;
}
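// The uncompress calls above (and the compress calls in _AddFileToDat below) bind to zlib's C API:
// int uncompress(Bytef* dest, uLongf* destLen, const Bytef* source, uLong sourceLen). A sketch of
// what such P/Invoke declarations could look like, inferred from the call sites; the DLL name
// ("zlibwapi.dll") and the 32/64-bit overload split are assumptions, not the project's confirmed bindings.
//
//     [DllImport("zlibwapi.dll")]
//     private static extern int uncompress(byte[] dest, ref uint destLen, byte[] source, uint sourceLen);   // x86: uLongf is 32-bit
//     [DllImport("zlibwapi.dll")]
//     private static extern int uncompress(byte[] dest, ref ulong destLen, byte[] source, uint sourceLen);  // x64: uLongf is 64-bit
//     [DllImport("zlibwapi.dll")]
//     private static extern int compress(byte[] dest, ref uint destLen, byte[] source, uint sourceLen);
//     [DllImport("zlibwapi.dll")]
//     private static extern int compress(byte[] dest, ref ulong destLen, byte[] source, ulong sourceLen);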
/// <summary>
/// Adds file bytes to the accompanying .dat file.
/// Does not remove old/duplicate file bytes when adding duplicates or new versions.
/// </summary>
/// <param name="fileData">The file byte array to add.</param>
/// <param name="fileEntry">The file entry details.</param>
protected void _AddFileToDat(byte[] fileData, PackFileEntry fileEntry)
{
    // ensure the .dat file is open for writing
    BeginDatWriting();
    Debug.Assert(DatFile != null && fileData != null && fileEntry != null);
    DatFile.Seek(0, SeekOrigin.End);

    byte[] writeBuffer;
    int writeLength;
    if (fileEntry.SizeCompressed > 0)
    {
        writeBuffer = new byte[fileData.Length];

        if (IntPtr.Size == 4) // x86
        {
            UInt32 destinationLength = (UInt32)writeBuffer.Length;
            compress(writeBuffer, ref destinationLength, fileData, (UInt32)fileData.Length);
            writeLength = (int)destinationLength;
        }
        else // x64
        {
            UInt64 destinationLength = (UInt64)writeBuffer.Length;
            compress(writeBuffer, ref destinationLength, fileData, (UInt64)fileData.Length);
            writeLength = (int)destinationLength;
        }

        fileEntry.SizeCompressed = writeLength;
    }
    else
    {
        writeBuffer = fileData;
        fileEntry.SizeCompressed = 0;
        writeLength = fileData.Length;
    }

    fileEntry.Offset = (int)DatFile.Position;
    fileEntry.SizeUncompressed = fileData.Length;
    DatFile.Write(writeBuffer, 0, writeLength);
    EndDatAccess();

    // Friday 25th March: there is a workaround to a problem with the file packing in Hellpack.
    // Needs tracing back.
}
void ReplaceFiles(PackFileEntry[] files)
{
    // method body is currently commented out (an older Index-based implementation kept for reference)

    //if (!index.BeginDatWriting())
    //{
    //    MessageBox("Failed to open accompanying dat file!\n" + index.FileNameWithoutExtension, "Error",
    //        MessageBoxButtons.OK, MessageBoxIcon.Error);
    //    return;
    //}

    //foreach (Index.FileEntry file in files)
    //{
    //    OpenFileDialog fileDialog = new OpenFileDialog();
    //    fileDialog.ShowDialog();
    //    if (fileDialog.FileName == "") continue;

    //    using (FileStream buffer = new FileStream(fileDialog.FileName, FileMode.Open))
    //    {
    //        try
    //        {
    //            byte[] byteBuffer = new byte[buffer.Length];
    //            buffer.Read(byteBuffer, 0, (int)buffer.Length);
    //            index.AppendToDat(byteBuffer, true, file, true);
    //        }
    //        catch
    //        {
    //            Console.WriteLine("Caught a problem replacing a file.");
    //        }
    //    }
    //}

    //index.EndDatAccess();
}
private void ExtractFiles(PackFileEntry[] files)
{
    new ProgressForm(DoExtractFiles, files).ShowDialog(this);
}
/// <summary>
/// Reads the accompanying .dat file for the file.
/// </summary>
/// <param name="file">The file to be read.</param>
/// <returns>A byte array of the file's bytes, or null on error.</returns>
public byte[] GetFileBytes(PackFileEntry file)
{
    return _GetFileBytes(file, true);
}
private void _ListView_AddFileItem(PackFileEntry fileEntry)
{
    String[] fileDetails = new String[5];
    fileDetails[0] = fileEntry.Name;
    fileDetails[1] = fileEntry.SizeUncompressed.ToString();
    fileDetails[2] = fileEntry.SizeCompressed.ToString();
    fileDetails[3] = DateTime.FromFileTime(fileEntry.FileTime).ToString();
    fileDetails[4] = fileEntry.IsPatchedOut ? fileEntry.Path : fileEntry.Pack.ToString();

    ListViewItem listViewItem = new ListViewItem(fileDetails) { Tag = fileEntry };
    if (fileEntry.IsPatchedOut) listViewItem.ForeColor = BackupColor;

    _files_listView.Items.Add(listViewItem);
}
/// <summary>
/// Merges a single parsed index file's entries into the file index, resolving duplicates by backup state and file time.
/// </summary>
/// <param name="packFile">The parsed index (pack) file whose entries are to be added.</param>
private void _LoadIndexFile(PackFile packFile)
{
    // loop through the index file's entries
    foreach (PackFileEntry currFileEntry in packFile.Files)
    {
        //if (currFileEntry.Name.Contains("bldg_c_station_warp_next_layout.xml.cooked") || currFileEntry.Name.Contains("sku."))
        //{
        //    int bp = 0;
        //}

        ulong pathHash = currFileEntry.PathHash;

        // have we added the file yet?
        if (!FileEntries.ContainsKey(pathHash))
        {
            FileEntries.Add(pathHash, currFileEntry);
            continue;
        }

        // the file has already been added, so we need to compare file times and backup states
        PackFileEntry origFileEntry = FileEntries[pathHash];

        // do backup checks first as they'll "override" the FileTime values (i.e. a missing file causes the game to fall back to an older version)
        // if currFile IS a backup and orig is NOT, then add it to Siblings, as the game will be loading orig over the "backup" anyway
        if (currFileEntry.IsPatchedOut && !origFileEntry.IsPatchedOut)
        {
            if (origFileEntry.Siblings == null) origFileEntry.Siblings = new List<PackFileEntry>();
            origFileEntry.Siblings.Add(currFileEntry);
            continue;
        }

        // if curr is NOT a backup but orig IS, then we want to update (i.e. don't care about FileTime; as above),
        // OR if orig is older than curr, we also want to update/re-arrange Siblings, etc.
        if ((!currFileEntry.IsPatchedOut && origFileEntry.IsPatchedOut) || origFileEntry.FileTime < currFileEntry.FileTime)
        {
            // move the Siblings list to the updated FileEntry and null out the other
            if (origFileEntry.Siblings != null)
            {
                currFileEntry.Siblings = origFileEntry.Siblings;
                origFileEntry.Siblings = null;
            }

            // add the "orig" (now old) entry to the curr FileEntry's Siblings list
            if (currFileEntry.Siblings == null) currFileEntry.Siblings = new List<PackFileEntry>();
            currFileEntry.Siblings.Add(origFileEntry);

            FileEntries[pathHash] = currFileEntry;
            continue;
        }

        // if curr is older than (or equal to; hellgate000 has duplicates) the orig, then add it to the Siblings list (i.e. orig is newer)
        if (origFileEntry.FileTime >= currFileEntry.FileTime)
        {
            if (origFileEntry.Siblings == null) origFileEntry.Siblings = new List<PackFileEntry>();
            origFileEntry.Siblings.Add(currFileEntry);
            continue;
        }

        Debug.Assert(false, "End of 'if (FileEntries.ContainsKey(hash))'", "This shouldn't happen, please report this.");
    }
}
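// The duplicate-resolution rules above, restated as a standalone predicate. This is an illustrative
// sketch only (not a member of this class): it answers "should currEntry replace origEntry as the
// principal entry for this path hash?", with the losing entry going into the winner's Siblings list.
//
//     private static bool _ShouldReplace(PackFileEntry origEntry, PackFileEntry currEntry)
//     {
//         // a patched-out (backup) entry never displaces a live one
//         if (currEntry.IsPatchedOut && !origEntry.IsPatchedOut) return false;
//
//         // a live entry always displaces a patched-out one, regardless of FileTime
//         if (!currEntry.IsPatchedOut && origEntry.IsPatchedOut) return true;
//
//         // otherwise the newer FileTime wins; ties keep the original (hellgate000 has duplicates)
//         return currEntry.FileTime > origEntry.FileTime;
//     }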