/// <summary>
/// Unpacks a single TOC entry key: recurses into it when it is a directory,
/// otherwise locates its file info node and extracts the file.
/// </summary>
/// <param name="entryKey">Entry key from the volume's table of contents.</param>
public void UnpackFromKey(FileEntryKey entryKey)
{
    // Resolve the entry's virtual path relative to the current parent directory.
    string entryPath = _volume.GetEntryPath(entryKey, ParentDirectory);
    if (string.IsNullOrEmpty(entryPath))
    {
        Program.Log($"Could not determine entry path for Entry key at name Index {entryKey.NameIndex}");
        return;
    }

    string fullEntryPath = Path.Combine(OutDir ?? string.Empty, entryPath);

    if (entryKey.Flags.HasFlag(EntryKeyFlags.Directory))
    {
        // Only announce directories when no index filter is active, to avoid log spam.
        if (_fileIndexesToExtract.Count == 0 && (!_volume.IsPatchVolume || _volume.NoUnpack))
            Program.Log($"[:] Entering Directory: {entryPath}");

        // Descend into the child b-tree and unpack everything beneath it.
        var subTree = new FileEntryBTree(_volume.TableOfContents.Data,
            (int)_volume.TableOfContents.RootAndFolderOffsets[(int)entryKey.EntryIndex]);
        var subUnpacker = new EntryUnpacker(_volume, OutDir, entryPath, _fileIndexesToExtract);
        subTree.TraverseAndUnpack(subUnpacker);
        return;
    }

    // File entry: skip when a specific-index filter is active and this one is not in it.
    if (_fileIndexesToExtract.Count != 0 && !_fileIndexesToExtract.Contains((int)entryKey.EntryIndex))
        return;

    if (!_volume.IsPatchVolume || _volume.NoUnpack)
        Program.Log($"[:] Extracting: {entryPath}");

    // Look up the file info node matching this entry index; only unpack when found.
    var infoTree = new FileInfoBTree(_volume.TableOfContents.Data, (int)_volume.TableOfContents.NodeTreeOffset);
    var infoKey = new FileInfoKey(entryKey.EntryIndex);
    uint foundIndex = infoTree.SearchIndexByKey(infoKey);
    if (foundIndex != FileInfoKey.InvalidIndex)
        _volume.UnpackNode(infoKey, fullEntryPath);
}
/// <summary>
/// Walks all file info entries in segment order and verifies that each entry's
/// segment index directly follows the segments occupied by the previous entry,
/// repairing any index that does not line up.
/// </summary>
/// <returns><c>true</c> if every segment index was already valid; <c>false</c> if at least one entry had to be fixed.</returns>
public bool TryCheckAndFixInvalidSegmentIndexes()
{
    bool valid = true;
    List<FileInfoKey> segmentSortedFiles = FileInfos.Entries.OrderBy(e => e.SegmentIndex).ToList();
    for (int i = 0; i < segmentSortedFiles.Count - 1; i++)
    {
        FileInfoKey current = segmentSortedFiles[i];
        FileInfoKey next = segmentSortedFiles[i + 1];

        // Integer ceiling division. The previous MathF.Ceiling(size / (float)SEGMENT_SIZE)
        // lost precision for compressed sizes above ~16 MB (float has a 24-bit mantissa),
        // which could yield an off-by-one segment count.
        uint segmentsTakenByCurrent = (uint)((current.CompressedSize + (ulong)SEGMENT_SIZE - 1) / (ulong)SEGMENT_SIZE);

        if (next.SegmentIndex != current.SegmentIndex + segmentsTakenByCurrent)
        {
            valid = false;
            // Re-anchor the next entry right after the current entry's segments.
            next.SegmentIndex = current.SegmentIndex + segmentsTakenByCurrent;
        }
    }
    return valid;
}
/// <summary>
/// Registers a new global path.
/// </summary>
/// <param name="path">Volume-relative file path to register.</param>
public void RegisterFilePath(string path)
{
    // Normalize separators so the volume always stores forward slashes.
    path = path.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
    string ext = Path.GetExtension(path);

    // Allocate a fresh file info entry at the next free entry/segment slots.
    FileInfoKey newKey = new FileInfoKey(this.NextEntryIndex());
    newKey.SegmentIndex = this.NextSegmentIndex();
    if (!ParentVolume.NoCompress)
        newKey.Flags |= FileInfoFlags.Compressed;

    newKey.CompressedSize = 1; // Important for segments to count as at least one
    newKey.UncompressedSize = 1; // Same

    /* TODO: Needs more investigation on compressed files
     * if ( (path.StartsWith("crs/") && !path.EndsWith("stream"))
     *   || (path.StartsWith("car/") && !path.EndsWith("bin"))
     *   || (!path.StartsWith("replay/") && !path.StartsWith("carsound/") && !path.StartsWith("car")))
     *     newKey.Flags |= FileInfoFlags.Compressed;
     */
    FileInfos.Entries.Add(newKey);

    // Walk each folder component (the last part is the file name, handled below),
    // creating directory entries on demand and tracking the current folder index.
    string[] parts = path.Split(Path.AltDirectorySeparatorChar);
    uint currentDirIndex = 0;
    for (uint i = 0; i < parts.Length - 1; i++)
    {
        currentDirIndex = DirectoryExists(Files[(int)currentDirIndex], parts[i], out uint existingDirIndex)
            ? existingDirIndex                          // Folder already present, descend into it
            : RegisterDirectory(currentDirIndex, parts[i]); // Otherwise create it under the current folder
    }

    RegisterFile(currentDirIndex, Path.GetFileNameWithoutExtension(path), ext);
}
/// <summary>
/// Updates a file entry, and adjusts all the key segments if needed.
/// </summary>
/// <param name="fileInfo">Entry whose sizes are being updated.</param>
/// <param name="newCompressedSize">New compressed size in bytes.</param>
/// <param name="newUncompressedSize">New uncompressed size in bytes.</param>
private void UpdateKeyAndRetroactiveAdjustSegments(FileInfoKey fileInfo, uint newCompressedSize, uint newUncompressedSize)
{
    // Integer ceiling division. The previous MathF.Ceiling(size / (float)SEGMENT_SIZE)
    // lost precision for sizes above ~16 MB (24-bit float mantissa).
    uint oldTotalSegments = (uint)((fileInfo.CompressedSize + (ulong)SEGMENT_SIZE - 1) / (ulong)SEGMENT_SIZE);
    uint newTotalSegments = (uint)((newCompressedSize + (ulong)SEGMENT_SIZE - 1) / (ulong)SEGMENT_SIZE);

    fileInfo.CompressedSize = newCompressedSize;
    fileInfo.UncompressedSize = newUncompressedSize;

    // Only cascade when the number of segments this file occupies actually changed.
    if (oldTotalSegments != newTotalSegments)
    {
        List<FileInfoKey> orderedKeySegments = FileInfos.Entries.OrderBy(e => e.SegmentIndex).ToList();

        // Guard: IndexOf returns -1 when the entry isn't registered, which would
        // previously index the list at -1 and throw.
        int start = orderedKeySegments.IndexOf(fileInfo);
        if (start < 0)
            return;

        for (int i = start; i < orderedKeySegments.Count - 1; i++)
        {
            FileInfoKey currentFileInfo = orderedKeySegments[i];
            FileInfoKey nextFileInfo = orderedKeySegments[i + 1];
            uint segmentCount = (uint)((currentFileInfo.CompressedSize + (ulong)SEGMENT_SIZE - 1) / (ulong)SEGMENT_SIZE);

            // New file pushes older files beyond segment size? Update them by the amount of segments that increases
            if (nextFileInfo.SegmentIndex != currentFileInfo.SegmentIndex + segmentCount)
                nextFileInfo.SegmentIndex = currentFileInfo.SegmentIndex + segmentCount;
        }
    }
}
/// <summary>
/// Gets a display label for a file info key, or an empty string when unavailable.
/// </summary>
/// <param name="key">Key to describe; may be null.</param>
/// <returns>The key's string representation, or <c>""</c> for null.</returns>
internal static string GetLabelForFileInfoKey(FileInfoKey key)
{
    // BUG FIX: FileInfoKey is a class (constructed with `new FileInfoKey(...)` elsewhere
    // in this file), not an enum, so Enum.GetName(typeof(FileInfoKey), key) threw
    // ArgumentException at runtime. Use the key's own string representation instead.
    return key?.ToString() ?? "";
}
/// <summary>
/// Modifies a current key as a new entry.
/// </summary>
/// <param name="infoKey">Entry to modify.</param>
/// <param name="newEntryPath">Path for the entry.</param>
/// <returns>The file info key now associated with the new entry index — either a
/// fresh copy of <paramref name="infoKey"/>, or <paramref name="infoKey"/> itself
/// re-indexed and moved to the end of the entry list.</returns>
private FileInfoKey ModifyExistingEntryAsNew(FileInfoKey infoKey, string newEntryPath)
{
    // Check paths
    string[] pathParts = newEntryPath.Split(Path.AltDirectorySeparatorChar);
    FileEntryBTree currentSubTree = Files[0]; // Root of the entry tree; descended per path component below.
    uint newKeyIndex = NextEntryIndex();

    // Find the entry key and update it
    for (int i = 0; i < pathParts.Length; i++)
    {
        if (i != pathParts.Length - 1)
        {
            // Check actual folders
            int keyIndex = FileNames.GetIndexOfString(pathParts[i]);
            if (keyIndex == -1)
            {
                throw new ArgumentNullException($"Entry Key for file info key ({infoKey}) has missing file name key: {pathParts[i]}");
            }

            FileEntryKey subTreeKey = currentSubTree.GetFolderEntryByNameIndex((uint)keyIndex);
            if (subTreeKey is null)
            {
                throw new InvalidOperationException($"Tried to modify existing key {newEntryPath} (str index: {keyIndex}), but missing in entries");
            }
            else if (!subTreeKey.Flags.HasFlag(EntryKeyFlags.Directory))
            {
                throw new InvalidOperationException($"Tried to modify existing key {newEntryPath} but entry key ({subTreeKey}) is not marked as directory. Is the volume corrupted?");
            }

            // Descend into the folder's own subtree for the next component.
            currentSubTree = Files[(int)subTreeKey.EntryIndex];
        }
        else
        {
            // Got the location for the subtree
            // Get our actual file entry key
            FileEntryKey entryKey = currentSubTree.Entries.FirstOrDefault(e => e.EntryIndex == infoKey.FileIndex);
            if (entryKey is null)
            {
                throw new ArgumentNullException($"Entry Key for file info key ({infoKey}) is missing while modifying.");
            }

            // Update it actually
            // NOTE: this runs before the scan below, so the re-pointed key no longer
            // matches infoKey.FileIndex there — the scan only finds OTHER entries
            // still sharing the old file index.
            entryKey.EntryIndex = newKeyIndex;
        }
    }

    // Find the original entry key, copy from it, add to the tree
    // (i.e. another entry elsewhere still references the old file index, so the old
    // file info must be kept and a duplicate created under the new index).
    foreach (FileEntryBTree tree in Files)
    {
        foreach (FileEntryKey child in tree.Entries)
        {
            if (child.EntryIndex == infoKey.FileIndex) // If the entry key exists, add it
            {
                var fileInfo = new FileInfoKey(newKeyIndex);
                fileInfo.CompressedSize = infoKey.CompressedSize;
                fileInfo.UncompressedSize = infoKey.UncompressedSize;
                fileInfo.SegmentIndex = NextSegmentIndex(); // Pushed to the end, so technically the segment is new, will be readjusted at the end anyway
                fileInfo.Flags = infoKey.Flags;
                FileInfos.Entries.Add(fileInfo);
                return (fileInfo);
            }
        }
    }

    // If it wasn't found, then we already have it
    infoKey.FileIndex = newKeyIndex;

    // Move it to the last
    FileInfos.Entries.Remove(infoKey);
    FileInfos.Entries.Add(infoKey);
    return (infoKey);
}
/// <summary>
/// Packs a single input file into the output volume: optionally re-registers it as a
/// new entry, reuses a cached packed copy when valid, otherwise compresses (when
/// flagged), encrypts, and writes it into the temp output directory, updating the
/// file info key's sizes/segments and the new packing cache along the way.
/// </summary>
/// <param name="packCache">Cache from a previous pack run, used to skip unchanged files.</param>
/// <param name="outputDir">Destination volume directory (files are written to "{outputDir}_temp").</param>
/// <param name="packAllAsNewEntries">When true, existing (non-added) files are re-registered under new entry indexes.</param>
/// <param name="newCache">Cache being built for this pack run.</param>
/// <param name="tocFile">TOC entry key for the file being packed.</param>
/// <param name="file">Input file (source path, volume path, size, timestamps).</param>
private void PackFile(PackCache packCache, string outputDir, bool packAllAsNewEntries, PackCache newCache, FileEntryKey tocFile, InputPackEntry file)
{
    Program.Log($"[:] Pack: Processing {file.VolumeDirPath}");
    FileInfoKey key = FileInfos.GetByFileIndex(tocFile.EntryIndex);

    if (packAllAsNewEntries && !file.IsAddedFile)
    {
        // Existing file being repacked as a brand-new entry: re-point the TOC at a new index.
        uint oldEntryFileIndex = key.FileIndex;
        key = ModifyExistingEntryAsNew(key, file.VolumeDirPath);
        Program.Log($"[:] Entry key for {file.VolumeDirPath} changed as new: {oldEntryFileIndex} -> {key.FileIndex}");
    }

    // Both start at the raw file size; compressed size is overwritten below if compression runs.
    uint newUncompressedSize = (uint)file.FileSize;
    uint newCompressedSize = (uint)file.FileSize;
    string pfsFilePath = PDIPFSPathResolver.GetPathFromSeed(tocFile.EntryIndex);

    // Check for cached file
    if (ParentVolume.UsePackingCache && packCache.HasValidCachedEntry(file, key.FileIndex, out PackedCacheEntry validCacheEntry))
    {
        string oldFilePath = Path.Combine(outputDir, pfsFilePath);
        if (File.Exists(oldFilePath))
        {
            // Cache hit with the packed file still on disk: move it into the temp
            // output as-is and take the sizes from the cache entry.
            newCache.Entries.Add(file.VolumeDirPath, validCacheEntry);
            Program.Log($"[:] Pack: {file.VolumeDirPath} found in cache file, does not need compressing/encrypting");

            string movePath = Path.Combine($"{outputDir}_temp", pfsFilePath);
            Directory.CreateDirectory(Path.GetDirectoryName(movePath));
            File.Move(oldFilePath, Path.Combine($"{outputDir}_temp", pfsFilePath));
            UpdateKeyAndRetroactiveAdjustSegments(key, (uint)validCacheEntry.CompressedFileSize, (uint)validCacheEntry.FileSize);
            return;
        }
        else
        {
            // Cache entry is valid but its packed file vanished; fall through and repack.
            Program.Log($"[:] Pack: {file.VolumeDirPath} found in cache file but actual file is missing ({pfsFilePath}) - recreating it");
        }
    }

    byte[] fileData = File.ReadAllBytes(file.FullPath);
    if (ParentVolume.NoCompress)
    {
        // Global no-compress overrides whatever the key previously said.
        key.Flags &= ~FileInfoFlags.Compressed;
    }
    else if (key.Flags.HasFlag(FileInfoFlags.Compressed))
    {
        Program.Log($"[:] Pack: Compressing {file.VolumeDirPath}");
        fileData = MiscUtils.ZlibCompress(fileData);
        newCompressedSize = (uint)fileData.Length;
    }

    Program.Log($"[:] Pack: Saving and encrypting {file.VolumeDirPath} -> {pfsFilePath}");

    // Will also update the ones we pre-registered
    UpdateKeyAndRetroactiveAdjustSegments(key, newCompressedSize, newUncompressedSize);

    // NOTE(review): encrypts in place (source and destination are the same buffer).
    ParentVolume.Keyset.CryptBytes(fileData, fileData, key.FileIndex);

    string outputFile = Path.Combine($"{outputDir}_temp", pfsFilePath);
    Directory.CreateDirectory(Path.GetDirectoryName(outputFile));
    File.WriteAllBytes(outputFile, fileData);

    if (ParentVolume.UsePackingCache)
    {
        // Add to our new cache
        var newCacheEntry = new PackedCacheEntry()
        {
            FileIndex = tocFile.EntryIndex,
            FileSize = newUncompressedSize,
            LastModified = file.LastModified,
            VolumePath = file.VolumeDirPath,
            CompressedFileSize = newCompressedSize,
        };
        newCache.Entries.Add(file.VolumeDirPath, newCacheEntry);
    }
}
/// <summary>
/// Unpacks a single file node to disk, either from the main volume stream or, for
/// patch volumes, from the loose PDIPFS file on disk. Decrypts and, when the node is
/// flagged compressed, inflates the data.
/// </summary>
/// <param name="nodeKey">File info node describing the file (index, segment, sizes, flags).</param>
/// <param name="filePath">Destination path on disk.</param>
/// <returns>true when the file was written; false on NoUnpack, missing/BSDIFF patch file, or decompression failure.</returns>
public bool UnpackNode(FileInfoKey nodeKey, string filePath)
{
    // Byte offset of the node's data inside the main volume stream.
    ulong offset = DataOffset + (ulong)nodeKey.SegmentIndex * GTVolumeTOC.SEGMENT_SIZE;
    uint uncompressedSize = nodeKey.UncompressedSize;

    if (!IsPatchVolume)
    {
        if (NoUnpack)
        {
            return (false);
        }

        Stream.Position = (long)offset;
        if (nodeKey.Flags.HasFlag(FileInfoFlags.Compressed))
        {
            // Verifies the (encrypted) compression header before committing to inflate.
            if (!MiscUtils.DecryptCheckCompression(Stream, Keyset, nodeKey.FileIndex, uncompressedSize))
            {
                Program.Log($"[X] Failed to decompress file ({filePath}) while unpacking file info key {nodeKey.FileIndex}", forceConsolePrint: true);
                return (false);
            }

            Directory.CreateDirectory(Path.GetDirectoryName(filePath));
            // Rewind over the bytes the compression check consumed
            // (presumably an 8-byte header — TODO confirm against DecryptCheckCompression).
            Stream.Position -= 8;
            MiscUtils.DecryptAndInflateToFile(Keyset, Stream, nodeKey.FileIndex, uncompressedSize, filePath, false);
        }
        else
        {
            Directory.CreateDirectory(Path.GetDirectoryName(filePath));
            MiscUtils.DecryptToFile(Keyset, Stream, nodeKey.FileIndex, uncompressedSize, filePath, false);
        }
    }
    else
    {
        // Patch volumes store each node as a loose file whose path derives from the file index.
        string patchFilePath = PDIPFSPathResolver.GetPathFromSeed(nodeKey.FileIndex);
        string localPath = this.PatchVolumeFolder + "/" + patchFilePath;

        if (NoUnpack)
        {
            return (false);
        }

        /* I'm really not sure if there's a better way to do this.
         * Volume files, at least nodes don't seem to even store any special flag whether
         * it is located within an actual volume file or a patch volume. The only thing that is different is the sector index.. Sometimes node index when it's updated
         * It's slow, but somewhat works I guess..
         * */
        if (!File.Exists(localPath))
        {
            return (false);
        }

        Program.Log($"[:] Unpacking: {patchFilePath} -> {filePath}");

        using var fs = new FileStream(localPath, FileMode.Open);
        if (fs.Length >= 7)
        {
            // Sniff for a BSDIFF header — binary patches can't be unpacked yet.
            Span<byte> magic = stackalloc byte[6];
            fs.Read(magic);
            if (Encoding.ASCII.GetString(magic).StartsWith("BSDIFF"))
            {
                Program.Log($"[X] Detected BSDIFF file for {filePath} ({patchFilePath}), can not unpack yet. (fileID {nodeKey.FileIndex})", forceConsolePrint: true);
                return (false);
            }

            fs.Position = 0;
        }

        if (nodeKey.Flags.HasFlag(FileInfoFlags.Compressed))
        {
            if (!MiscUtils.DecryptCheckCompression(fs, Keyset, nodeKey.FileIndex, uncompressedSize))
            {
                Program.Log($"[X] Failed to decompress file {filePath} ({patchFilePath}) while unpacking file info key {nodeKey.FileIndex}", forceConsolePrint: true);
                return (false);
            }

            Directory.CreateDirectory(Path.GetDirectoryName(filePath));
            // Unlike the volume-stream branch, this restarts from the beginning of the
            // loose file rather than rewinding 8 bytes (different overload, no size arg).
            fs.Position = 0;
            MiscUtils.DecryptAndInflateToFile(Keyset, fs, nodeKey.FileIndex, filePath);
        }
        else
        {
            Directory.CreateDirectory(Path.GetDirectoryName(filePath));
            MiscUtils.DecryptToFile(Keyset, fs, nodeKey.FileIndex, filePath);
        }
    }

    return (true);
}