/// <summary>
/// Checks whether the cache already holds an up-to-date entry for the given input file.
/// An entry is considered valid when its file index, file size and last-modified
/// timestamp all match the input file.
/// </summary>
/// <param name="inputFile">Input file to look up (keyed by its volume directory path).</param>
/// <param name="fileIndex">Expected file index for the entry.</param>
/// <param name="cacheEntry">The matching cache entry when valid; otherwise null.</param>
/// <returns>true when a valid cached entry exists; false otherwise.</returns>
public bool HasValidCachedEntry(InputPackEntry inputFile, uint fileIndex, out PackedCacheEntry cacheEntry)
{
    if (Entries.TryGetValue(inputFile.VolumeDirPath, out PackedCacheEntry found))
    {
        bool upToDate = found.FileIndex == fileIndex
                     && found.FileSize == inputFile.FileSize
                     && found.LastModified.Equals(inputFile.LastModified);
        if (upToDate)
        {
            cacheEntry = found;
            return true;
        }
    }

    // No entry, or it is stale — callers get null on the false path either way.
    cacheEntry = null;
    return false;
}
/// <summary>
/// Reads the specified packing cache, used to speed up the packing process by ignoring files that are already properly packed.
/// </summary>
/// <param name="path">Path to the tab-separated packing-cache file.</param>
public void ReadPackingCache(string path)
{
    using var ts = File.OpenText(path);
    while (!ts.EndOfStream)
    {
        string line = ts.ReadLine();

        // Robustness fix: a stray or trailing blank line previously crashed the
        // whole pack with an IndexOutOfRangeException when indexing args[1..4].
        if (string.IsNullOrWhiteSpace(line))
            continue;

        // Expected columns: VolumePath \t FileIndex \t LastModified \t FileSize \t CompressedFileSize
        string[] args = line.Split("\t");

        var entry = new PackedCacheEntry();
        entry.VolumePath = args[0];
        entry.FileIndex = uint.Parse(args[1]);
        // NOTE(review): DateTime.Parse and the numeric Parse calls use the current
        // culture; if the cache writer emits invariant-culture text this can
        // mis-parse on other locales — confirm against the cache-writing code
        // before switching to CultureInfo.InvariantCulture.
        entry.LastModified = DateTime.Parse(args[2]);
        entry.FileSize = long.Parse(args[3]);
        entry.CompressedFileSize = long.Parse(args[4]);
        _packCache.Entries.Add(entry.VolumePath, entry);
    }
}
/// <summary>
/// Packs a single input file into the output volume: reuses a cached packed file when the
/// packing cache says it is still valid, otherwise reads, optionally compresses, encrypts,
/// and writes the file, recording it in <paramref name="newCache"/> when caching is enabled.
/// </summary>
/// <param name="packCache">Cache read at startup; consulted to skip re-packing unchanged files.</param>
/// <param name="outputDir">Final output directory (staged files go to "{outputDir}_temp").</param>
/// <param name="packAllAsNewEntries">When true, existing (non-added) entries are re-registered under a new file index.</param>
/// <param name="newCache">Cache being built for this run; valid/new entries are added to it.</param>
/// <param name="tocFile">Table-of-contents key for the file being packed.</param>
/// <param name="file">The input file (source path, size, timestamps, volume path).</param>
private void PackFile(PackCache packCache, string outputDir, bool packAllAsNewEntries, PackCache newCache, FileEntryKey tocFile, InputPackEntry file)
{
    Program.Log($"[:] Pack: Processing {file.VolumeDirPath}");
    FileInfoKey key = FileInfos.GetByFileIndex(tocFile.EntryIndex);

    if (packAllAsNewEntries && !file.IsAddedFile)
    {
        // Re-register the existing entry under a fresh file index; `key` is replaced here,
        // so key.FileIndex may differ from tocFile.EntryIndex from this point on.
        uint oldEntryFileIndex = key.FileIndex;
        key = ModifyExistingEntryAsNew(key, file.VolumeDirPath);
        Program.Log($"[:] Entry key for {file.VolumeDirPath} changed as new: {oldEntryFileIndex} -> {key.FileIndex}");
    }

    // Sizes start equal; compressed size is overwritten below only if zlib compression runs.
    uint newUncompressedSize = (uint)file.FileSize;
    uint newCompressedSize = (uint)file.FileSize;

    // NOTE(review): the on-disk path is derived from tocFile.EntryIndex, not key.FileIndex.
    // If ModifyExistingEntryAsNew changed the index above, these can differ — confirm that
    // ModifyExistingEntryAsNew also updates tocFile.EntryIndex (or that this is intended).
    string pfsFilePath = PDIPFSPathResolver.GetPathFromSeed(tocFile.EntryIndex);

    // Check for cached file: skip compress/encrypt entirely when the previous output still exists.
    if (ParentVolume.UsePackingCache && packCache.HasValidCachedEntry(file, key.FileIndex, out PackedCacheEntry validCacheEntry))
    {
        string oldFilePath = Path.Combine(outputDir, pfsFilePath);
        if (File.Exists(oldFilePath))
        {
            // Carry the still-valid entry into the new cache, then move the already-packed
            // file into the temp staging directory instead of rebuilding it.
            newCache.Entries.Add(file.VolumeDirPath, validCacheEntry);
            Program.Log($"[:] Pack: {file.VolumeDirPath} found in cache file, does not need compressing/encrypting");

            string movePath = Path.Combine($"{outputDir}_temp", pfsFilePath);
            Directory.CreateDirectory(Path.GetDirectoryName(movePath));
            File.Move(oldFilePath, Path.Combine($"{outputDir}_temp", pfsFilePath));

            // Sizes come from the cache entry; this also fixes up any pre-registered segments.
            UpdateKeyAndRetroactiveAdjustSegments(key, (uint)validCacheEntry.CompressedFileSize, (uint)validCacheEntry.FileSize);
            return;
        }
        else
        {
            // Cache says valid but the packed file vanished — fall through and rebuild it.
            Program.Log($"[:] Pack: {file.VolumeDirPath} found in cache file but actual file is missing ({pfsFilePath}) - recreating it");
        }
    }

    byte[] fileData = File.ReadAllBytes(file.FullPath);
    if (ParentVolume.NoCompress)
    {
        key.Flags &= ~FileInfoFlags.Compressed;
    }
    else if (key.Flags.HasFlag(FileInfoFlags.Compressed))
    {
        Program.Log($"[:] Pack: Compressing {file.VolumeDirPath}");
        fileData = MiscUtils.ZlibCompress(fileData);
        newCompressedSize = (uint)fileData.Length;
    }

    Program.Log($"[:] Pack: Saving and encrypting {file.VolumeDirPath} -> {pfsFilePath}");

    // Will also update the ones we pre-registered.
    UpdateKeyAndRetroactiveAdjustSegments(key, newCompressedSize, newUncompressedSize);

    // In-place encryption (source and destination buffers are the same array), keyed by file index.
    ParentVolume.Keyset.CryptBytes(fileData, fileData, key.FileIndex);

    string outputFile = Path.Combine($"{outputDir}_temp", pfsFilePath);
    Directory.CreateDirectory(Path.GetDirectoryName(outputFile));
    File.WriteAllBytes(outputFile, fileData);

    if (ParentVolume.UsePackingCache)
    {
        // Add to our new cache.
        // NOTE(review): FileIndex is recorded from tocFile.EntryIndex, but cache validation
        // (HasValidCachedEntry above) compares against key.FileIndex — when
        // packAllAsNewEntries re-indexed the entry these may disagree; verify.
        var newCacheEntry = new PackedCacheEntry()
        {
            FileIndex = tocFile.EntryIndex,
            FileSize = newUncompressedSize,
            LastModified = file.LastModified,
            VolumePath = file.VolumeDirPath,
            CompressedFileSize = newCompressedSize,
        };
        newCache.Entries.Add(file.VolumeDirPath, newCacheEntry);
    }
}