/// <summary>
/// Checks whether the packing cache holds a still-valid entry for the provided input file.
/// An entry is valid when its file index, file size and last-modified timestamp all match.
/// </summary>
/// <param name="inputFile">Input file to look up (keyed by its volume directory path).</param>
/// <param name="fileIndex">Expected file index within the volume table of contents.</param>
/// <param name="cacheEntry">The matching cache entry when valid; otherwise null.</param>
/// <returns>true when a valid cached entry exists; otherwise false.</returns>
public bool HasValidCachedEntry(InputPackEntry inputFile, uint fileIndex, out PackedCacheEntry cacheEntry)
{
    if (Entries.TryGetValue(inputFile.VolumeDirPath, out PackedCacheEntry cachedEntry)
        && cachedEntry.FileIndex == fileIndex
        && cachedEntry.FileSize == inputFile.FileSize
        && cachedEntry.LastModified.Equals(inputFile.LastModified))
    {
        cacheEntry = cachedEntry;
        return true;
    }

    // Not cached, or the file changed since it was cached.
    cacheEntry = null;
    return false;
}
/// <summary>
/// Recursively scans an input directory and registers every file found for repacking.
/// File paths are sorted ordinal-case-insensitively so processing order (and therefore
/// cache behavior) is deterministic across runs.
/// </summary>
/// <param name="inputDir">Root directory containing the files to pack.</param>
public void RegisterEntriesToRepack(string inputDir)
{
    string[] fileNames = Directory.GetFiles(inputDir, "*", SearchOption.AllDirectories);
    Array.Sort(fileNames, StringComparer.OrdinalIgnoreCase);

    foreach (string fileName in fileNames)
    {
        var file = new FileInfo(fileName);

        var entry = new InputPackEntry();
        entry.FullPath = file.FullName;
        // Path relative to the input dir, normalized to forward slashes (volume path convention).
        // NOTE(review): trims '\\' only — assumes Windows path separators; confirm if
        // cross-platform input is expected.
        entry.VolumeDirPath = file.FullName.AsSpan(inputDir.Length).TrimStart('\\').ToString().Replace('\\', '/');
        entry.FileSize = file.Length;

        // Truncate the timestamp to whole seconds with an unspecified kind so it compares
        // consistently against values round-tripped through the packing cache.
        entry.LastModified = new DateTime(file.LastWriteTime.Year, file.LastWriteTime.Month, file.LastWriteTime.Day,
            file.LastWriteTime.Hour, file.LastWriteTime.Minute, file.LastWriteTime.Second, DateTimeKind.Unspecified);

        FilesToPack.Add(entry.VolumeDirPath, entry);
    }
}
/// <summary>
/// Compresses (when flagged), encrypts and writes a single input file into the output volume,
/// reusing the previously packed file when the packing cache says it is still up to date.
/// </summary>
/// <param name="packCache">Cache from the previous packing run, used to skip unchanged files.</param>
/// <param name="outputDir">Destination directory of the packed volume; packed files go to "{outputDir}_temp".</param>
/// <param name="packAllAsNewEntries">When true, pre-existing entries are re-registered under new file indices.</param>
/// <param name="newCache">Cache being built for this run; receives an entry for every packed file.</param>
/// <param name="tocFile">Table-of-contents key of the file being packed.</param>
/// <param name="file">Input file to pack.</param>
private void PackFile(PackCache packCache, string outputDir, bool packAllAsNewEntries, PackCache newCache, FileEntryKey tocFile, InputPackEntry file)
{
    Program.Log($"[:] Pack: Processing {file.VolumeDirPath}");

    FileInfoKey key = FileInfos.GetByFileIndex(tocFile.EntryIndex);

    if (packAllAsNewEntries && !file.IsAddedFile)
    {
        // Re-register the existing entry under a fresh file index so the original data stays untouched.
        uint oldEntryFileIndex = key.FileIndex;
        key = ModifyExistingEntryAsNew(key, file.VolumeDirPath);
        Program.Log($"[:] Entry key for {file.VolumeDirPath} changed as new: {oldEntryFileIndex} -> {key.FileIndex}");
    }

    uint newUncompressedSize = (uint)file.FileSize;
    uint newCompressedSize = (uint)file.FileSize;
    string pfsFilePath = PDIPFSPathResolver.GetPathFromSeed(tocFile.EntryIndex);

    // Cache hit: the file was packed by a previous run and is unchanged, so move the
    // already compressed/encrypted artifact instead of rebuilding it.
    if (ParentVolume.UsePackingCache && packCache.HasValidCachedEntry(file, key.FileIndex, out PackedCacheEntry validCacheEntry))
    {
        string oldFilePath = Path.Combine(outputDir, pfsFilePath);
        if (File.Exists(oldFilePath))
        {
            newCache.Entries.Add(file.VolumeDirPath, validCacheEntry);
            Program.Log($"[:] Pack: {file.VolumeDirPath} found in cache file, does not need compressing/encrypting");

            string movePath = Path.Combine($"{outputDir}_temp", pfsFilePath);
            Directory.CreateDirectory(Path.GetDirectoryName(movePath));
            // Fix: reuse movePath instead of recomputing the identical Path.Combine a second time.
            File.Move(oldFilePath, movePath);

            UpdateKeyAndRetroactiveAdjustSegments(key, (uint)validCacheEntry.CompressedFileSize, (uint)validCacheEntry.FileSize);
            return;
        }
        else
        {
            Program.Log($"[:] Pack: {file.VolumeDirPath} found in cache file but actual file is missing ({pfsFilePath}) - recreating it");
        }
    }

    byte[] fileData = File.ReadAllBytes(file.FullPath);
    if (ParentVolume.NoCompress)
        key.Flags &= ~FileInfoFlags.Compressed;
    else if (key.Flags.HasFlag(FileInfoFlags.Compressed))
    {
        Program.Log($"[:] Pack: Compressing {file.VolumeDirPath}");
        fileData = MiscUtils.ZlibCompress(fileData);
        newCompressedSize = (uint)fileData.Length;
    }

    Program.Log($"[:] Pack: Saving and encrypting {file.VolumeDirPath} -> {pfsFilePath}");

    // Will also update the ones we pre-registered
    UpdateKeyAndRetroactiveAdjustSegments(key, newCompressedSize, newUncompressedSize);

    // In-place encryption: source and destination buffers are the same array.
    ParentVolume.Keyset.CryptBytes(fileData, fileData, key.FileIndex);

    string outputFile = Path.Combine($"{outputDir}_temp", pfsFilePath);
    Directory.CreateDirectory(Path.GetDirectoryName(outputFile));
    File.WriteAllBytes(outputFile, fileData);

    if (ParentVolume.UsePackingCache)
    {
        // Add to our new cache.
        // NOTE(review): FileIndex records tocFile.EntryIndex, but cache validation above compares
        // against key.FileIndex — these differ when packAllAsNewEntries rewrote the key. Confirm
        // this asymmetry is intended (e.g. caching is unused in that mode).
        var newCacheEntry = new PackedCacheEntry()
        {
            FileIndex = tocFile.EntryIndex,
            FileSize = newUncompressedSize,
            LastModified = file.LastModified,
            VolumePath = file.VolumeDirPath,
            CompressedFileSize = newCompressedSize,
        };
        newCache.Entries.Add(file.VolumeDirPath, newCacheEntry);
    }
}