private void GenerateCleanedESM(CleanedESM directive)
{
    var filename = Path.GetFileName(directive.To);
    var gameFile = Path.Combine(GameFolder, "Data", filename);
    Info($"Generating cleaned ESM for {filename}");
    if (!File.Exists(gameFile)) throw new InvalidDataException($"Missing {filename} at {gameFile}");

    // The patch only applies cleanly to the unmodified game file, so verify its hash first.
    Status($"Hashing game version of {filename}");
    var sha = Utils.FileSHA256(gameFile);
    if (sha != directive.SourceESMHash)
        throw new InvalidDataException(
            $"Cannot patch {filename} from the game folder because the hashes do not match. Have you already cleaned the file?");

    var patchData = directive.SourceData.FromBase64();
    var toFile = Path.Combine(Outputfolder, directive.To);
    Status($"Patching {filename}");

    // Rebuild the cleaned ESM by applying the BSDiff patch to the game's copy.
    // FileMode.Create truncates any pre-existing output, and both streams are disposed when done.
    using (var input = File.OpenRead(gameFile))
    using (var output = File.Open(toFile, FileMode.Create))
    {
        BSDiff.Apply(input, () => new MemoryStream(patchData), output);
    }
}
private void GenerateCleanedESM(CleanedESM directive)
{
    var filename = Path.GetFileName(directive.To);
    var gameFile = Path.Combine(GameFolder, "Data", filename);
    Info($"Generating cleaned ESM for {filename}");
    if (!File.Exists(gameFile)) throw new InvalidDataException($"Missing {filename} at {gameFile}");

    Status($"Hashing game version of {filename}");
    var sha = gameFile.FileHash();
    if (sha != directive.SourceESMHash)
        throw new InvalidDataException(
            $"Cannot patch {filename} from the game folder because the hashes do not match. Have you already cleaned the file?");

    var patchData = LoadBytesFromPath(directive.SourceDataID);
    var toFile = Path.Combine(OutputFolder, directive.To);
    Status($"Patching {filename}");
    using (var output = File.Open(toFile, FileMode.Create))
    using (var input = File.OpenRead(gameFile))
    {
        BSDiff.Apply(input, () => new MemoryStream(patchData), output);
    }
}
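// For context: a hedged sketch of the compile-time counterpart that would produce the
// patch data consumed above (directive.SourceData / directive.SourceDataID). Nothing here
// is confirmed by the methods above: MakeCleanedEsmPatch is a hypothetical helper name,
// and BSDiff.Create is an *assumed* counterpart to the BSDiff.Apply call, not a known API.
private static byte[] MakeCleanedEsmPatch(string gameEsmPath, string cleanedEsmPath)
{
    using (var ms = new MemoryStream())
    {
        // Diff the original game ESM against its cleaned copy, so the installer can
        // rebuild the cleaned file later without shipping the ESM itself.
        BSDiff.Create(File.ReadAllBytes(gameEsmPath), File.ReadAllBytes(cleanedEsmPath), ms); // assumed signature
        return ms.ToArray();
    }
}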
private void InstallArchive(Archive archive, string absolutePath, IGrouping<string, FromArchive> grouping)
{
    Status($"Extracting {archive.Name}");

    // Resolve each directive's source file through the VFS before staging.
    var vFiles = grouping.Select(g =>
    {
        g.FromFile = VFS.FileForArchiveHashPath(g.ArchiveHashPath);
        return g;
    }).ToList();

    var onFinish = VFS.Stage(vFiles.Select(f => f.FromFile).Distinct());

    Status("Copying files for {0}", archive.Name);
    vFiles.DoIndexed((idx, file) =>
    {
        Utils.Status("Installing files", idx * 100 / vFiles.Count);
        File.Copy(file.FromFile.StagedPath, Path.Combine(Outputfolder, file.To));
    });

    Status("Unstaging files");
    onFinish();

    // Now patch all the files from this archive
    foreach (var toPatch in grouping.OfType<PatchedFromArchive>())
    {
        Status("Patching {0}", Path.GetFileName(toPatch.To));

        // Read in the patch data
        var patchData = toPatch.Patch.FromBase64();
        var toFile = Path.Combine(Outputfolder, toPatch.To);
        var oldData = new MemoryStream(File.ReadAllBytes(toFile));

        // Remove the file we're about to patch, then rewrite it from the old data plus the patch
        File.Delete(toFile);
        using (var outStream = File.OpenWrite(toFile))
        {
            BSDiff.Apply(oldData, () => new MemoryStream(patchData), outStream);
        }
    }
}
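// DoIndexed is not defined in this snippet; a minimal sketch of what such an extension
// presumably looks like, assuming it just walks the sequence and hands each element to
// the action with its zero-based index so the progress percentage above can be computed.
// (Requires using System and System.Collections.Generic.)
public static class EnumerableIndexExtensions
{
    public static void DoIndexed<T>(this IEnumerable<T> coll, Action<int, T> action)
    {
        var idx = 0;
        foreach (var item in coll)
            action(idx++, item);
    }
}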
public static void GenerateMerges(Installer installer)
{
    installer.ModList
        .Directives
        .OfType<MergedPatch>()
        .PMap(m =>
        {
            Utils.LogStatus($"Generating zEdit merge: {m.To}");

            // Concatenate the source plugins in order, then apply the stored patch
            // to turn that blob into the merged plugin.
            var srcData = m.Sources
                .Select(s => File.ReadAllBytes(Path.Combine(installer.Outputfolder, s.RelativePath)))
                .ConcatArrays();
            var patchData = installer.LoadBytesFromPath(m.PatchID);

            using (var fs = File.Open(Path.Combine(installer.Outputfolder, m.To), FileMode.Create))
            {
                BSDiff.Apply(new MemoryStream(srcData), () => new MemoryStream(patchData), fs);
            }
        });
}
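// For context: a hedged sketch of how a MergedPatch's PatchID data could be produced at
// compile time so that GenerateMerges can replay it. MakeMergePatch is a hypothetical
// helper name and BSDiff.Create is an assumed counterpart to BSDiff.Apply; only
// ConcatArrays is confirmed by the code above.
public static byte[] MakeMergePatch(IEnumerable<string> sourcePaths, string mergedPluginPath)
{
    // Concatenate the unmerged sources exactly as GenerateMerges will at install time...
    var srcData = sourcePaths.Select(File.ReadAllBytes).ConcatArrays();

    // ...then diff that blob against the zEdit-produced merged plugin.
    using (var ms = new MemoryStream())
    {
        BSDiff.Create(srcData, File.ReadAllBytes(mergedPluginPath), ms); // assumed signature
        return ms.ToArray();
    }
}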
private async Task InstallArchive(WorkQueue queue, Archive archive, string absolutePath, IGrouping<string, FromArchive> grouping)
{
    Status($"Extracting {archive.Name}");

    List<FromArchive> vFiles = grouping.Select(g =>
    {
        g.FromFile = VFS.Index.FileForArchiveHashPath(g.ArchiveHashPath);
        return g;
    }).ToList();

    var onFinish = await VFS.Stage(vFiles.Select(f => f.FromFile).Distinct());

    Status($"Copying files for {archive.Name}");

    void CopyFile(string from, string to, bool useMove)
    {
        if (File.Exists(to))
        {
            var fi = new FileInfo(to);
            if (fi.IsReadOnly)
                fi.IsReadOnly = false;
            File.Delete(to);
        }

        if (File.Exists(from))
        {
            var fi = new FileInfo(from);
            if (fi.IsReadOnly)
                fi.IsReadOnly = false;
        }

        if (useMove)
            File.Move(from, to);
        else
            File.Copy(from, to);

        // If we don't do this, the file will keep the last-modified date it had when it was
        // compressed into an archive, which isn't really what we want for files installed
        // from archives.
        File.SetLastWriteTime(to, DateTime.Now);
    }

    await vFiles.GroupBy(f => f.FromFile)
        .PDoIndexed(queue, (idx, group) =>
        {
            Utils.Status("Installing files", idx * 100 / vFiles.Count);

            // Move the staged file to its first destination, then copy that file to any duplicates.
            var firstDest = Path.Combine(OutputFolder, group.First().To);
            CopyFile(group.Key.StagedPath, firstDest, true);

            foreach (var copy in group.Skip(1))
            {
                var nextDest = Path.Combine(OutputFolder, copy.To);
                CopyFile(firstDest, nextDest, false);
            }
        });

    Status("Unstaging files");
    onFinish();

    // Now patch all the files from this archive
    foreach (var toPatch in grouping.OfType<PatchedFromArchive>())
    {
        Status($"Patching {Path.GetFileName(toPatch.To)}");

        // Read in the patch data
        var patchData = LoadBytesFromPath(toPatch.PatchID);
        var toFile = Path.Combine(OutputFolder, toPatch.To);
        var oldData = new MemoryStream(File.ReadAllBytes(toFile));

        // Remove the file we're about to patch
        File.Delete(toFile);

        // Patch it
        using (var outStream = File.Open(toFile, FileMode.Create))
        {
            BSDiff.Apply(oldData, () => new MemoryStream(patchData), outStream);
        }

        // Verify the patched result against the expected hash
        Status($"Verifying Patch {Path.GetFileName(toPatch.To)}");
        var resultSha = toFile.FileHash();
        if (resultSha != toPatch.Hash)
            throw new InvalidDataException($"Invalid Hash for {toPatch.To} after patching");
    }
}
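// The FileHash extension used for verification above is defined elsewhere; this is only a
// minimal sketch of the shape of such a helper. SHA-256 and Base64 output are assumptions
// for illustration, not the confirmed algorithm or encoding.
public static class HashExtensions
{
    public static string FileHash(this string path)
    {
        using (var sha = System.Security.Cryptography.SHA256.Create())
        using (var stream = File.OpenRead(path))
        {
            // Stream the file through the hasher rather than loading it into memory.
            return Convert.ToBase64String(sha.ComputeHash(stream));
        }
    }
}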
private void InstallArchive(Archive archive, string absolutePath, IGrouping<string, FromArchive> grouping)
{
    Status("Extracting {0}", archive.Name);

    // Group directives by their path inside the archive; several directives may want the same entry.
    var files = grouping.GroupBy(e => e.From)
        .ToDictionary(e => e.Key);

    using (var a = new ArchiveFile(absolutePath))
    {
        a.Extract(entry =>
        {
            if (files.TryGetValue(entry.FileName, out var directives))
            {
                // Extract each wanted entry directly to its first destination.
                var directive = directives.First();
                var absolute = Path.Combine(Outputfolder, directive.To);
                if (absolute.FileExists())
                    File.Delete(absolute);
                return File.OpenWrite(absolute);
            }
            // Returning null tells the extractor to skip this entry.
            return null;
        });
    }

    Status("Copying duplicated files for {0}", archive.Name);
    foreach (var dups in files.Where(e => e.Value.Count() > 1).Select(v => v.Value))
    {
        var ffrom = dups.First();
        var fromPath = Path.Combine(Outputfolder, ffrom.To);
        foreach (var to in dups.Skip(1))
        {
            var toPath = Path.Combine(Outputfolder, to.To);
            if (toPath.FileExists())
                File.Delete(toPath);
            File.Copy(fromPath, toPath);
        }
    }

    // Now patch all the files from this archive
    foreach (var toPatch in grouping.OfType<PatchedFromArchive>())
    {
        Status("Patching {0}", Path.GetFileName(toPatch.To));

        // Read in the patch data
        var patchData = toPatch.Patch.FromBase64();
        var toFile = Path.Combine(Outputfolder, toPatch.To);
        var oldData = new MemoryStream(File.ReadAllBytes(toFile));

        // Remove the file we're about to patch, then rewrite it from the old data plus the patch
        File.Delete(toFile);
        using (var outStream = File.OpenWrite(toFile))
        {
            BSDiff.Apply(oldData, () => new MemoryStream(patchData), outStream);
        }
    }
}
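// The FileExists string extension used above is defined elsewhere in the codebase; a
// minimal sketch, assuming it is just a convenience wrapper around File.Exists so path
// strings can be queried fluently.
public static class PathExtensions
{
    public static bool FileExists(this string path) => File.Exists(path);
}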