/// <summary>
/// Recreates a cleaned ESM in the output folder by binary-patching the
/// pristine copy shipped with the game using the directive's patch data.
/// </summary>
/// <param name="directive">Directive describing the cleaned ESM to produce.</param>
/// <exception cref="InvalidDataException">
/// Thrown when the game file is missing, or when its hash no longer matches
/// the hash recorded at compile time (e.g. the user already cleaned it).
/// </exception>
private void GenerateCleanedESM(CleanedESM directive)
{
    var esmName = Path.GetFileName(directive.To);
    var sourcePath = Path.Combine(GameFolder, "Data", esmName);
    Info($"Generating cleaned ESM for (unknown)");

    // Guard: the pristine game file must exist before we can patch it.
    if (!File.Exists(sourcePath)) throw new InvalidDataException($"Missing (unknown) at {sourcePath}");

    Status($"Hashing game version of (unknown)");
    var actualHash = sourcePath.FileHash();
    if (actualHash != directive.SourceESMHash)
        throw new InvalidDataException(
            $"Cannot patch (unknown) from the game folder because the hashes do not match. Have you already cleaned the file?");

    var delta = LoadBytesFromPath(directive.SourceDataID);
    var destination = Path.Combine(OutputFolder, directive.To);
    Status($"Patching (unknown)");

    // FileMode.Create truncates any pre-existing output file.
    using (var output = File.Open(destination, FileMode.Create))
    using (var input = File.OpenRead(sourcePath))
    {
        BSDiff.Apply(input, () => new MemoryStream(delta), output);
    }
}
/// <summary>
/// Rebuilds a cleaned ESM in the output folder by binary-patching the game's
/// pristine copy of the file with the directive's embedded (base64) patch data.
/// </summary>
/// <param name="directive">Directive describing the cleaned ESM to produce.</param>
/// <exception cref="InvalidDataException">
/// Thrown when the game file is missing, or when its hash does not match the
/// hash recorded when the modlist was compiled.
/// </exception>
private void GenerateCleanedESM(CleanedESM directive)
{
    var filename = Path.GetFileName(directive.To);
    var game_file = Path.Combine(GameFolder, "Data", filename);
    Info($"Generating cleaned ESM for (unknown)");
    if (!File.Exists(game_file))
    {
        throw new InvalidDataException($"Missing (unknown) at {game_file}");
    }

    Status($"Hashing game version of (unknown)");
    var sha = Utils.FileSHA256(game_file);
    if (sha != directive.SourceESMHash)
    {
        throw new InvalidDataException($"Cannot patch (unknown) from the game folder hashes don't match have you already cleaned the file?");
    }

    var patch_data = directive.SourceData.FromBase64();
    var to_file = Path.Combine(Outputfolder, directive.To);
    Status($"Patching (unknown)");

    // BUGFIX: use FileMode.Create instead of File.OpenWrite — OpenWrite does
    // not truncate, so a pre-existing longer file would keep stale trailing
    // bytes and corrupt the patched ESM.
    using (var output = File.Open(to_file, FileMode.Create))
    // BUGFIX: dispose the input stream; it was previously leaked.
    using (var input = File.OpenRead(game_file))
    {
        BSDiff.Apply(input, () => new MemoryStream(patch_data), output);
    }
}
/// <summary>
/// Round-trip test: creates a patch from <paramref name="src"/> to
/// <paramref name="dest"/> using the requested diff backend, applies it back
/// to <paramref name="src"/>, and asserts the result equals <paramref name="dest"/>.
/// </summary>
public async Task DiffCreateAndApply(byte[] src, byte[] dest, DiffMethod method)
{
    await using var patchStream = new MemoryStream();

    // Build the patch with whichever backend this test case requests.
    if (method == DiffMethod.Default)
        await Utils.CreatePatch(src, dest, patchStream);
    else if (method == DiffMethod.BSDiff)
        BSDiff.Create(src, dest, patchStream);
    else if (method == DiffMethod.OctoDiff)
        OctoDiff.Create(src, dest, patchStream);
    else
        throw new ArgumentOutOfRangeException(nameof(method), method, null);

    patchStream.Position = 0;
    var patch = patchStream.ToArray();

    // Apply the patch and verify the original destination bytes come back.
    await using var resultStream = new MemoryStream();
    Utils.ApplyPatch(new MemoryStream(src), () => new MemoryStream(patch), resultStream);
    Assert.Equal(dest, resultStream.ToArray());
}
/// <summary>
/// Extracts the source files needed by a group of PatchedFromArchive
/// directives from their owning archive, then builds a BSDiff patch for each
/// against its desired output file, storing the patch (base64) on the directive.
/// </summary>
/// <param name="archive_sha">Hash identifying the source archive.</param>
/// <param name="group">Directives whose patches should be generated.</param>
/// <param name="absolute_paths">Lookup used by LoadDataForTo to find target data.</param>
private void BuildArchivePatches(string archive_sha, IEnumerable<PatchedFromArchive> group,
    Dictionary<string, string> absolute_paths)
{
    var archive = IndexedArchives.First(a => a.Hash == archive_sha);
    var paths = group.Select(g => g.From).ToHashSet();
    var streams = new Dictionary<string, MemoryStream>();
    Status($"Extracting {paths.Count} patch files from {archive.Name}");

    // First we fetch the source files from the input archive.
    // (Removed: a large commented-out legacy extraction implementation that
    // used ArchiveFactory — dead code.)
    using (var a = new ArchiveFile(archive.AbsolutePath))
    {
        a.Extract(entry =>
        {
            // Only capture entries that a directive actually sources from.
            if (!paths.Contains(entry.FileName)) return null;
            var result = new MemoryStream();
            streams.Add(entry.FileName, result);
            return result;
        }, false);
    }

    var extracted = streams.ToDictionary(k => k.Key, v => v.Value.ToArray());

    // Now Create the patches
    Status("Building Patches for {0}", archive.Name);
    group.PMap(entry =>
    {
        Info("Patching {0}", entry.To);
        var ss = extracted[entry.From];
        using (var origin = new MemoryStream(ss))
        using (var output = new MemoryStream())
        {
            var a = origin.ReadAll();
            var b = LoadDataForTo(entry.To, absolute_paths);
            BSDiff.Create(a, b, output);
            entry.Patch = output.ToArray().ToBase64();
        }
    });
}
/// <summary>
/// Installs all files sourced from one archive: stages them through the VFS,
/// copies them to the output folder, then applies binary patches for any
/// PatchedFromArchive directives in the group.
/// </summary>
/// <param name="archive">The archive being installed.</param>
/// <param name="absolutePath">On-disk path of the archive (unused here; staging goes through VFS).</param>
/// <param name="grouping">All FromArchive directives sourced from this archive.</param>
private void InstallArchive(Archive archive, string absolutePath, IGrouping<string, FromArchive> grouping)
{
    Status($"Extracting {archive.Name}");
    var vfiles = grouping.Select(g =>
    {
        var file = VFS.FileForArchiveHashPath(g.ArchiveHashPath);
        g.FromFile = file;
        return g;
    }).ToList();

    var on_finish = VFS.Stage(vfiles.Select(f => f.FromFile).Distinct());

    Status("Copying files for {0}", archive.Name);
    vfiles.DoIndexed((idx, file) =>
    {
        Utils.Status($"Installing files", idx * 100 / vfiles.Count);
        File.Copy(file.FromFile.StagedPath, Path.Combine(Outputfolder, file.To));
    });

    Status("Unstaging files");
    on_finish();

    // Now patch all the files from this archive
    foreach (var to_patch in grouping.OfType<PatchedFromArchive>())
    {
        // BUGFIX: removed an unused `patch_stream` MemoryStream that was
        // allocated per iteration and never used.
        Status("Patching {0}", Path.GetFileName(to_patch.To));
        // Read in the patch data
        var patch_data = to_patch.Patch.FromBase64();
        var to_file = Path.Combine(Outputfolder, to_patch.To);
        // Buffer the original bytes in memory, then delete so we can recreate
        // the file patched. old_data is now disposed (it previously was not).
        using (var old_data = new MemoryStream(File.ReadAllBytes(to_file)))
        {
            File.Delete(to_file);
            using (var out_stream = File.OpenWrite(to_file))
            {
                BSDiff.Apply(old_data, () => new MemoryStream(patch_data), out_stream);
            }
        }
    }
}
/// <summary>
/// Rebuilds every zEdit merged patch in the modlist: concatenates the source
/// plugins (already installed in the output folder) and applies the stored
/// binary patch to produce each merged file.
/// </summary>
/// <param name="installer">Installer providing the modlist, output folder, and patch data.</param>
public static void GenerateMerges(Installer installer)
{
    installer.ModList
        .Directives
        .OfType<MergedPatch>()
        .PMap(m =>
        {
            Utils.LogStatus($"Generating zEdit merge: {m.To}");
            var src_data = m.Sources.Select(s => File.ReadAllBytes(Path.Combine(installer.Outputfolder, s.RelativePath)))
                .ConcatArrays();
            var patch_data = installer.LoadBytesFromPath(m.PatchID);
            // BUGFIX: FileMode.Create truncates an existing file. File.OpenWrite
            // does not, so a re-install over a longer previous merge would leave
            // stale trailing bytes in the output plugin.
            using (var fs = File.Open(Path.Combine(installer.Outputfolder, m.To), FileMode.Create))
                BSDiff.Apply(new MemoryStream(src_data), () => new MemoryStream(patch_data), fs);
        });
}
/// <summary>
/// Stages the archive-backed source files for a group of PatchedFromArchive
/// directives via the VFS, then generates a BSDiff patch for each against its
/// target output data, storing the patch (base64) on the directive.
/// </summary>
/// <param name="archive_sha">Hash identifying the source archive.</param>
/// <param name="group">Directives whose patches should be generated.</param>
/// <param name="absolute_paths">Lookup used by LoadDataForTo to find target data.</param>
private void BuildArchivePatches(string archive_sha, IEnumerable<PatchedFromArchive> group,
    Dictionary<string, string> absolute_paths)
{
    var archive = VFS.HashIndex[archive_sha];
    using (var files = VFS.StageWith(group.Select(g => VFS.FileForArchiveHashPath(g.ArchiveHashPath))))
    {
        // Key staged files by their inner-archive path (everything after the
        // leading archive hash component).
        var by_path = files
            .GroupBy(f => string.Join("|", f.Paths.Skip(1)))
            .ToDictionary(f => f.Key, f => f.First());

        // Now Create the patches
        group.PMap(entry =>
        {
            Info("Patching {0}", entry.To);
            var innerKey = string.Join("|", entry.ArchiveHashPath.Skip(1));
            using (var origin = by_path[innerKey].OpenRead())
            using (var output = new MemoryStream())
            {
                var oldBytes = origin.ReadAll();
                var newBytes = LoadDataForTo(entry.To, absolute_paths);
                BSDiff.Create(oldBytes, newBytes, output);
                entry.Patch = output.ToArray().ToBase64();
                Info($"Patch size {entry.Patch.Length} for {entry.To}");
            }
        });
    }
}
/// <summary>
/// Returns a compilation step that detects cleaned core-game ESMs inside mod
/// folders and converts them into CleanedESM directives containing a BSDiff
/// patch from the pristine game file to the cleaned copy.
/// </summary>
/// <returns>
/// A function producing a CleanedESM directive for matching files, or null
/// when the source file is not a cleaned stock ESM.
/// </returns>
private Func<RawSourceFile, Directive> PatchStockESMs()
{
    return source =>
    {
        var filename = Path.GetFileName(source.Path);
        var game_file = Path.Combine(GamePath, "Data", filename);

        // Guard clauses: only a known game ESM, living inside a mod folder,
        // whose pristine counterpart still exists in the game's Data folder.
        if (!Consts.GameESMs.Contains(filename)) return null;
        if (!source.Path.StartsWith("mods\\")) return null;
        if (!File.Exists(game_file)) return null;

        Info($"A ESM named (unknown) was found in a mod that shares a name with a core game ESMs, it is assumed this is a cleaned ESM and it will be binary patched.");
        var result = source.EvolveTo<CleanedESM>();
        result.SourceESMHash = VFS.Lookup(game_file).Hash;

        Status($"Generating patch of (unknown)");
        using (var ms = new MemoryStream())
        {
            BSDiff.Create(File.ReadAllBytes(game_file), File.ReadAllBytes(source.AbsolutePath), ms);
            result.SourceData = ms.ToArray().ToBase64();
        }
        Info($"Generated a {result.SourceData.Length} byte patch for (unknown)");

        return result;
    };
}
/// <summary>
/// Installs all files sourced from a single archive: stages them through the
/// VFS, moves/copies them to their output locations in parallel, then applies
/// binary patches for any PatchedFromArchive directives and verifies each
/// patched file's hash.
/// </summary>
/// <param name="queue">Work queue used for parallel file installation.</param>
/// <param name="archive">The archive being installed.</param>
/// <param name="absolutePath">On-disk path of the archive (unused here; staging goes through VFS).</param>
/// <param name="grouping">All FromArchive directives sourced from this archive.</param>
/// <exception cref="InvalidDataException">Thrown when a patched file's hash does not match the directive.</exception>
private async Task InstallArchive(WorkQueue queue, Archive archive, string absolutePath,
    IGrouping<string, FromArchive> grouping)
{
    Status($"Extracting {archive.Name}");
    List<FromArchive> vFiles = grouping.Select(g =>
    {
        var file = VFS.Index.FileForArchiveHashPath(g.ArchiveHashPath);
        g.FromFile = file;
        return g;
    }).ToList();

    var onFinish = await VFS.Stage(vFiles.Select(f => f.FromFile).Distinct());

    Status($"Copying files for {archive.Name}");

    // Copies (or moves) from -> to, clearing read-only flags and deleting any
    // existing destination first, then stamping the file with the current time.
    void CopyFile(string from, string to, bool useMove)
    {
        if (File.Exists(to))
        {
            var fi = new FileInfo(to);
            if (fi.IsReadOnly)
            {
                fi.IsReadOnly = false;
            }
            File.Delete(to);
        }

        if (File.Exists(from))
        {
            var fi = new FileInfo(from);
            if (fi.IsReadOnly)
            {
                fi.IsReadOnly = false;
            }
        }

        if (useMove)
        {
            File.Move(from, to);
        }
        else
        {
            File.Copy(from, to);
        }
        // If we don't do this, the file will use the last-modified date of the file when it was compressed
        // into an archive, which isn't really what we want in the case of files installed archives
        File.SetLastWriteTime(to, DateTime.Now);
    }

    await vFiles.GroupBy(f => f.FromFile)
        .PDoIndexed(queue, (idx, group) =>
        {
            Utils.Status("Installing files", idx * 100 / vFiles.Count);
            // Move the staged copy into place once, then fan out duplicates
            // from the first installed destination.
            var firstDest = Path.Combine(OutputFolder, group.First().To);
            CopyFile(group.Key.StagedPath, firstDest, true);
            foreach (var copy in group.Skip(1))
            {
                var nextDest = Path.Combine(OutputFolder, copy.To);
                CopyFile(firstDest, nextDest, false);
            }
        });

    Status("Unstaging files");
    onFinish();

    // Now patch all the files from this archive
    foreach (var toPatch in grouping.OfType<PatchedFromArchive>())
    {
        // BUGFIX: removed an unused `patchStream` MemoryStream that was
        // allocated per iteration and never used.
        Status($"Patching {Path.GetFileName(toPatch.To)}");
        // Read in the patch data
        byte[] patchData = LoadBytesFromPath(toPatch.PatchID);
        var toFile = Path.Combine(OutputFolder, toPatch.To);
        var oldData = new MemoryStream(File.ReadAllBytes(toFile));
        // Remove the file we're about to patch
        File.Delete(toFile);
        // Patch it
        using (var outStream = File.Open(toFile, FileMode.Create))
        {
            BSDiff.Apply(oldData, () => new MemoryStream(patchData), outStream);
        }
        Status($"Verifying Patch {Path.GetFileName(toPatch.To)}");
        var resultSha = toFile.FileHash();
        if (resultSha != toPatch.Hash)
        {
            throw new InvalidDataException($"Invalid Hash for {toPatch.To} after patching");
        }
    }
}
/// <summary>
/// Extracts an archive directly into the output folder, copies files that are
/// installed to multiple destinations, then applies binary patches for any
/// PatchedFromArchive directives in the group.
/// </summary>
/// <param name="archive">The archive being installed.</param>
/// <param name="absolutePath">On-disk path of the archive to extract.</param>
/// <param name="grouping">All FromArchive directives sourced from this archive.</param>
private void InstallArchive(Archive archive, string absolutePath, IGrouping<string, FromArchive> grouping)
{
    Status("Extracting {0}", archive.Name);
    var files = grouping.GroupBy(e => e.From)
        .ToDictionary(e => e.Key);

    using (var a = new ArchiveFile(absolutePath))
    {
        a.Extract(entry =>
        {
            if (files.TryGetValue(entry.FileName, out var directives))
            {
                // Extract straight to the first destination; any duplicates
                // are copied from it afterwards.
                var directive = directives.First();
                var absolute = Path.Combine(Outputfolder, directive.To);
                if (absolute.FileExists())
                {
                    File.Delete(absolute);
                }
                return File.OpenWrite(absolute);
            }
            return null;
        });
    }

    Status("Copying duplicated files for {0}", archive.Name);
    // BUGFIX: removed a stray empty statement (`;`) that followed this loop.
    foreach (var dups in files.Where(e => e.Value.Count() > 1).Select(v => v.Value))
    {
        var ffrom = dups.First();
        var from_path = Path.Combine(Outputfolder, ffrom.To);
        foreach (var to in dups.Skip(1))
        {
            var to_path = Path.Combine(Outputfolder, to.To);
            if (to_path.FileExists())
            {
                File.Delete(to_path);
            }
            File.Copy(from_path, to_path);
        }
    }

    // Now patch all the files from this archive
    foreach (var to_patch in grouping.OfType<PatchedFromArchive>())
    {
        // BUGFIX: removed an unused `patch_stream` MemoryStream that was
        // allocated per iteration and never used.
        Status("Patching {0}", Path.GetFileName(to_patch.To));
        // Read in the patch data
        var patch_data = to_patch.Patch.FromBase64();
        var to_file = Path.Combine(Outputfolder, to_patch.To);
        // Buffer the original bytes, then delete so we can recreate the file
        // patched. old_data is now disposed (it previously was not).
        using (var old_data = new MemoryStream(File.ReadAllBytes(to_file)))
        {
            File.Delete(to_file);
            using (var out_stream = File.OpenWrite(to_file))
            {
                BSDiff.Apply(old_data, () => new MemoryStream(patch_data), out_stream);
            }
        }
    }
}