/// <summary>
/// Builds a patching context for a patch applied against the full list of
/// database configs (LinkedList-based pipeline).
/// </summary>
public PatchContext(UrlDir.UrlConfig patchUrl, IEnumerable<IProtoUrlConfig> databaseConfigs, IBasicLogger logger, IPatchProgress progress)
{
    this.progress = progress;
    this.logger = logger;
    this.databaseConfigs = databaseConfigs;
    this.patchUrl = patchUrl;
}
/// <summary>
/// Builds a patching context for a patch applied against a UrlDir database
/// root (file-based pipeline).
/// </summary>
public PatchContext(UrlDir.UrlConfig patchUrl, UrlDir databaseRoot, IBasicLogger logger, IPatchProgress progress)
{
    this.progress = progress;
    this.logger = logger;
    this.databaseRoot = databaseRoot;
    this.patchUrl = patchUrl;
}
// Shared fixture setup: a fresh URL tree with one file, plus a mocked
// progress sink for assertions.
public PatchExtractorTest()
{
    root = UrlBuilder.CreateRoot();
    progress = Substitute.For<IPatchProgress>();
    file = UrlBuilder.CreateFile("abc/def.cfg", root);
}
/// <summary>
/// Creates a checker over the given mod list and GameData tree.
/// All dependencies are required; a null argument throws.
/// </summary>
public NeedsChecker(IEnumerable<string> mods, UrlDir gameData, IPatchProgress progress, IBasicLogger logger)
{
    if (mods == null) throw new ArgumentNullException(nameof(mods));
    if (gameData == null) throw new ArgumentNullException(nameof(gameData));
    if (progress == null) throw new ArgumentNullException(nameof(progress));
    if (logger == null) throw new ArgumentNullException(nameof(logger));

    this.mods = mods;
    this.gameData = gameData;
    this.progress = progress;
    this.logger = logger;
}
// Fixture setup: a GameData tree, mocked collaborators, and a checker seeded
// with three mod names (one containing a '/' to exercise path-like names).
public NeedsCheckerTest()
{
    gameData = UrlBuilder.CreateGameData();
    logger = Substitute.For<IBasicLogger>();
    progress = Substitute.For<IPatchProgress>();
    string[] modNames = { "mod1", "mod2", "mod/2" };
    needsChecker = new NeedsChecker(modNames, gameData, progress, logger);
}
/// <summary>
/// Creates an applier for the given patch list over a database root.
/// Eagerly snapshots <c>databaseRoot.AllConfigFiles</c> so later passes
/// iterate a stable array.
/// </summary>
public PatchApplier(PatchList patchList, UrlDir databaseRoot, IPatchProgress progress, IBasicLogger logger)
{
    // FIX: added argument guards for consistency with the other constructors in
    // this codebase (e.g. NeedsChecker, PatchExtractor); previously a null
    // databaseRoot surfaced as a NullReferenceException on the line below.
    this.patchList = patchList ?? throw new ArgumentNullException(nameof(patchList));
    this.databaseRoot = databaseRoot ?? throw new ArgumentNullException(nameof(databaseRoot));
    this.progress = progress ?? throw new ArgumentNullException(nameof(progress));
    this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    allConfigFiles = databaseRoot.AllConfigFiles.ToArray();
}
// Fixture setup: mocked logger/progress, a one-file database root, an empty
// patch list for the shared mod list, and the applier under test.
public PatchApplierTest()
{
    progress = Substitute.For<IPatchProgress>();
    logger = Substitute.For<IBasicLogger>();
    databaseRoot = UrlBuilder.CreateRoot();
    file = UrlBuilder.CreateFile("abc/def.cfg", databaseRoot);
    patchList = new PatchList(modList);
    patchApplier = new PatchApplier(patchList, databaseRoot, progress, logger);
}
// Fixture setup: root -> GameData -> one config file, plus mocked
// progress/logger collaborators.
public NeedsCheckerTest()
{
    root = UrlBuilder.CreateRoot();
    gameData = UrlBuilder.CreateGameData(root);
    file = UrlBuilder.CreateFile("abc/def.cfg", gameData);
    logger = Substitute.For<IBasicLogger>();
    progress = Substitute.For<IPatchProgress>();
}
// Verifies that DeletePatch.Apply (LinkedList overload) removes exactly the
// matching configs, reports each deletion in order, and touches no other
// progress channels.
public void TestApply()
{
    // Four identical NODEs; the matcher flags #2 and #4 for deletion.
    ConfigNode config1 = new ConfigNode("NODE");
    ConfigNode config2 = new ConfigNode("NODE");
    ConfigNode config3 = new ConfigNode("NODE");
    ConfigNode config4 = new ConfigNode("NODE");
    INodeMatcher nodeMatcher = Substitute.For<INodeMatcher>();
    nodeMatcher.IsMatch(config1).Returns(false);
    nodeMatcher.IsMatch(config2).Returns(true);
    nodeMatcher.IsMatch(config3).Returns(false);
    nodeMatcher.IsMatch(config4).Returns(true);
    DeletePatch patch = new DeletePatch(UrlBuilder.CreateConfig("ghi/jkl", new ConfigNode("!NODE")), nodeMatcher, Substitute.For<IPassSpecifier>());
    // Each proto config wraps one of the nodes above.
    IProtoUrlConfig urlConfig1 = Substitute.For<IProtoUrlConfig>();
    IProtoUrlConfig urlConfig2 = Substitute.For<IProtoUrlConfig>();
    IProtoUrlConfig urlConfig3 = Substitute.For<IProtoUrlConfig>();
    IProtoUrlConfig urlConfig4 = Substitute.For<IProtoUrlConfig>();
    urlConfig1.Node.Returns(config1);
    urlConfig2.Node.Returns(config2);
    urlConfig3.Node.Returns(config3);
    urlConfig4.Node.Returns(config4);
    LinkedList<IProtoUrlConfig> configs = new LinkedList<IProtoUrlConfig>();
    configs.AddLast(urlConfig1);
    configs.AddLast(urlConfig2);
    configs.AddLast(urlConfig3);
    configs.AddLast(urlConfig4);
    IPatchProgress progress = Substitute.For<IPatchProgress>();
    IBasicLogger logger = Substitute.For<IBasicLogger>();

    patch.Apply(configs, progress, logger);

    // Only the non-matching configs survive, in original order.
    Assert.Equal(new[] { urlConfig1, urlConfig3 }, configs);
    // Deletions are reported in list order.
    Received.InOrder(delegate
    {
        progress.ApplyingDelete(urlConfig2, patch.UrlConfig);
        progress.ApplyingDelete(urlConfig4, patch.UrlConfig);
    });
    // No other progress channel may be touched by a delete.
    progress.DidNotReceiveWithAnyArgs().ApplyingUpdate(null, null);
    progress.DidNotReceiveWithAnyArgs().ApplyingCopy(null, null);
    progress.DidNotReceiveWithAnyArgs().Error(null, null);
    progress.DidNotReceiveWithAnyArgs().Exception(null, null);
    progress.DidNotReceiveWithAnyArgs().Exception(null, null, null);
}
/// <summary>
/// Validates its arguments and reports the needs as always satisfied.
/// </summary>
public bool CheckNeeds(INeedsChecker needsChecker, IPatchProgress progress)
{
    if (needsChecker == null) throw new ArgumentNullException(nameof(needsChecker));
    if (progress == null) throw new ArgumentNullException(nameof(progress));

    return true;
}
// Fixture setup: a one-file URL tree, every collaborator mocked, and the
// extractor under test wired up with them.
public PatchExtractorTest()
{
    root = UrlBuilder.CreateRoot();
    file = UrlBuilder.CreateFile("abc/def.cfg", root);

    progress = Substitute.For<IPatchProgress>();
    logger = Substitute.For<IBasicLogger>();
    needsChecker = Substitute.For<INeedsChecker>();
    tagListParser = Substitute.For<ITagListParser>();
    protoPatchBuilder = Substitute.For<IProtoPatchBuilder>();
    patchCompiler = Substitute.For<IPatchCompiler>();

    patchExtractor = new PatchExtractor(progress, logger, needsChecker, tagListParser, protoPatchBuilder, patchCompiler);
}
// Verifies that CopyPatch.Apply (file overload) appends a modified copy when
// the patch changes the copy's name, leaving the original config untouched.
public void TestApply__NameChanged()
{
    UrlDir.UrlFile file = UrlBuilder.CreateFile("abc/def.cfg");
    UrlDir.UrlConfig urlConfig = UrlBuilder.CreateConfig(new TestConfigNode("NODE") { { "name", "000" }, { "foo", "bar" }, }, file);
    INodeMatcher nodeMatcher = Substitute.For<INodeMatcher>();
    nodeMatcher.IsMatch(urlConfig.config).Returns(true);
    // The copy patch renames the node (@name = 001), so the copy is legal.
    CopyPatch patch = new CopyPatch(UrlBuilder.CreateConfig("ghi/jkl", new TestConfigNode("@NODE") { { "@name", "001" }, { "@foo", "baz" }, { "pqr", "stw" }, }), nodeMatcher, Substitute.For<IPassSpecifier>());
    IPatchProgress progress = Substitute.For<IPatchProgress>();
    IBasicLogger logger = Substitute.For<IBasicLogger>();

    patch.Apply(file, progress, logger);

    // Original stays at index 0, unchanged; the renamed copy is appended.
    Assert.Equal(2, file.configs.Count);
    Assert.Same(urlConfig, file.configs[0]);
    AssertNodesEqual(new TestConfigNode("NODE") { { "name", "000" }, { "foo", "bar" }, }, file.configs[0].config);
    AssertNodesEqual(new TestConfigNode("NODE") { { "name", "001" }, { "foo", "baz" }, { "pqr", "stw" }, }, file.configs[1].config);
    // Exactly one copy is reported; nothing else fires.
    progress.Received().ApplyingCopy(urlConfig, patch.UrlConfig);
    progress.DidNotReceiveWithAnyArgs().ApplyingUpdate(null, null);
    progress.DidNotReceiveWithAnyArgs().ApplyingDelete(null, null);
    progress.DidNotReceiveWithAnyArgs().Error(null, null);
    progress.DidNotReceiveWithAnyArgs().Exception(null, null);
    progress.DidNotReceiveWithAnyArgs().Exception(null, null, null);
}
// Verifies that CopyPatch.Apply (LinkedList overload) refuses to create a copy
// whose name equals the parent's, reporting an Error instead of copying.
public void TestApply__NameNotChanged()
{
    ConfigNode config = new TestConfigNode("NODE") { { "name", "000" }, { "foo", "bar" }, };
    INodeMatcher nodeMatcher = Substitute.For<INodeMatcher>();
    nodeMatcher.IsMatch(config).Returns(true);
    // Note: the patch does NOT set @name, so the copy would collide with its parent.
    CopyPatch patch = new CopyPatch(UrlBuilder.CreateConfig("ghi/jkl", new TestConfigNode("+NODE") { { "@foo", "baz" }, { "pqr", "stw" }, }), nodeMatcher, Substitute.For<IPassSpecifier>());
    IProtoUrlConfig protoConfig = Substitute.For<IProtoUrlConfig>();
    protoConfig.Node.Returns(config);
    protoConfig.FullUrl.Returns("abc/def.cfg/NODE");
    LinkedList<IProtoUrlConfig> configs = new LinkedList<IProtoUrlConfig>();
    configs.AddLast(protoConfig);
    IPatchProgress progress = Substitute.For<IPatchProgress>();
    IBasicLogger logger = Substitute.For<IBasicLogger>();

    patch.Apply(configs, progress, logger);

    // The list is unchanged and the original node keeps its values.
    Assert.Single(configs);
    Assert.Same(protoConfig, configs.First.Value);
    AssertNodesEqual(new TestConfigNode("NODE") { { "name", "000" }, { "foo", "bar" }, }, configs.First.Value.Node);
    // Exactly one Error with the explanatory message; no copy/update/delete fires.
    progress.Received().Error(patch.UrlConfig, "Error - when applying copy ghi/jkl/+NODE to abc/def.cfg/NODE - the copy needs to have a different name than the parent (use @name = xxx)");
    progress.DidNotReceiveWithAnyArgs().ApplyingUpdate(null, null);
    progress.DidNotReceiveWithAnyArgs().ApplyingCopy(null, null);
    progress.DidNotReceiveWithAnyArgs().ApplyingDelete(null, null);
    progress.DidNotReceiveWithAnyArgs().Exception(null, null);
    progress.DidNotReceiveWithAnyArgs().Exception(null, null, null);
}
/// <summary>
/// Applies this update patch to every matching config in <paramref name="file"/>,
/// replacing each matched config in place. When <c>loop</c> is set, the patch is
/// re-applied to the same config until the result no longer matches.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public void Apply(UrlDir.UrlFile file, IPatchProgress progress, IBasicLogger logger)
{
    if (file == null) { throw new ArgumentNullException(nameof(file)); }
    if (progress == null) { throw new ArgumentNullException(nameof(progress)); }
    if (logger == null) { throw new ArgumentNullException(nameof(logger)); }

    PatchContext context = new PatchContext(UrlConfig, file.root, logger, progress);

    for (int i = 0; i < file.configs.Count; i++)
    {
        UrlDir.UrlConfig urlConfig = file.configs[i];
        try
        {
            if (!NodeMatcher.IsMatch(urlConfig.config)) { continue; }
            if (loop) { logger.Info($"Looping on {UrlConfig.SafeUrl()} to {urlConfig.SafeUrl()}"); }
            do
            {
                progress.ApplyingUpdate(urlConfig, UrlConfig);
                // Replace in place and keep the local in sync so the loop condition
                // below re-checks the freshly patched node.
                file.configs[i] = urlConfig = new UrlDir.UrlConfig(file, MMPatchLoader.ModifyNode(new NodeStack(urlConfig.config), UrlConfig.config, context));
            } while (loop && NodeMatcher.IsMatch(urlConfig.config));
            // Strip the loop marker so it does not leak into the final database.
            if (loop) { file.configs[i].config.RemoveNodes("MM_PATCH_LOOP"); }
        }
        catch (Exception ex)
        {
            // One faulting config must not abort the rest of the pass.
            progress.Exception(UrlConfig, $"Exception while applying update {UrlConfig.SafeUrl()} to {urlConfig.SafeUrl()}", ex);
        }
    }
}
/// <summary>
/// Applies this update patch to every matching config in the database list,
/// replacing each matched node's list entry. When <c>loop</c> is set, the patch
/// is re-applied to the same config until the result no longer matches.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public void Apply(LinkedList<IProtoUrlConfig> databaseConfigs, IPatchProgress progress, IBasicLogger logger)
{
    if (databaseConfigs == null) { throw new ArgumentNullException(nameof(databaseConfigs)); }
    if (progress == null) { throw new ArgumentNullException(nameof(progress)); }
    if (logger == null) { throw new ArgumentNullException(nameof(logger)); }

    PatchContext context = new PatchContext(UrlConfig, databaseConfigs, logger, progress);

    for (LinkedListNode<IProtoUrlConfig> listNode = databaseConfigs.First; listNode != null; listNode = listNode.Next)
    {
        IProtoUrlConfig protoConfig = listNode.Value;
        try
        {
            if (!NodeMatcher.IsMatch(protoConfig.Node)) { continue; }
            if (loop) { logger.Info($"Looping on {UrlConfig.SafeUrl()} to {protoConfig.FullUrl}"); }
            do
            {
                progress.ApplyingUpdate(protoConfig, UrlConfig);
                // Replace the list entry and keep the local in sync so the loop
                // condition below re-checks the freshly patched node.
                listNode.Value = protoConfig = new ProtoUrlConfig(protoConfig.UrlFile, MMPatchLoader.ModifyNode(new NodeStack(protoConfig.Node), UrlConfig.config, context));
            } while (loop && NodeMatcher.IsMatch(protoConfig.Node));
            // Strip the loop marker so it does not leak into the final database.
            if (loop) { protoConfig.Node.RemoveNodes("MM_PATCH_LOOP"); }
        }
        catch (Exception ex)
        {
            // One faulting config must not abort the rest of the pass.
            progress.Exception(UrlConfig, $"Exception while applying update {UrlConfig.SafeUrl()} to {protoConfig.FullUrl}", ex);
        }
    }
}
/// <summary>
/// Creates an extractor from its collaborators; every dependency is required
/// and a null argument throws.
/// </summary>
public PatchExtractor(
    IPatchProgress progress,
    IBasicLogger logger,
    INeedsChecker needsChecker,
    ITagListParser tagListParser,
    IProtoPatchBuilder protoPatchBuilder,
    IPatchCompiler patchCompiler
)
{
    if (progress == null) throw new ArgumentNullException(nameof(progress));
    if (logger == null) throw new ArgumentNullException(nameof(logger));
    if (needsChecker == null) throw new ArgumentNullException(nameof(needsChecker));
    if (tagListParser == null) throw new ArgumentNullException(nameof(tagListParser));
    if (protoPatchBuilder == null) throw new ArgumentNullException(nameof(protoPatchBuilder));
    if (patchCompiler == null) throw new ArgumentNullException(nameof(patchCompiler));

    this.progress = progress;
    this.logger = logger;
    this.needsChecker = needsChecker;
    this.tagListParser = tagListParser;
    this.protoPatchBuilder = protoPatchBuilder;
    this.patchCompiler = patchCompiler;
}
/// <summary>
/// Applies this copy patch to every matching config in <paramref name="file"/>:
/// each match is cloned through the patch and the clone appended to the file.
/// A clone that keeps its parent's name is rejected with an Error.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public void Apply(UrlDir.UrlFile file, IPatchProgress progress, IBasicLogger logger)
{
    if (file == null) { throw new ArgumentNullException(nameof(file)); }
    if (progress == null) { throw new ArgumentNullException(nameof(progress)); }
    if (logger == null) { throw new ArgumentNullException(nameof(logger)); }

    PatchContext context = new PatchContext(UrlConfig, file.root, logger, progress);

    // Avoid checking the new configs we are creating
    int count = file.configs.Count;
    for (int i = 0; i < count; i++)
    {
        UrlDir.UrlConfig url = file.configs[i];
        try
        {
            if (!NodeMatcher.IsMatch(url.config)) { continue; }
            ConfigNode clone = MMPatchLoader.ModifyNode(new NodeStack(url.config), UrlConfig.config, context);
            // A copy must be renamed (via @name) or it would shadow its parent.
            if (url.config.HasValue("name") && url.config.GetValue("name") == clone.GetValue("name"))
            {
                progress.Error(UrlConfig, $"Error - when applying copy {UrlConfig.SafeUrl()} to {url.SafeUrl()} - the copy needs to have a different name than the parent (use @name = xxx)");
            }
            else
            {
                progress.ApplyingCopy(url, UrlConfig);
                file.AddConfig(clone);
            }
        }
        catch (Exception ex)
        {
            // One faulting config must not abort the rest of the pass.
            progress.Exception(UrlConfig, $"Exception while applying copy {UrlConfig.SafeUrl()} to {url.SafeUrl()}", ex);
        }
    }
}
// Constructing a PatchList with a BeforePassSpecifier naming a mod that is not
// in the mod list must throw KeyNotFoundException and must not count the patch.
public void TestConstructor__UnknownMod()
{
    IPatch patch = Substitute.For<IPatch>();
    UrlDir.UrlConfig urlConfig = UrlBuilder.CreateConfig("abc/def", new ConfigNode("NODE"));
    patch.PassSpecifier.Returns(new BeforePassSpecifier("mod3", urlConfig));
    IPatchProgress progress = Substitute.For<IPatchProgress>();

    KeyNotFoundException ex = Assert.Throws<KeyNotFoundException>(
        () => new PatchList(new[] { "mod1", "mod2" }, new[] { patch }, progress));

    Assert.Equal("Mod 'mod3' not found", ex.Message);
    progress.DidNotReceive().PatchAdded();
}
/// <summary>
/// Delegates the needs check for this patch's mod; if the need is unsatisfied,
/// reports it against this patch's config before returning false.
/// </summary>
public bool CheckNeeds(INeedsChecker needsChecker, IPatchProgress progress)
{
    if (needsChecker == null) throw new ArgumentNullException(nameof(needsChecker));
    if (progress == null) throw new ArgumentNullException(nameof(progress));

    if (needsChecker.CheckNeeds(mod)) return true;

    progress.NeedsUnsatisfiedFor(urlConfig);
    return false;
}
/// <summary>
/// Appends a fresh copy of this patch's node (renamed to <c>NodeType</c>) to the
/// end of the database config list.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public void Apply(LinkedList<IProtoUrlConfig> configs, IPatchProgress progress, IBasicLogger logger)
{
    if (configs == null) throw new ArgumentNullException(nameof(configs));
    if (progress == null) throw new ArgumentNullException(nameof(progress));
    if (logger == null) throw new ArgumentNullException(nameof(logger));

    // Deep-copy so later patches cannot mutate the patch's own node.
    ConfigNode copy = UrlConfig.config.DeepCopy();
    copy.name = NodeType;
    configs.AddLast(new ProtoUrlConfig(UrlConfig.parent, copy));
}
// Constructing a PatchList with a pass specifier type it does not recognize
// must throw NotImplementedException and must not count the patch.
public void TestConstructor__UnknownPassSpecifier()
{
    IPatch patch = Substitute.For<IPatch>();
    UrlDir.UrlConfig urlConfig = UrlBuilder.CreateConfig("abc/def", new ConfigNode("NODE"));
    IPassSpecifier passSpecifier = Substitute.For<IPassSpecifier>();
    passSpecifier.Descriptor.Returns(":SOMEPASS");
    patch.PassSpecifier.Returns(passSpecifier);
    IPatchProgress progress = Substitute.For<IPatchProgress>();

    NotImplementedException ex = Assert.Throws<NotImplementedException>(
        () => new PatchList(new string[0], new[] { patch }, progress));

    Assert.Equal("Don't know what to do with pass specifier: :SOMEPASS", ex.Message);
    progress.DidNotReceive().PatchAdded();
}
/// <summary>
/// Removes every config in <paramref name="file"/> whose node matches this
/// patch's matcher, reporting each deletion.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public void Apply(UrlDir.UrlFile file, IPatchProgress progress, IBasicLogger logger)
{
    if (file == null) { throw new ArgumentNullException(nameof(file)); }
    if (progress == null) { throw new ArgumentNullException(nameof(progress)); }
    if (logger == null) { throw new ArgumentNullException(nameof(logger)); }

    // Index-based while loop because RemoveAt shifts subsequent elements down:
    // only advance when the current config is kept.
    int i = 0;
    while (i < file.configs.Count)
    {
        UrlDir.UrlConfig url = file.configs[i];
        try
        {
            if (NodeMatcher.IsMatch(url.config))
            {
                progress.ApplyingDelete(url, UrlConfig);
                file.configs.RemoveAt(i);
            }
            else
            {
                i++;
            }
        }
        catch (Exception ex)
        {
            progress.Exception(UrlConfig, $"Exception while applying delete {UrlConfig.SafeUrl()} to {url.SafeUrl()}", ex);
            // FIX: skip the faulting config. Previously the index was never
            // advanced on exception, so the loop retried the same config and
            // logged the same exception forever (infinite loop).
            i++;
        }
    }
}
// NOTE(review): this method appears truncated as visible here — the try block
// has no catch/finally and the loop/method are not closed. Code kept as-is;
// comments only.
/// <summary>
/// Applies this copy patch to every matching config in the database list: each
/// match is cloned through the patch and the clone inserted right after it.
/// A clone that keeps its parent's name is rejected with an Error.
/// </summary>
public void Apply(LinkedList<IProtoUrlConfig> databaseConfigs, IPatchProgress progress, IBasicLogger logger)
{
    if (databaseConfigs == null) { throw new ArgumentNullException(nameof(databaseConfigs)); }
    if (progress == null) { throw new ArgumentNullException(nameof(progress)); }
    if (logger == null) { throw new ArgumentNullException(nameof(logger)); }

    PatchContext context = new PatchContext(UrlConfig, databaseConfigs, logger, progress);

    for (LinkedListNode<IProtoUrlConfig> listNode = databaseConfigs.First; listNode != null; listNode = listNode.Next)
    {
        IProtoUrlConfig protoConfig = listNode.Value;
        try
        {
            if (!NodeMatcher.IsMatch(protoConfig.Node)) { continue; }
            ConfigNode clone = MMPatchLoader.ModifyNode(new NodeStack(protoConfig.Node), UrlConfig.config, context);
            // A copy must be renamed (via @name) or it would shadow its parent.
            if (protoConfig.Node.GetValue("name") is string name && name == clone.GetValue("name"))
            {
                progress.Error(UrlConfig, $"Error - when applying copy {UrlConfig.SafeUrl()} to {protoConfig.FullUrl} - the copy needs to have a different name than the parent (use @name = xxx)");
            }
            else
            {
                progress.ApplyingCopy(protoConfig, UrlConfig);
                // Insert after the source and advance past the new node so it is
                // not itself matched in this pass.
                listNode = databaseConfigs.AddAfter(listNode, new ProtoUrlConfig(protoConfig.UrlFile, clone));
            }
        }
// Verifies that DeletePatch.Apply (file overload) removes exactly the matching
// configs, reports each deletion in order, and touches no other progress channel.
public void TestApply()
{
    UrlDir.UrlFile file = UrlBuilder.CreateFile("abc/def.cfg");
    // Four identical NODEs; the matcher flags #2 and #4 for deletion.
    UrlDir.UrlConfig urlConfig1 = UrlBuilder.CreateConfig(new ConfigNode("NODE"), file);
    UrlDir.UrlConfig urlConfig2 = UrlBuilder.CreateConfig(new ConfigNode("NODE"), file);
    UrlDir.UrlConfig urlConfig3 = UrlBuilder.CreateConfig(new ConfigNode("NODE"), file);
    UrlDir.UrlConfig urlConfig4 = UrlBuilder.CreateConfig(new ConfigNode("NODE"), file);
    INodeMatcher nodeMatcher = Substitute.For<INodeMatcher>();
    nodeMatcher.IsMatch(urlConfig1.config).Returns(false);
    nodeMatcher.IsMatch(urlConfig2.config).Returns(true);
    nodeMatcher.IsMatch(urlConfig3.config).Returns(false);
    nodeMatcher.IsMatch(urlConfig4.config).Returns(true);
    DeletePatch patch = new DeletePatch(UrlBuilder.CreateConfig("ghi/jkl", new ConfigNode("!NODE")), nodeMatcher, Substitute.For<IPassSpecifier>());
    IPatchProgress progress = Substitute.For<IPatchProgress>();
    IBasicLogger logger = Substitute.For<IBasicLogger>();

    patch.Apply(file, progress, logger);

    // Only the non-matching configs survive, in original order.
    Assert.Equal(new[] { urlConfig1, urlConfig3 }, file.configs);
    // Deletions are reported in file order.
    Received.InOrder(delegate
    {
        progress.ApplyingDelete(urlConfig2, patch.UrlConfig);
        progress.ApplyingDelete(urlConfig4, patch.UrlConfig);
    });
    // No other progress channel may be touched by a delete.
    progress.DidNotReceiveWithAnyArgs().ApplyingUpdate(null, null);
    progress.DidNotReceiveWithAnyArgs().ApplyingCopy(null, null);
    progress.DidNotReceiveWithAnyArgs().Error(null, null);
    progress.DidNotReceiveWithAnyArgs().Exception(null, null);
    progress.DidNotReceiveWithAnyArgs().Exception(null, null, null);
}
/// <summary>
/// Removes every config in the database list whose node matches this patch's
/// matcher, reporting each deletion.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public void Apply(LinkedList<IProtoUrlConfig> databaseConfigs, IPatchProgress progress, IBasicLogger logger)
{
    if (databaseConfigs == null) { throw new ArgumentNullException(nameof(databaseConfigs)); }
    if (progress == null) { throw new ArgumentNullException(nameof(progress)); }
    if (logger == null) { throw new ArgumentNullException(nameof(logger)); }

    LinkedListNode<IProtoUrlConfig> currentNode = databaseConfigs.First;
    while (currentNode != null)
    {
        IProtoUrlConfig protoConfig = currentNode.Value;
        // Capture the successor before any removal so iteration can continue
        // past a removed node.
        LinkedListNode<IProtoUrlConfig> nextNode = currentNode.Next;
        try
        {
            if (NodeMatcher.IsMatch(protoConfig.Node))
            {
                progress.ApplyingDelete(protoConfig, UrlConfig);
                databaseConfigs.Remove(currentNode);
            }
        }
        catch (Exception ex)
        {
            progress.Exception(UrlConfig, $"Exception while applying delete {UrlConfig.SafeUrl()} to {protoConfig.FullUrl}", ex);
        }
        // FIX: advance unconditionally. Previously the advance happened inside
        // the try block, so an exception from IsMatch left currentNode in place
        // and the loop re-threw and re-logged the same exception forever.
        currentNode = nextNode;
    }
}
/// <summary>
/// Takes a stream for an old version of a file, preparing a
/// block-by-block patch to transform it in to the new version of a
/// file. The given formatter is used to output the prepared data,
/// which is finally written to the output stream.
/// </summary>
/// <param name="oldVersionFile">
/// Seekable and readable stream for the old version of data.
/// </param>
/// <param name="newVersionFile">
/// Seekable and readable stream for the new version of data.
/// </param>
/// <param name="formatter">
/// Formatter to transform prepared patch data to a useful output.
/// May be null, in which case the patch is prepared but not written.
/// </param>
/// <param name="prog">Optional progress sink; may be null.</param>
/// <param name="output">Destination stream for the formatted patch.</param>
/// <exception cref="ArgumentNullException">A required stream is null.</exception>
/// <exception cref="NotSupportedException">An input stream is not seekable.</exception>
public void CreatePatch(Stream oldVersionFile, Stream newVersionFile, IPatchFormatter formatter, IPatchProgress prog, Stream output)
{
    // FIX: argument validation previously threw bare NullReferenceException /
    // NotSupportedException with no message; use ArgumentNullException with the
    // parameter name per the Framework Design Guidelines (CA1062).
    if (oldVersionFile == null) throw new ArgumentNullException(nameof(oldVersionFile));
    if (newVersionFile == null) throw new ArgumentNullException(nameof(newVersionFile));
    if (output == null) throw new ArgumentNullException(nameof(output));
    if (!oldVersionFile.CanSeek) throw new NotSupportedException("oldVersionFile must be seekable");
    if (!newVersionFile.CanSeek) throw new NotSupportedException("newVersionFile must be seekable");

    oldVersionFile.Seek(0, SeekOrigin.Begin);
    newVersionFile.Seek(0, SeekOrigin.Begin);

    // Record source/target checksums so Apply can later verify it has the right
    // input file and the right result. Rewind after each checksum pass.
    var fileInfo = new PatchFileInformation();
    fileInfo.SourceChecksum = MD5.Check(oldVersionFile);
    oldVersionFile.Seek(0, SeekOrigin.Begin);
    fileInfo.TargetChecksum = MD5.Check(newVersionFile);
    newVersionFile.Seek(0, SeekOrigin.Begin);

    var patchGenerator = new PatchGenerator(oldVersionFile, oldVersionFile.Length, newVersionFile, newVersionFile.Length);
    patchGenerator.BlockSize = BlockSize;
    patchGenerator.MaximumMatches = MaximumMatches;

    // Find the blocks shared between old and new versions.
    List<SameBlock> sameBlocks = new List<SameBlock>();
    patchGenerator.Execute(sameBlocks, prog);

    if (formatter != null)
        formatter.FormatPatch(fileInfo, sameBlocks, newVersionFile, output);

    sameBlocks.Clear();
}
/// <summary>
/// Applies the loaded block patch to <paramref name="oldVersion"/>, writing the
/// reconstructed file to <paramref name="output"/>. Verifies the input against
/// the patch's source checksum first and the result against the target checksum
/// afterwards.
/// </summary>
/// <param name="oldVersion">Seekable stream of the file to be patched.</param>
/// <param name="patchStream">Stream positioned at the patch's block data.</param>
/// <param name="output">Seekable, readable/writable destination stream.</param>
/// <param name="prog">Optional progress sink; may be null.</param>
/// <returns>
/// NotRequired when the input already matches the target checksum; WrongFile
/// when it matches neither source nor target; Failed on malformed patch data
/// or a checksum mismatch of the result; Ok on success.
/// </returns>
public PatchApplyResponse Apply(Stream oldVersion, Stream patchStream, Stream output, IPatchProgress prog)
{
    // Checksum our input to make sure things are okay
    byte[] oldVersionHash = MD5.Check(oldVersion);
    oldVersion.Seek(0, SeekOrigin.Begin);

    // If the input already hashes to the target, the patch has already been applied.
    bool isRequired = false;
    for (int i = 0; i < 16; i++)
    {
        if (oldVersionHash[i] != mPatFileInfo.TargetChecksum[i])
        {
            isRequired = true;
            break;
        }
    }
    if (!isRequired)
    {
        return PatchApplyResponse.NotRequired;
    }

    // Make sure our file signatures match up
    for (int i = 0; i < 16; i++)
    {
        if (oldVersionHash[i] != mPatFileInfo.SourceChecksum[i])
            return PatchApplyResponse.WrongFile;
    }

    byte[] copyBuffer = new byte[4096];
    BinaryReader br = new BinaryReader(patchStream);
    for (int currentBlock = 0; currentBlock < (int)mPatFileInfo.BlockCount; currentBlock++)
    {
        ulong blockSize = 0;
        // FIX: removed unused debug local (`long derp = patchStream.Position;`).
        byte blockType = br.ReadByte();
        switch (blockType)
        {
            // Identical blocks (1/2/3)
            // ========================
            // Copy an amount of data from the original file in to the new one.
            // The case value encodes the width of the length field (byte/ushort/uint).
            case 1:
            case 2:
            case 3:
                // Decode the block length
                switch (blockType)
                {
                    case 1: blockSize = (ulong)br.ReadByte(); break;
                    case 2: blockSize = (ulong)br.ReadUInt16(); break;
                    case 3: blockSize = (ulong)br.ReadUInt32(); break;
                }
                long sourceOffset = br.ReadUInt32();
                oldVersion.Seek(sourceOffset, SeekOrigin.Begin);
                // A zero-length identical block is malformed; count it as a failure.
                if (blockSize < 1)
                {
                    return PatchApplyResponse.Failed;
                }
                // Copy from the source to the output.
                while (blockSize > 0)
                {
                    int read = oldVersion.Read(copyBuffer, 0, (int)Math.Min(4096, blockSize));
                    if (read <= 0)
                    {
                        throw new IOException();
                    }
                    output.Write(copyBuffer, 0, read);
                    blockSize -= (ulong)read;
                }
                break;
            // Payload delivery blocks (5/6/7)
            // ===============================
            // Copy an amount of data from our patch file in to the new one.
            case 5:
            case 6:
            case 7:
                switch (blockType)
                {
                    case 5: blockSize = (ulong)br.ReadByte(); break;
                    case 6: blockSize = (ulong)br.ReadUInt16(); break;
                    case 7: blockSize = (ulong)br.ReadUInt32(); break;
                }
                while (blockSize > 0)
                {
                    int read = br.Read(copyBuffer, 0, (int)Math.Min(4096, blockSize));
                    if (read <= 0)
                    {
                        throw new IOException();
                    }
                    output.Write(copyBuffer, 0, read);
                    blockSize -= (ulong)read;
                }
                break;
            // End-of-patch marker.
            case 255:
                // TODO: Should we really care about the timestamp?
                br.ReadInt64();
                break;
            default:
                return PatchApplyResponse.Failed;
        }
        // Issue any progress updates before moving to the next block
        if (prog != null)
        {
            prog.OnPatchProgress(currentBlock, mPatFileInfo.BlockCount);
        }
    }

    // Make sure we applied the patch correctly
    output.Seek(0, SeekOrigin.Begin);
    byte[] patchedFileChecksum = MD5.Check(output);
    for (int i = 0; i < 16; i++)
    {
        if (patchedFileChecksum[i] != mPatFileInfo.TargetChecksum[i])
            return PatchApplyResponse.Failed;
    }

    // We're done!
    return PatchApplyResponse.Ok;
}
/// <summary>
/// Builds the list of "mod" names that patch clauses can reference: loaded
/// assembly names, :FOR[xxx] targets found in configs, GameData subdirectory
/// names, and mods explicitly added by assemblies. Also logs a detailed,
/// human-readable mod report through <paramref name="logger"/>.
/// </summary>
/// <returns>The sorted, case-insensitively de-duplicated mod name list.</returns>
public static IEnumerable<string> GenerateModList(IEnumerable<ModAddedByAssembly> modsAddedByAssemblies, IPatchProgress progress, IBasicLogger logger)
{
    #region List of mods
    //string envInfo = "ModuleManager env info\n";
    //envInfo += "  " + Environment.OSVersion.Platform + " " + ModuleManager.intPtr.ToInt64().ToString("X16") + "\n";
    //envInfo += "  " + Convert.ToString(ModuleManager.intPtr.ToInt64(), 2) + " " + Convert.ToString(ModuleManager.intPtr.ToInt64() >> 63, 2) + "\n";
    //string gamePath = Environment.GetCommandLineArgs()[0];
    //envInfo += "  Args: " + gamePath.Split(Path.DirectorySeparatorChar).Last() + " " + string.Join(" ", Environment.GetCommandLineArgs().Skip(1).ToArray()) + "\n";
    //envInfo += "  Executable SHA256 " + FileSHA(gamePath);
    //
    //log(envInfo);

    List<string> mods = new List<string>();

    StringBuilder modListInfo = new StringBuilder();

    modListInfo.Append("compiling list of loaded mods...\nMod DLLs found:\n");

    // Column layout shared by the header row and each assembly row below.
    string format = " {0,-40}{1,-25}{2,-25}{3,-25}{4}\n";

    modListInfo.AppendFormat(
        format,
        "Name",
        "Assembly Version",
        "Assembly File Version",
        "KSPAssembly Version",
        "SHA256"
    );

    modListInfo.Append('\n');

    // Pass 1: loaded assemblies. Each distinct assembly name becomes a mod.
    foreach (AssemblyLoader.LoadedAssembly mod in AssemblyLoader.loadedAssemblies)
    {
        if (string.IsNullOrEmpty(mod.assembly.Location)) //Diazo Edit for xEvilReeperx AssemblyReloader mod
        {
            continue;
        }
        FileVersionInfo fileVersionInfo = FileVersionInfo.GetVersionInfo(mod.assembly.Location);
        AssemblyName assemblyName = mod.assembly.GetName();
        // 0.0 is treated as "no KSPAssembly version declared".
        string kspAssemblyVersion;
        if (mod.versionMajor == 0 && mod.versionMinor == 0)
        {
            kspAssemblyVersion = "";
        }
        else
        {
            kspAssemblyVersion = mod.versionMajor + "." + mod.versionMinor;
        }

        // SHA failure is non-fatal: report it and keep an empty hash column.
        string fileSha = "";
        try
        {
            fileSha = FileUtils.FileSHA(mod.assembly.Location);
        }
        catch (Exception e)
        {
            progress.Exception("Exception while generating SHA for assembly " + assemblyName.Name, e);
        }

        modListInfo.AppendFormat(
            format,
            assemblyName.Name,
            assemblyName.Version,
            fileVersionInfo.FileVersion,
            kspAssemblyVersion,
            fileSha
        );

        // modlist += String.Format("  {0,-50} SHA256 {1}\n", modInfo, FileSHA(mod.assembly.Location));

        if (!mods.Contains(assemblyName.Name, StringComparer.OrdinalIgnoreCase))
        {
            mods.Add(assemblyName.Name);
        }
    }

    // Pass 2: :FOR[xxx] targets in configs that don't correspond to a DLL.
    modListInfo.Append("Non-DLL mods added (:FOR[xxx]):\n");
    foreach (UrlDir.UrlConfig cfgmod in GameDatabase.Instance.root.AllConfigs)
    {
        if (CommandParser.Parse(cfgmod.type, out string name) != Command.Insert)
        {
            if (name.Contains(":FOR["))
            {
                name = name.RemoveWS();

                // check for FOR[] blocks that don't match loaded DLLs and add them to the pass list
                try
                {
                    string dependency = name.Substring(name.IndexOf(":FOR[") + 5);
                    dependency = dependency.Substring(0, dependency.IndexOf(']'));
                    if (!mods.Contains(dependency, StringComparer.OrdinalIgnoreCase))
                    {
                        // found one, now add it to the list.
                        mods.Add(dependency);
                        modListInfo.AppendFormat(" {0}\n", dependency);
                    }
                }
                catch (ArgumentOutOfRangeException)
                {
                    // Malformed :FOR[ (e.g. missing ']') — warn and skip.
                    progress.Error(cfgmod, "Skipping :FOR init for line " + name + ". The line most likely contains a space that should be removed");
                }
            }
        }
    }

    // Pass 3: every direct subdirectory of GameData counts as a mod.
    modListInfo.Append("Mods by directory (sub directories of GameData):\n");
    UrlDir gameData = GameDatabase.Instance.root.children.First(dir => dir.type == UrlDir.DirectoryType.GameData);
    foreach (UrlDir subDir in gameData.children)
    {
        string cleanName = subDir.name.RemoveWS();
        if (!mods.Contains(cleanName, StringComparer.OrdinalIgnoreCase))
        {
            mods.Add(cleanName);
            modListInfo.AppendFormat(" {0}\n", cleanName);
        }
    }

    // Pass 4: mods explicitly registered by assemblies.
    modListInfo.Append("Mods added by assemblies:\n");
    foreach (ModAddedByAssembly mod in modsAddedByAssemblies)
    {
        if (!mods.Contains(mod.modName, StringComparer.OrdinalIgnoreCase))
        {
            mods.Add(mod.modName);
            modListInfo.AppendFormat(" {0}\n", mod);
        }
    }

    logger.Info(modListInfo.ToString());

    mods.Sort();

    #endregion List of mods

    return (mods);
}
/// <summary>
/// Creates a progress tracker that reuses the Counter of an existing tracker
/// (presumably to share counts across stages — confirm with callers) while
/// logging through the supplied logger.
/// </summary>
public PatchProgress(IPatchProgress progress, IBasicLogger logger)
{
    Counter = progress.Counter;
    this.logger = logger;
}
/// <summary>
/// Creates a builder reporting through the given (required) progress sink.
/// </summary>
public ProtoPatchBuilder(IPatchProgress progress)
{
    if (progress == null) throw new ArgumentNullException(nameof(progress));
    this.progress = progress;
}
/// <summary>
/// Creates a parser reporting through the given (required) progress sink.
/// </summary>
public TagListParser(IPatchProgress progress)
{
    if (progress == null) throw new ArgumentNullException(nameof(progress));
    this.progress = progress;
}
/// <summary>
/// Runs a patch through the given interpreter: the patch stream is analyzed
/// first, then applied to the old file with the result written to output.
/// Returns Failed when analysis rejects the patch.
/// </summary>
public PatchApplyResponse ApplyPatch(Stream oldVersionFile, Stream patch, IPatchInterpreter interpreter, IPatchProgress prog, Stream output)
{
    return interpreter.Analyze(patch)
        ? interpreter.Apply(oldVersionFile, patch, output, prog)
        : PatchApplyResponse.Failed;
}
/// <summary>
/// Needs are always considered satisfied; the arguments are not used.
/// </summary>
public bool CheckNeeds(INeedsChecker needsChecker, IPatchProgress progress)
{
    return true;
}
/// <summary>
/// Scans the target file against a chunked index of the source file, collecting
/// the blocks that are identical between the two.
/// </summary>
/// <param name="sameBlocks">
/// This list will store blocks that have been found to have remained
/// the same between files. A zero-size sentinel is added at the start (so
/// overlap checks against the "previous" block never fail) and at the end
/// (to avoid bounds checking in consumers).
/// </param>
/// <param name="prog">Optional progress sink; may be null.</param>
public void Execute(IList<SameBlock> sameBlocks, IPatchProgress prog)
{
    // FIX: include the parameter name in the exception (was a bare ArgumentNullException()).
    if (sameBlocks == null) throw new ArgumentNullException(nameof(sameBlocks));

    ChunkedFile sourceTree = new ChunkedFile(mSource, mSourceSize, mBlockSize);

    // the vector needs an 'empty' first block so checking for overlap with the 'previous' block never fails.
    sameBlocks.Add(new SameBlock());

    mTargetCDataBaseOffset = 0;
    mTargetCDataSize = 0;
    bool firstRun = true;

    // currentOffset is in the target file
    for (long currentOffset = 0; currentOffset < mTargetSize;)
    {
        // Reload the in-memory target window unless the current offset (plus
        // lookahead) still fits inside the cached window.
        bool reloadTargetCData = true;
        if ((currentOffset >= mTargetCDataBaseOffset) && (currentOffset + TargetLookaheadSize < mTargetCDataBaseOffset + TargetBufferSize))
        {
            if (firstRun)
            {
                firstRun = false;
            }
            else
            {
                reloadTargetCData = false;
            }
        }

        if (reloadTargetCData)
        {
            // at least support looking back blockSize, if possible (findBlock relies on this!)
            mTargetCDataBaseOffset = currentOffset - mBlockSize;

            // handle start of file correctly
            // NOTE(review): this compares against BlockSize while the line above
            // uses mBlockSize — presumably the same value; confirm.
            if (currentOffset < BlockSize)
                mTargetCDataBaseOffset = 0;

            mTargetCDataSize = TargetBufferSize;

            // check if this does not extend beyond EOF
            if (mTargetCDataBaseOffset + mTargetCDataSize > mTargetSize)
            {
                mTargetCDataSize = mTargetSize - mTargetCDataBaseOffset;
            }

            // we need to update the memory cache of target
            // TODO: Emit debug info here, if verbose is enabled.
            // cout << "[CacheReload] File position = " << static_cast<unsigned long>(targetCDataBaseOffset) << "\n";
            if (prog != null)
            {
                prog.OnPatchProgress(mTargetCDataBaseOffset, mTargetSize);
            }

            mTarget.Seek(mTargetCDataBaseOffset, SeekOrigin.Begin);
            // NOTE(review): Stream.Read may return fewer bytes than requested;
            // confirm mTarget fills the buffer in one call.
            mTarget.Read(mTargetCData, 0, (int)mTargetCDataSize);
        }

        SameBlock currentSameBlock = FindBlock(sourceTree, currentOffset);
        if (currentSameBlock != null)
        {
            // We have a match.
            SameBlock previousBlock = sameBlocks[sameBlocks.Count - 1];
            if (previousBlock.TargetOffset + previousBlock.Size > currentSameBlock.TargetOffset)
            {
                // There is overlap, resolve it.
                long difference = previousBlock.TargetOffset + previousBlock.Size - currentSameBlock.TargetOffset;
                currentSameBlock.SourceOffset += difference;
                currentSameBlock.TargetOffset += difference;
                currentSameBlock.Size -= difference;
            }

            // FIX: removed leftover Console.WriteLine(currentSameBlock.ToString())
            // debug output; the surrounding TODOs show debug output is intended
            // to be gated behind a verbose flag, not emitted unconditionally.
            sameBlocks.Add(currentSameBlock);

            // TODO: Emit debug info here, if verbose is enabled.
            currentOffset = currentSameBlock.TargetOffset + currentSameBlock.Size;
        }
        else
        {
            // No match, advance to the next byte.
            currentOffset++;
        }
    }

    // Add a block at the end to prevent bounds checking hassles.
    SameBlock lastBlock = new SameBlock();
    lastBlock.SourceOffset = 0;
    lastBlock.TargetOffset = mTargetSize;
    lastBlock.Size = 0;
    sameBlocks.Add(lastBlock);
}