public void RequestCancellationOnRunningTasks()
{
    lock (_gate)
    {
        // request to cancel all running works
        _cancellationMap.Do(p => p.Value.Cancel());
    }
}
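using System;
using System.Collections.Generic;

// A minimal sketch of the kind of Do extension the examples in this listing assume:
// a simple foreach wrapper over IEnumerable<T> (and therefore over a Dictionary<K, V>,
// whose elements are KeyValuePair<K, V>). Inferred from usage here; not the verbatim
// source of any of the libraries involved.
public static class DoExtensionsSketch
{
    public static void Do<T>(this IEnumerable<T> sequence, Action<T> action)
    {
        // Tolerate null sources, as the DoWithEmpty test further down expects.
        if (sequence == null)
            return;

        foreach (var item in sequence)
            action(item);
    }
}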
public StackFrame Clone()
{
    var newFrame = new StackFrame(LastFrame, CaseSensitivity);
    _frameSymbols.Do(keyValue => newFrame._frameSymbols.Add(keyValue.Key, keyValue.Value));
    return newFrame;
}
/// <summary>Gets Harmony version for all active Harmony instances</summary>
/// <param name="currentVersion">[out] The current Harmony version</param>
/// <returns>A dictionary containing assembly versions keyed by Harmony ID</returns>
public static Dictionary<string, Version> VersionInfo(out Version currentVersion)
{
    currentVersion = typeof(Harmony).Assembly.GetName().Version;
    var assemblies = new Dictionary<string, Assembly>();
    GetAllPatchedMethods().Do(method =>
    {
        PatchInfo info;
        lock (locker)
        {
            info = HarmonySharedState.GetPatchInfo(method);
        }
        info.prefixes.Do(fix => assemblies[fix.owner] = fix.PatchMethod.DeclaringType.Assembly);
        info.postfixes.Do(fix => assemblies[fix.owner] = fix.PatchMethod.DeclaringType.Assembly);
        info.transpilers.Do(fix => assemblies[fix.owner] = fix.PatchMethod.DeclaringType.Assembly);
        info.finalizers.Do(fix => assemblies[fix.owner] = fix.PatchMethod.DeclaringType.Assembly);
    });

    var result = new Dictionary<string, Version>();
    assemblies.Do(info =>
    {
        var assemblyName = info.Value.GetReferencedAssemblies()
            .FirstOrDefault(a => a.FullName.StartsWith("0Harmony, Version", StringComparison.Ordinal));
        if (assemblyName != null)
        {
            result[info.Key] = assemblyName.Version;
        }
    });
    return result;
}
public string AddManualDownload(Dictionary<string, byte[]> contents)
{
    var name = RandomName() + ".zip";

    using (FileStream fs = new FileStream(Path.Combine(DownloadsFolder, name), FileMode.Create))
    using (ZipArchive archive = new ZipArchive(fs, ZipArchiveMode.Create))
    {
        contents.Do(kv =>
        {
            var entry = archive.CreateEntry(kv.Key);
            using (var os = entry.Open())
                os.Write(kv.Value, 0, kv.Value.Length);
        });
    }

    File.WriteAllLines(Path.Combine(DownloadsFolder, name + Consts.MetaFileExtension),
        new string[] { "[General]", "manualURL=<TESTING>" });
    return name;
}
public override bool Prepare(Node node)
{
    if (!base.Prepare(node))
    {
        return false;
    }

    var DebugLevel = node.DebugLevel;
    var DebugMessage = node.DebugMessage;
    var targets = Targets(node.inputPorts);
    FieldToStat = new Dictionary<FieldInfo, StatDef>();
    StatDefOfFieldInfo = new List<FieldInfo>();

    foreach (Type type in GenTypes.AllTypesWithAttribute<DefOf>())
    {
        foreach (FieldInfo field in type.GetFields().Where(t => t.FieldType == typeof(StatDef)))
        {
            if (targets.GetData<StatDef>().FirstOrFallback(t => field.Name.Equals(t.defName)) is StatDef stat)
            {
                StatDefOfFieldInfo.Add(field);
                FieldToStat.Add(field, stat);
            }
        }
    }

    if (DebugLevel > 1)
    {
        DebugMessage.AppendLine($"Initialize stage: From [{defName}]: StatSearch method:\nFieldToStat:");
        FieldToStat.Do(t => DebugMessage.AppendLine($"{t.Key} : {t.Value} [{t.Key.DeclaringType}]"));
    }

    return true;
}
public Dictionary<string, Version> VersionInfo(out Version currentVersion)
{
    currentVersion = Assembly.GetExecutingAssembly().GetName().Version;
    var assemblies = new Dictionary<string, Assembly>();
    GetPatchedMethods().Do(method =>
    {
        var info = HarmonySharedState.GetPatchInfo(method);
        info.prefixes.Do(fix => assemblies[fix.owner] = fix.patch.DeclaringType.Assembly);
        info.postfixes.Do(fix => assemblies[fix.owner] = fix.patch.DeclaringType.Assembly);
        info.transpilers.Do(fix => assemblies[fix.owner] = fix.patch.DeclaringType.Assembly);
    });

    var result = new Dictionary<string, Version>();
    assemblies.Do(info =>
    {
        /* var assemblyName = info.Value.GetReferencedAssemblies().FirstOrDefault(a => a.FullName.StartsWith("0Harmony, Version"));
         * if (assemblyName != null)
         *     result[info.Key] = assemblyName.Version;
         */
        // result[info.Key] = info.Version;
    });
    return result;
}
public void DoWithEmpty()
{
    Dictionary<int, string> source = null;
    var result = new List<string>();

    source.Do((k, v) => result.Add(k.ToString() + "-" + v));

    Assert.AreEqual(0, result.Count);
}
public void Dispose()
{
    lock (_gate)
    {
        Dispose_NoLock();

        _cancellationMap.Do(p => p.Value.Cancel());
        _cancellationMap.Clear();
    }
}
public Dictionary<string, Version> VersionInfo(out Version currentVersion)
{
    currentVersion = Assembly.GetExecutingAssembly().GetName().Version;
    var assemblies = new Dictionary<string, Assembly>();
    Action<Patch> a1 = null;
    Action<Patch> a2 = null;
    Action<Patch> a3 = null;
    GetPatchedMethods().Do(delegate(MethodBase method)
    {
        var patchInfo = HarmonySharedState.GetPatchInfo(method);

        IEnumerable<Patch> prefixes = patchInfo.prefixes;
        Action<Patch> action;
        if ((action = a1) == null)
        {
            action = (a1 = delegate(Patch fix) { assemblies[fix.owner] = fix.patch.DeclaringType.Assembly; });
        }
        prefixes.Do(action);

        IEnumerable<Patch> postfixes = patchInfo.postfixes;
        Action<Patch> action2;
        if ((action2 = a2) == null)
        {
            action2 = (a2 = delegate(Patch fix) { assemblies[fix.owner] = fix.patch.DeclaringType.Assembly; });
        }
        postfixes.Do(action2);

        IEnumerable<Patch> transpilers = patchInfo.transpilers;
        Action<Patch> action3;
        if ((action3 = a3) == null)
        {
            action3 = (a3 = delegate(Patch fix) { assemblies[fix.owner] = fix.patch.DeclaringType.Assembly; });
        }
        transpilers.Do(action3);
    });

    var result = new Dictionary<string, Version>();
    assemblies.Do(delegate(KeyValuePair<string, Assembly> info)
    {
        var assemblyName = info.Value.GetReferencedAssemblies()
            .FirstOrDefault(a => a.FullName.StartsWith("0Harmony, Version"));
        if (assemblyName != null)
        {
            result[info.Key] = assemblyName.Version;
        }
    });
    return result;
}
public void Dispose()
{
    if (IsAborted)
    {
        return;
    }

    if (this == Root) // Root
    {
        Root = null;

        if (IsCompleted)
        {
            // Happy scenario:
            Connections.Do(x => x.Value.Item1.Close());
        }
        else // Root is not completed.
        {
            IsAborted = true;
            Connections.Do(x => x.Value.Item2.Rollback());
            Connections.Do(x => x.Value.Item2.Dispose());
            Connections.Do(x => x.Value.Item1.Close());
            TransactionRolledBack?.Invoke(this, EventArgs.Empty);
        }
    }
    else
    {
        Current = Parent;

        if (IsCompleted)
        {
            // A Sub-transaction has been happily completed.
            // Just wait for the parent.
        }
        else
        {
            // A sub transaction is not completed.
            Root?.Dispose();
        }
    }
}
public void DoWithNotEmpty()
{
    var source = new Dictionary<int, string> { { 1, "a" }, { 2, "b" }, { 3, "c" } };
    var result = new List<string>();

    source.Do((k, v) => result.Add(k.ToString() + "-" + v));

    Assert.AreEqual(source.Count, result.Count);
    Assert.AreEqual("1-" + source[1], result[0]);
    Assert.AreEqual("2-" + source[2], result[1]);
    Assert.AreEqual("3-" + source[3], result[2]);
}
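using System;
using System.Collections.Generic;

// The DoWithEmpty and DoWithNotEmpty tests above call Do with a (key, value) lambda
// rather than a KeyValuePair. A minimal sketch of such an overload, assuming the same
// null-tolerant behavior DoWithEmpty relies on (hypothetical helper, inferred from usage):
public static class DictionaryDoSketch
{
    public static void Do<TKey, TValue>(this IDictionary<TKey, TValue> dictionary, Action<TKey, TValue> action)
    {
        if (dictionary == null)
            return;

        foreach (var pair in dictionary)
            action(pair.Key, pair.Value);
    }
}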
internal static IEnumerator<ProgressReport> BuildModManifestEntriesLoop()
{
    stopwatch.Start();

    // there are no mods loaded, just return
    if (modLoadOrder == null || modLoadOrder.Count == 0)
    {
        yield break;
    }

    Log("");

    var jsonMerges = new Dictionary<string, List<string>>();
    var manifestMods = modLoadOrder.Where(name => entriesByMod.ContainsKey(name)).ToList();

    var entryCount = 0;
    var numEntries = 0;
    entriesByMod.Do(entries => numEntries += entries.Value.Count);

    foreach (var modName in manifestMods)
    {
        Log($"{modName}:");
        foreach (var modEntry in entriesByMod[modName])
        {
            yield return new ProgressReport(entryCount++ / ((float)numEntries), $"Loading {modName}", modEntry.Id);

            // type being null means we have to figure out the type from the path (StreamingAssets)
            if (modEntry.Type == null)
            {
                // TODO: + 16 is a little bizarre looking, it's the length of the substring + 1 because we want to get rid of it and the \
                var relPath = modEntry.Path.Substring(modEntry.Path.LastIndexOf("StreamingAssets", StringComparison.Ordinal) + 16);
                var fakeStreamingAssetsPath = Path.GetFullPath(Path.Combine(StreamingAssetsDirectory, relPath));
                if (!File.Exists(fakeStreamingAssetsPath))
                {
                    Log($"\tCould not find a file at {fakeStreamingAssetsPath} for {modName} {modEntry.Id}. NOT LOADING THIS FILE");
                    continue;
                }

                var types = GetTypesFromCacheOrManifest(CachedVersionManifest, modEntry.Id);
                if (types == null)
                {
                    Log($"\tCould not find an existing VersionManifest entry for {modEntry.Id}. Is this supposed to be a new entry? Don't put new entries in StreamingAssets!");
                    continue;
                }

                // this is getting merged later and then added to the BTRL entries then
                if (Path.GetExtension(modEntry.Path).ToLower() == ".json" && modEntry.ShouldMergeJSON)
                {
                    if (!jsonMerges.ContainsKey(modEntry.Id))
                    {
                        jsonMerges[modEntry.Id] = new List<string>();
                    }

                    if (jsonMerges[modEntry.Id].Contains(modEntry.Path)) // TODO: is this necessary?
                    {
                        continue;
                    }

                    // this assumes that .json can only have a single type
                    // typeCache will always contain this path
                    modEntry.Type = GetTypesFromCache(modEntry.Id)[0];

                    Log($"\tMerge: \"{GetRelativePath(modEntry.Path, ModsDirectory)}\" ({modEntry.Type})");

                    jsonMerges[modEntry.Id].Add(modEntry.Path);
                    continue;
                }

                foreach (var type in types)
                {
                    var subModEntry = new ModDef.ManifestEntry(modEntry, modEntry.Path, modEntry.Id);
                    subModEntry.Type = type;
                    AddModEntry(CachedVersionManifest, subModEntry);

                    // clear json merges for this entry, mod is overwriting the original file, previous mods merges are tossed
                    if (jsonMerges.ContainsKey(modEntry.Id))
                    {
                        jsonMerges.Remove(modEntry.Id);
                        Log($"\t\tHad merges for {modEntry.Id} but had to toss, since original file is being replaced");
                    }
                }

                continue;
            }

            // get "fake" entries that don't actually go into the game's VersionManifest
            // add videos to be loaded from an external path
            switch (modEntry.Type)
            {
                case "Video":
                    var fileName = Path.GetFileName(modEntry.Path);
                    if (fileName != null && File.Exists(modEntry.Path))
                    {
                        Log($"\tVideo: \"{GetRelativePath(modEntry.Path, ModsDirectory)}\"");
                        ModVideos.Add(fileName, modEntry.Path);
                    }
                    continue;

                case "AdvancedJSONMerge":
                    var id = AdvancedJSONMerger.GetTargetID(modEntry.Path);

                    // need to add the types of the file to the typeCache, so that they can be used later
                    // if merging onto a file added by another mod, the type is already in the cache
                    var types = GetTypesFromCacheOrManifest(CachedVersionManifest, id);

                    if (!jsonMerges.ContainsKey(id))
                    {
                        jsonMerges[id] = new List<string>();
                    }

                    if (jsonMerges[id].Contains(modEntry.Path)) // TODO: is this necessary?
                    {
                        continue;
                    }

                    Log($"\tAdvancedJSONMerge: \"{GetRelativePath(modEntry.Path, ModsDirectory)}\" ({types[0]})");

                    jsonMerges[id].Add(modEntry.Path);
                    continue;
            }

            // non-streamingassets json merges
            if (Path.GetExtension(modEntry.Path)?.ToLower() == ".json" && modEntry.ShouldMergeJSON)
            {
                // have to find the original path for the manifest entry that we're merging onto
                var matchingEntry = GetEntryFromCachedOrBTRLEntries(modEntry.Id);
                if (matchingEntry == null)
                {
                    Log($"\tCould not find an existing VersionManifest entry for {modEntry.Id}!");
                    continue;
                }

                var matchingPath = Path.GetFullPath(matchingEntry.FilePath);

                if (!jsonMerges.ContainsKey(modEntry.Id))
                {
                    jsonMerges[modEntry.Id] = new List<string>();
                }

                if (jsonMerges[modEntry.Id].Contains(modEntry.Path)) // TODO: is this necessary?
                {
                    continue;
                }

                Log($"\tMerge: \"{GetRelativePath(modEntry.Path, ModsDirectory)}\" ({modEntry.Type})");

                // this assumes that .json can only have a single type
                modEntry.Type = matchingEntry.Type;
                TryAddTypeToCache(modEntry.Id, modEntry.Type);
                jsonMerges[modEntry.Id].Add(modEntry.Path);
                continue;
            }

            AddModEntry(CachedVersionManifest, modEntry);
            TryAddTypeToCache(modEntry.Id, modEntry.Type);

            // clear json merges for this entry, mod is overwriting the original file, previous mods merges are tossed
            if (jsonMerges.ContainsKey(modEntry.Id))
            {
                jsonMerges.Remove(modEntry.Id);
                Log($"\t\tHad merges for {modEntry.Id} but had to toss, since original file is being replaced");
            }
        }
    }

    WriteJsonFile(TypeCachePath, typeCache);

    // perform merges into cache
    Log("");
    LogWithDate("Doing merges...");
    yield return new ProgressReport(1, "Merging", "");

    var mergeCount = 0;
    foreach (var id in jsonMerges.Keys)
    {
        var existingEntry = GetEntryFromCachedOrBTRLEntries(id);
        if (existingEntry == null)
        {
            Log($"\tHave merges for {id} but cannot find an original file! Skipping.");
            continue;
        }

        var originalPath = Path.GetFullPath(existingEntry.FilePath);
        var mergePaths = jsonMerges[id];

        if (!jsonMergeCache.HasCachedEntry(originalPath, mergePaths))
        {
            yield return new ProgressReport(mergeCount++ / ((float)jsonMerges.Count), "Merging", id);
        }

        var cachePath = jsonMergeCache.GetOrCreateCachedEntry(originalPath, mergePaths);

        // something went wrong (the parent json prob had errors)
        if (cachePath == null)
        {
            continue;
        }

        var cacheEntry = new ModDef.ManifestEntry(cachePath)
        {
            ShouldMergeJSON = false,
            Type = GetTypesFromCache(id)[0], // this assumes only one type for each json file
            Id = id
        };

        AddModEntry(CachedVersionManifest, cacheEntry);
    }

    jsonMergeCache.WriteCacheToDisk(Path.Combine(CacheDirectory, MERGE_CACHE_FILE_NAME));

    Log("");
    Log("Syncing Database");
    yield return new ProgressReport(1, "Syncing Database", "");

    // check if files removed from DB cache
    var rebuildDB = false;
    var replacementEntries = new List<VersionManifestEntry>();
    var removeEntries = new List<string>();
    foreach (var path in dbCache.Keys)
    {
        var absolutePath = ResolvePath(path, GameDirectory);

        // check if the file in the db cache is still used
        if (BTRLEntries.Exists(x => x.Path == absolutePath))
        {
            continue;
        }

        Log($"\tNeed to remove DB entry from file in path: {path}");

        // file is missing, check if another entry exists with same filename in manifest or in BTRL entries
        var fileName = Path.GetFileName(path);
        var existingEntry = BTRLEntries.FindLast(x => Path.GetFileName(x.Path) == fileName)?.GetVersionManifestEntry()
            ?? CachedVersionManifest.Find(x => Path.GetFileName(x.FilePath) == fileName);

        if (existingEntry == null)
        {
            Log("\t\tHave to rebuild DB, no existing entry in VersionManifest matches removed entry");
            rebuildDB = true;
            break;
        }

        replacementEntries.Add(existingEntry);
        removeEntries.Add(path);
    }

    // add removed entries replacements to db
    if (!rebuildDB)
    {
        // remove old entries
        foreach (var removeEntry in removeEntries)
        {
            dbCache.Remove(removeEntry);
        }

        using (var metadataDatabase = new MetadataDatabase())
        {
            foreach (var replacementEntry in replacementEntries)
            {
                if (AddModEntryToDB(metadataDatabase, Path.GetFullPath(replacementEntry.FilePath), replacementEntry.Type))
                {
                    Log($"\t\tReplaced DB entry with an existing entry in path: {Path.GetFullPath(replacementEntry.FilePath)}");
                }
            }
        }
    }

    // if an entry has been removed and we cannot find a replacement, have to rebuild the mod db
    if (rebuildDB)
    {
        if (File.Exists(ModMDDBPath))
        {
            File.Delete(ModMDDBPath);
        }

        File.Copy(MDDBPath, ModMDDBPath);
        dbCache = new Dictionary<string, DateTime>();
    }

    // add needed files to db
    var addCount = 0;
    using (var metadataDatabase = new MetadataDatabase())
    {
        foreach (var modEntry in BTRLEntries)
        {
            if (modEntry.AddToDB && AddModEntryToDB(metadataDatabase, modEntry.Path, modEntry.Type))
            {
                yield return new ProgressReport(addCount / ((float)BTRLEntries.Count), "Populating Database", modEntry.Id);
                Log($"\tAdded/Updated {modEntry.Id} ({modEntry.Type})");
            }
            addCount++;
        }
    }

    // write db/type cache to disk
    WriteJsonFile(DBCachePath, dbCache);

    stopwatch.Stop();
    Log("");
    LogWithDate($"Done. Elapsed running time: {stopwatch.Elapsed.TotalSeconds} seconds\n");
    CloseLogStream();
    yield break;
}
public Task UploadAll() => Agents.Do(i => HandleRefreshMessage(i.Key));
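using System;
using System.Collections.Generic;
using System.Threading.Tasks;

// UploadAll above hands the result of Do straight back as a Task, which implies an
// asynchronous overload that runs a Task-returning handler for each entry. A hedged
// sketch of what such an overload could look like (hypothetical, inferred from usage;
// the real project may sequence or parallelize its agents differently):
public static class AsyncDoSketch
{
    public static async Task Do<TKey, TValue>(
        this IDictionary<TKey, TValue> dictionary,
        Func<KeyValuePair<TKey, TValue>, Task> action)
    {
        // Run the handler for one entry at a time; Task.WhenAll over the projected
        // tasks would be the parallel variant.
        foreach (var pair in dictionary)
            await action(pair);
    }
}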
private void Initialize()
{
    if (_initialized)
        return;

    _stateChangeStrings = GetStateChangeStrings();
    _actionMap = GetActionMap();
    _characterMap = GetCharacterMap();
    _defaultStateChangeType = GetDefaultStateChange();
    _sequencedDictionary = new SequencedDictionary<char, CharacterType>();
    _characterMap.Do(pair => pair.Value.GetStrings().Do(s => _sequencedDictionary.Add(s, pair.Key)));
    _initialized = true;
}
public override bool Perform(Node node)
{
    if (!base.Perform(node))
    {
        return false;
    }

    var typeMethods = BaseInput(node.inputPorts).GetData<TypeMethod>().ToList();
    var statListIndexed = InputA(node.inputPorts).GetData<List<ItemPos<StatDef>>>().ToList();
    var statJobDef = new Dictionary<StatDef, List<JobDef>>();
    var statList = statListIndexed.SelectMany(t => t.Select(tt => tt.target)).ToList();
    var duplicateToolType = new Dictionary<JobDef, List<ToolType>>();

    statList.RemoveDuplicates();
    statList.Do(t => statJobDef.Add(t, new List<JobDef>()));

    for (int i = 0; i < typeMethods.Count; i++)
    {
        var jobDriver = typeMethods[i].type;
        var jobList = DefDatabase<JobDef>.AllDefs.Where(t => jobDriver.IsAssignableFrom(t.driverClass));
        foreach (var stat in statListIndexed[i].Select(t => t.target))
        {
            statJobDef[stat].AddRange(jobList);
        }
    }

    statJobDef.Do(t => t.Value.RemoveDuplicates());

    foreach (var toolType in DefDatabase<ToolType>.AllDefs)
    {
        toolType.Initialize();
        if (!toolType.jobList.NullOrEmpty())
        {
            toolType.jobList.Do(t => Dictionaries.jobToolType.Add(t, toolType));
        }
        else
        {
            var jobList = new List<JobDef>();
            foreach (var stat in toolType.workStatFactors.Select(t => t.stat))
            {
                if (statJobDef.TryGetValue(stat, out var jobs))
                {
                    jobList.AddRange(jobs);
                }
            }

            foreach (var stat in toolType.workStatOffset.Select(t => t.stat))
            {
                if (statJobDef.TryGetValue(stat, out var jobs))
                {
                    jobList.AddRange(jobs);
                }
            }

            jobList.RemoveDuplicates();

            foreach (var job in jobList.Where(t => !toolType.jobException.Contains(t)))
            {
                if (Dictionaries.jobToolType.ContainsKey(job))
                {
                    if (duplicateToolType.TryGetValue(job, out var list))
                    {
                        list.Add(toolType);
                    }
                    else
                    {
                        duplicateToolType.Add(job, new List<ToolType>
                        {
                            Dictionaries.jobToolType[job],
                            toolType,
                        });
                    }
                }
                else
                {
                    Dictionaries.jobToolType.Add(job, toolType);
                }
            }
        }
    }

    if (!duplicateToolType.EnumerableNullOrEmpty())
    {
        duplicateToolType.Keys.Do(t => Dictionaries.jobToolType.Remove(t));
        if (node.DebugLevel > -1)
        {
            var warn = new StringBuilder("TF_BaseMessage".Translate() + ": Following jobs are ignored due to duplicate ToolTypes assigned:\n");
            foreach (var item in duplicateToolType)
            {
                warn.Append($"{item.Key} : ");
                foreach (var toolType in item.Value)
                {
                    warn.Append($"{toolType}, ");
                }
                warn.Length -= 2;
                warn.AppendLine();
            }
            Log.Warning(warn.ToString());
        }
    }

    foreach (var item in Dictionaries.jobToolType)
    {
        var toolType = item.Value;
        var stats = toolType.workStatFactors.Select(t => t.stat).Union(toolType.workStatOffset.Select(t => t.stat)).ToList();
        stats.RemoveDuplicates();
        foreach (var stat in stats)
        {
            Dictionaries.jobStatToolType.Add((item.Key, stat), toolType);
        }
    }

    if (node.DebugLevel > 0)
    {
        node.DebugMessage.AppendLine("TF_BaseMessage".Translate() + " JobDef <-> ToolType assignment");
        Dictionaries.jobToolType.Do(t => node.DebugMessage.AppendLine($"{t.Key} : {t.Value}"));
        node.DebugMessage.AppendLine("\n");
        Dictionaries.jobStatToolType.Do(t => node.DebugMessage.AppendLine($"{t.Key.job} : {t.Key.stat} : {t.Value}"));
        node.DebugMessage.AppendLine();
        Dictionaries.billGiverToolType.Do(t => node.DebugMessage.AppendLine($"{t.Key} : {t.Value}"));
        node.DebugMessage.AppendLine();
    }

    return true;
}
private Task Connect(HubConnection connection)
{
    connection.StateChanged += change => _states.OnNext((ConnectionState)change.NewState);
    connection.Error += exception => _states.OnNext(ConnectionState.Error);

    var errorsProxy = connection.CreateHubProxy("relmah-errors");

    //streams by error type
    ErrorTypes = _errors.GroupBy(e => new ErrorType(e.SourceId, e.Error.Type));

    //errors
    errorsProxy.On<ErrorPayload>(
        "error",
        p => _errors.OnNext(p));

    //sources visibility
    var sources = new HashSet<string>();
    errorsProxy.On<IEnumerable<Source>, IEnumerable<Source>>(
        "sources",
        (es, rs) =>
        {
            foreach (var e in es.Where(e => !sources.Contains(e.SourceId)))
            {
                _sources.OnNext(new SourceOperation(e, SourceOperationType.Added));
                sources.Add(e.SourceId);
            }

            foreach (var r in rs.Where(e => sources.Contains(e.SourceId)))
            {
                _sources.OnNext(new SourceOperation(r, SourceOperationType.Removed));
                sources.Remove(r.SourceId);
            }
        });

    //recaps
    var groups = new Dictionary<string, IDisposable>();
    errorsProxy.On<Recap>(
        "recap",
        p =>
        {
            groups["*"] = ErrorTypes.Subscribe(et =>
            {
                var key = et.Key.SourceId + '-' + et.Key.Type;

                groups.Do(key, d => d.Dispose());

                var rs =
                    from a in p.Sources
                    where a.SourceId == et.Key.SourceId
                    from b in a.Types
                    where b.Name == et.Key.Type
                    select b.Measure;

                var r = rs.Aggregate(0, (acc, cur) => acc + cur);

                groups[key] = et
                    .Scan(0, (ka, ep) => ka + 1)
                    .Subscribe(e =>
                    {
                        _recaps.OnNext(new RecapAggregate(et.Key.SourceId, et.Key.Type, e + r));
                    });
            });
        });

    return connection.Start();
}