/// <summary> /// Remaps all entity references within the given object using the specified <see cref="EntityRemapUtility.EntityRemapInfo"/>. /// </summary> /// <param name="obj">The object to remap references for.</param> /// <param name="entityRemapInfo">The entity remap information.</param> /// <exception cref="ArgumentNullException">The given object was null.</exception> /// <exception cref="MissingPropertyBagException">The given object has no property bag associated with it.</exception>
public void RemapEntityReferences(ref object obj, EntityRemapUtility.EntityRemapInfo *entityRemapInfo)
{
    // Point the visitor at the remap table; the prefab fields are nulled
    // because this entry point performs a plain remap (no prefab patching).
    m_Info = entityRemapInfo;
    m_PrefabSrc = null;
    m_PrefabDst = null;
    m_PrefabCount = 0;
    // Drop references gathered by any previous visit before walking again.
    m_References?.Clear();
    // Drive the property visitor over the object graph; this is what raises
    // the documented exceptions for a null object / missing property bag.
    GetPropertyBag(obj).Accept(this, ref obj);
}
/// <summary>
/// Empties every sequence collection and resets the frame window to its
/// defaults (start at frame 0, duration of one frame).
/// </summary>
public void ClearSequences()
{
    // Each collection may not have been allocated yet; clear only if present.
    _initSequences?.Clear();
    _scheduleSequences?.Clear();
    _schedulingSequences?.Clear();
    _scheduledSequences?.Clear();
    _scheduleSequencesEndFrame = null;
    StartFrame = 0;
    DurationFrame = 1;
}
// filter out by if targets are within range
public void FilterEntities(BattleEntity sourceEntity, HashSet<BattleEntity> entities)
{
    // Resolve which row this rule inspects; FRONT is the fallback, matching
    // the FRONT_COUNT_* cases and any unrecognised condition.
    PCCharacter.RowPosition rowPosition;
    switch (mRowCondition)
    {
        case AISkillRule.RowCondition.BACK_COUNT_GT:
        case AISkillRule.RowCondition.BACK_COUNT_LT:
            rowPosition = PCCharacter.RowPosition.BACK;
            break;
        case AISkillRule.RowCondition.MIDDLE_COUNT_GT:
        case AISkillRule.RowCondition.MIDDLE_COUNT_LT:
            rowPosition = PCCharacter.RowPosition.MIDDLE;
            break;
        default:
            rowPosition = PCCharacter.RowPosition.FRONT;
            break;
    }

    // Keep only PC entities standing in the resolved row.
    entities.RemoveWhere(delegate(BattleEntity obj)
    {
        PCBattleEntity pc = obj as PCBattleEntity;
        return pc == null || pc.pcCharacter.rowPosition != rowPosition;
    });

    // Evaluate the count threshold; when the condition fails, the rule
    // yields no valid targets at all.
    bool conditionMet = true;
    switch (mRowCondition)
    {
        case AISkillRule.RowCondition.BACK_COUNT_GT:
        case AISkillRule.RowCondition.FRONT_COUNT_GT:
        case AISkillRule.RowCondition.MIDDLE_COUNT_GT:
            conditionMet = entities.Count > mFilterCount;
            break;
        case AISkillRule.RowCondition.BACK_COUNT_LT:
        case AISkillRule.RowCondition.FRONT_COUNT_LT:
        case AISkillRule.RowCondition.MIDDLE_COUNT_LT:
            conditionMet = entities.Count < mFilterCount;
            break;
    }
    if (!conditionMet)
    {
        entities.Clear();
    }
}
/// <summary>
/// Validates a 9x9 sudoku board (0 = empty cell): no non-zero value may
/// repeat within a line along either index, nor within any 3x3 block.
/// </summary>
/// <param name="board">The 9x9 board to check.</param>
/// <returns>true when no duplicate is found; otherwise false.</returns>
public bool IsValidSudoku(int[,] board)
{
    // Two scratch sets let one pass cover both orientations at once.
    var lineA = new HashSet<int>();
    var lineB = new HashSet<int>();

    // Lines: for a fixed index i, scan along each axis simultaneously.
    for (int i = 0; i < 9; i++)
    {
        lineA.Clear();
        lineB.Clear();
        for (int j = 0; j < 9; j++)
        {
            int a = board[j, i];
            int b = board[i, j];
            if (a != 0 && !lineA.Add(a)) return false;
            if (b != 0 && !lineB.Add(b)) return false;
        }
    }

    // Blocks: index 0..8 maps to the nine 3x3 sub-squares.
    for (int block = 0; block < 9; block++)
    {
        lineA.Clear();
        int cStart = (block % 3) * 3;
        int rStart = (block / 3) * 3;
        for (int c = cStart; c < cStart + 3; c++)
        {
            for (int r = rStart; r < rStart + 3; r++)
            {
                int value = board[c, r];
                if (value != 0 && !lineA.Add(value)) return false;
            }
        }
    }
    return true;
}
/// <summary>
/// Builds the parser automaton for grammar <paramref name="g"/>: seeds the
/// state set with the closure of the start items and repeatedly adds Goto
/// states and transitions until no new state or transition appears.
/// </summary>
public static Automaton CreateAutomaton(Grammar g)
{
    //initialise to closure of start item
    HashSet<ParseState> states = new HashSet<ParseState>();
    states.Add(g.Productions.Where(a => a.Head.Equals(g.Root)).Select(a => new Item(a, 0)).Closure(g));
    HashSet<ParseStateTransition> transitions = new HashSet<ParseStateTransition>();
    // Scratch sets reused (cleared, not reallocated) on each round.
    HashSet<ParseState> sToAdd = new HashSet<ParseState>();
    HashSet<ParseStateTransition> tToAdd = new HashSet<ParseStateTransition>();
    do
    {
        sToAdd.Clear();
        tToAdd.Clear();
        foreach (var state in states)
        {
            foreach (var item in state)
            {
                // An item with the dot at the end has no outgoing transition.
                if (item.Production.Body.Length == item.Position) continue;
                BnfTerm term = item.Production.Body[item.Position];
                ParseState j = state.Goto(term, g);
                sToAdd.Add(j);
                tToAdd.Add(new ParseStateTransition(state, term, j));
            }
        }
        // Non-short-circuit '|' is deliberate: BOTH unions must execute every
        // iteration, even when the first already reports additions.
    } while (states.UnionWithAddedCount(sToAdd) != 0 | transitions.UnionWithAddedCount(tToAdd) != 0);
    return new Automaton(transitions, g);
}
/// <summary>
/// Accumulates prefix/suffix pair counts over <paramref name="set"/>:
/// whenever the numbers seen on the prefix side equal the numbers seen on
/// the suffix side, the product of the current ranges is added to the total.
/// The result is capped at TOO_LARGE.
/// </summary>
public int GetPrefixSuffixSetCount(int[] set)
{
    int output = 0;
    var prefixesNumbersUsed = new HashSet<int>();
    var suffixNumbersUsed = new HashSet<int>();
    foreach (var rangePair in GetPrefixSuffixIndexRanges(set))
    {
        var prefixRange = rangePair.Prefix;
        var suffixRange = rangePair.Suffix;
        prefixesNumbersUsed.Add(prefixRange.Number);
        suffixNumbersUsed.Add(suffixRange.Number);
        // SetEquals is order-insensitive: both sides saw the same numbers.
        if (prefixesNumbersUsed.SetEquals(suffixNumbersUsed))
        {
            //No need to keep comparing values that worked already
            prefixesNumbersUsed.Clear();
            suffixNumbersUsed.Clear();
            output += (prefixRange.Range * suffixRange.Range);
        }
        // Bail out as soon as the running total exceeds the sentinel cap.
        if (output > TOO_LARGE) return TOO_LARGE;
    }
    return output;
}
/// <summary>
/// Verifies that two Village instances created a second apart seed their
/// Random instances with different values: the detoured Random(int)
/// constructor records every seed, and two distinct recordings are expected.
/// </summary>
public void Constructor_shall_not_call_within_short_timeframe_to_generate_unique_information()
{
    using (new IndirectionsContext())
    {
        // Arrange
        var seeds = new HashSet<int>();
        // Detour Random(int): invoke the real constructor, then record the seed.
        PRandom.ConstructorInt32().Body = (@this, seed) =>
        {
            IndirectionsContext.ExecuteOriginal(() =>
            {
                var ctor = typeof(Random).GetConstructor(new[] { typeof(int) });
                ctor.Invoke(@this, new object[] { seed });
            });
            seeds.Add(seed);
        };
        new Random(); // preparing JIT
        // Discard the warm-up seed so only the Village-created seeds remain.
        seeds.Clear();

        // Act
        var vil1 = new Village();
        // A full second's pause ensures a time-based seed would differ.
        Thread.Sleep(TimeSpan.FromSeconds(1));
        var vil2 = new Village();

        // Assert
        // The HashSet collapses duplicates, so a count of 2 proves the two
        // constructions used distinct seeds.
        Assert.AreEqual(2, seeds.Count);
    }
}
/// <summary>
/// Prints the dependency list for the configured project, one section per
/// target framework (restricted to the framework passed via options, if any).
/// </summary>
/// <returns>0 on success, 3 when listing a framework's dependencies fails.</returns>
public int Execute()
{
    var result = 0;
    _options.Reports.Information.WriteLine("List dependencies for {0} ({1})", _options.Project.Name, _options.Project.ProjectFilePath);
    var frameworks = new HashSet<FrameworkName>(_options.Project.GetTargetFrameworks().Select(f => f.FrameworkName));
    if (_options.Framework != null)
    {
        if (frameworks.Contains(_options.Framework))
        {
            // Narrow the run to just the explicitly requested framework.
            frameworks.Clear();
            frameworks.Add(_options.Framework);
        }
        else
        {
            _options.Reports.Error.WriteLine("Project doesn't support framework: {0}", _options.Framework.FullName);
            // NOTE(review): reports an error but returns 0 (success), unlike
            // the failure branch below that returns 3 — confirm intended.
            return 0;
        }
    }
    foreach (var framework in frameworks)
    {
        _options.Reports.Information.WriteLine("[Target framework {0}]", framework.Identifier.ToString());
        var operation = new DependencyListOperation(_options, framework);
        if (!operation.Execute())
        {
            _options.Reports.Error.WriteLine("There was an error listing the dependencies");
            return 3;
        }
    }
    return result;
}
/// <summary>
/// Writes a copy of <paramref name="file"/> to "&lt;file&gt;U.txt" with duplicate
/// lines removed (first occurrence wins). Does nothing if the file is missing.
/// </summary>
/// <param name="file">Path of the text file to de-duplicate.</param>
public static void UnRepeat(string file)
{
    var fi = new FileInfo(file);
    if (fi.Exists == false) return;
    // Track full line contents rather than GetHashCode() values: two distinct
    // lines can share a hash code, which previously caused distinct lines to
    // be silently dropped from the output.
    HashSet<string> seen = new HashSet<string>();
    using (StreamWriter sw = new StreamWriter(fi.FullName + "U.txt"))
    using (var sr = new StreamReader(file))
    {
        string line;
        while ((line = sr.ReadLine()) != null)
        {
            // HashSet<string>.Add returns false for lines already written.
            if (seen.Add(line))
            {
                sw.WriteLine(line);
            }
        }
    }
}
/// <summary>
/// Computes the closure of <paramref name="items"/>: for every item whose dot
/// precedes a non-terminal, the productions of that non-terminal are added as
/// position-0 items, repeating until no new item appears.
/// </summary>
public static ParseState Closure(this IEnumerable<Item> items, Grammar grammar)
{
    HashSet<Item> closure = new HashSet<Item>(items);
    // Scratch set reused each pass; newly discovered items merge in below.
    HashSet<Item> toAdd = new HashSet<Item>();
    do
    {
        toAdd.Clear();
        foreach (var item in closure)
        {
            // Dot at the end of the body: nothing left to expand.
            if (item.Position == item.Production.Body.Length) continue;
            BnfTerm term = item.Production.Body[item.Position];
            if (term is NonTerminal)
            {
                NonTerminal nonTerm = term as NonTerminal;
                // Pull in every production headed by the non-terminal.
                foreach (var production in grammar.Productions.Where(a => a.Head.Equals(nonTerm)))
                    toAdd.Add(new Item(production, 0));
            }
        }
    } while (closure.UnionWithAddedCount(toAdd) > 0);
    return new ParseState(closure, IsAcceptingState(closure, grammar));
}
/// <summary>
/// Invalidates every cached affix-derived value on this comp so it will be
/// recomputed on next access, and forces the parent's VerbTracker to rebuild
/// its verb list.
/// </summary>
internal void ClearAffixCaches()
{
    affixStringsCached?.Clear();
    affixDefDictCached?.Clear();
    affixStringsDictCached?.Clear();
    modifiersCached?.Clear();
    ttlAffixPoints = null;
    verbProperties = null;
    tools = null;
    overlayIcon = null;
    uiIcon = null;
    // Clear the VerbTracker cache
    var equippable = parent.TryGetComp <CompEquippable>();
    if (equippable != null)
    {
        // [Reflection] equippable.VerbTracker.verbs = null
        // The field is private, so it is nulled via reflection; the tracker
        // lazily regenerates the list when next queried.
        FieldInfo verbsField = AccessTools.Field(typeof(VerbTracker), "verbs");
        verbsField.SetValue(equippable.VerbTracker, null);
    }
}
/// <summary>
/// Counts the reduced proper fractions numerator/denominator, with
/// denominators up to <paramref name="max"/>, lying strictly between the
/// configured begin and end fractions (per IsFirstHigher).
/// </summary>
private ulong GetBetweenCount(int max)
{
    ulong count = 0;
    // Per-denominator sieve of numerators already known to share a factor.
    HashSet<int> seived = new HashSet<int>();
    for (int denominator = 2; denominator <= max; denominator++)
    {
        seived.Clear();
        for (int numerator = 1; numerator < denominator; numerator++)
        {
            if (!seived.Contains(numerator))
            {
                // Count the fraction when it is in lowest terms and strictly
                // between the begin and end bounds.
                if ((denominator % numerator != 0 || numerator == 1) && IsFirstHigher(numerator, denominator, _numeratorBegin, _denominatorBegin) && IsFirstHigher(_numeratorEnd, _denominatorEnd, numerator, denominator))
                {
                    count++;
                }
                // When numerator (>1) divides the denominator, all of its
                // multiples share that factor too: sieve them out.
                if (denominator % numerator == 0 && numerator != 1)
                {
                    int composite = numerator;
                    do
                    {
                        seived.Add(composite);
                        composite += numerator;
                    } while (composite < denominator);
                }
            }
        }
    }
    return count;
}
/// <summary>
/// Sums the distinct multiples of 3 or 5 below 1000, after first
/// sanity-checking the approach against the known answer (23) for
/// multiples below 10.
/// </summary>
public static int SolveProblem()
{
    HashSet<int> multiples = new HashSet<int>();

    // Sanity check: the same procedure below 10 must total 23.
    multiples.UnionWith(FindMultiplesBelow(3, 10));
    multiples.UnionWith(FindMultiplesBelow(5, 10));
    if (multiples.Aggregate((total, i) => total + i) != 23)
    {
        Console.WriteLine("WOOPS -- " + string.Join(", ", multiples));
    }

    // Real computation: multiples of 3 or 5 below 1000 (set removes overlaps).
    multiples.Clear();
    multiples.UnionWith(FindMultiplesBelow(3, 1000));
    multiples.UnionWith(FindMultiplesBelow(5, 1000));
    return multiples.Aggregate((total, i) => total + i);
}
/// <summary>
/// Timer callback: fetches messages newer than the latest cached one, merges
/// them into a copy of the cache, trims the copy down to the newest
/// MaxCacheCount entries, then atomically publishes the copy.
/// </summary>
/// <param name="state">Unused timer state.</param>
private void PollMessages(object state)
{
    // Work on a copy so the live cache is only ever swapped under the lock.
    var copyOfCachedMessages = new HashSet<Message>(_messageCache);
    var sinceId = copyOfCachedMessages.OrderByDescending(x => x.Created).First().IdValue;
    var messageService = CreateBaseMessageService.Invoke();
    var messages = messageService.GetMessagesSinceAsync(sinceId).Result.ToList();
    if (messages.Count == 0) return;

    foreach (var message in messages)
    {
        copyOfCachedMessages.Add(message);
    }

    if (copyOfCachedMessages.Count > MaxCacheCount)
    {
        // Materialize with ToList() BEFORE clearing: the LINQ query is
        // evaluated lazily over copyOfCachedMessages, so enumerating it after
        // Clear() would read an empty source and wipe the entire cache.
        var reducedCachedMessages = copyOfCachedMessages
            .OrderByDescending(x => x.Created)
            .Take(MaxCacheCount)
            .ToList();
        copyOfCachedMessages.Clear();
        foreach (var message in reducedCachedMessages)
        {
            copyOfCachedMessages.Add(message);
        }
    }

    lock (_padlock)
    {
        _messageCache = copyOfCachedMessages;
    }
}
[Test] // See Bug #135
// Generates 100 key pairs and asserts that none repeats: HashSet.Add returns
// false on a duplicate, which would fail the assertion. The sets are seeded
// with one well-known pair so regenerating that exact pair is also caught.
public void Ensure_To_Not_Generate_Identical_Keys()
{
    var passPhrase = "test";
    var privKeySet = new HashSet<string>();
    var pubKeySet = new HashSet<string>();
    // add well known key
    privKeySet.Add(
        "MIICITAjBgoqhkiG9w0BDAEDMBUEEF5Fx1gxrWd+0G10a7+UbxQCAQoEggH4SUUim2C3kcHApCKVgIeXpKlZQHcaRgfWt0rVEWr8zRnzO9xT5itU2Sw7j0N3oh6cPer/QGNCmAgnRyiDatruJznDPOmMzK5Yskj6mlCaY6JEjcol+E4SBZJDgvIejy8HVCy+DOIR42JXs9oxgeq8eqB+0RZwvDMBG2hrUjnZ4/hPKRPJY134cqTH68jLv6SXglIPcrL9OxOwdzJBaq0ngSBfqhBWbLRIy/Th2btl9Q/0b+sZxG6r2b80wOxIewlr6EUqXtMaA8Bo5dgVZt1itWYafIAbLWzjZavwdO+EkUMCjZhsfvbXSCmcLRmitdJ6beG7jg7R6m6Q92DpU3qZhEio9akX3MQmOTO63Er4T2t6HHYnTzPaZPjdn8D+8lcTUntp/0vD8SvC3+Cb7tZOHSVGMUDdj7WIW+Bl/5bhdmnChE83HSxR4OsBjLATuZOpYtOefWbXyT8qsUn1IouaCjH+BYejBIPrmFVVl0WZADtbyE0LAOyHCD2quAjCpIwXXONG/gXm+XVGst5clbcuxaG4TxKWA8ifIXaio3aJgLfI+D0Izt2GscKRg6oGTlbC3YFIJg+PAH3A4qufoRSPmtREz0oR1X1ZsS6m/IKezf8vl3S+fSpmR/mUuc6uBx9qI9yJIEW/In90r5vO9fKGusEElP6svlub");
    pubKeySet.Add(
        "MIIBKjCB4wYHKoZIzj0CATCB1wIBATAsBgcqhkjOPQEBAiEA/////wAAAAEAAAAAAAAAAAAAAAD///////////////8wWwQg/////wAAAAEAAAAAAAAAAAAAAAD///////////////wEIFrGNdiqOpPns+u9VXaYhrxlHQawzFOw9jvOPD4n0mBLAxUAxJ02CIbnBJNqZnjhE50mt4GffpAEIQNrF9Hy4SxCR/i85uVjpEDydwN9gS3rM6D0oTlF2JjClgIhAP////8AAAAA//////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABNVLQ1xKY80BFMgGXec++Vw7n8vvNrq32PaHuBiYMm0PEj2JoB7qSSWhfgcjxNVJsxqJ6gDQVWgl0r7LH4dr0KU=");
    for (int i = 0; i < 100; i++)
    {
        var keyGenerator = new KeyGenerator(256); //default key size
        var pair = keyGenerator.GenerateKeyPair();
        var privateKey = pair.ToEncryptedPrivateKeyString(passPhrase);
        var publicKey = pair.ToPublicKeyString();
        // Add must succeed every time: a false return means a duplicate key.
        Assert.That(privKeySet.Add(privateKey), Is.True);
        Assert.That(pubKeySet.Add(publicKey), Is.True);
    }
    privKeySet.Clear();
    pubKeySet.Clear();
}
/// <summary>
/// Selects 'children' batches of 'parents' scopes each; within a single batch
/// no parent index repeats. Every selected scope is cloned into the result.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when more parents per child are requested than scopes available.
/// </exception>
protected override IScope[] Select(List<IScope> scopes)
{
    IRandom random = RandomParameter.ActualValue;
    int children = ChildrenParameter.ActualValue.Value;
    int parents = ParentsPerChildParameter.ActualValue.Value;
    int parentsAvailable = scopes.Count;
    if (parents > parentsAvailable) throw new InvalidOperationException("WithoutRepeatingBatchedRandomSelector: Cannot select more parents per child than there are parents available");
    IScope[] result = new IScope[children * parents];
    int count = 0;
    // Indices already used within the current batch (reset per child).
    HashSet<int> selectedParents = new HashSet<int>();
    for (int i = 0; i < children; i++)
    {
        selectedParents.Clear();
        for (int j = 0; j < parents; j++)
        {
            int nextParent = j; // will be used in case parents == parentsAvailable
            if (parents < parentsAvailable)
            {
                // Rejection-sample until an unused index is found; this
                // terminates because fewer indices are drawn than exist.
                do
                {
                    nextParent = random.Next(parentsAvailable);
                } while (selectedParents.Contains(nextParent));
            }
            result[count++] = (IScope)scopes[nextParent].Clone();
            selectedParents.Add(nextParent);
        }
    }
    return result;
}
/// <summary>
/// Records, per calendar day, the set of user names appearing in this
/// session fragment's messages into the find-by-user database.
/// </summary>
public void Scan()
{
    if (_db == null || FindByUserDatabase.DatabaseUnavailable) return;

    var sessionId = _session.Id;
    var currentDate = new DateTime(2000, 1, 1);
    var users = new HashSet<string>();

    // Iterate across all messages in this session fragment
    foreach (var message in _session.GetMessages())
    {
        // Handle the case of sessions that span multiple days
        var localDate = message.Timestamp.ToLocalTime().Date;
        if (localDate != currentDate)
        {
            // Day boundary crossed: flush the users collected so far.
            if (users.Count > 0)
            {
                _db.AddUsers(sessionId, currentDate, users);
                users.Clear();
            }
            currentDate = localDate;
        }
        users.Add(message.UserName);
    }

    // Flush the final (possibly only) day of this session fragment.
    if (users.Count > 0)
    {
        _db.AddUsers(sessionId, currentDate, users);
    }
}
/// <summary>
/// Returns the integers present in every list of <paramref name="lists"/>,
/// computed by iteratively intersecting an accumulator with each list.
/// </summary>
public virtual ICollection<int> Intersection(IList<IList<int>> lists)
{
    HashSet<int> A = new HashSet<int>();
    HashSet<int> B = new HashSet<int>();

    // Seed the accumulator with the contents of the first list.
    foreach (var pos in lists[0])
    {
        A.Add(pos);
    }

    for (int s = 1; s < lists.Count; ++s)
    {
        var current = lists[s];
        var currentCount = current.Count;
        // B collects the survivors of this round.
        for (int i = 0; i < currentCount; ++i)
        {
            if (A.Contains(current[i]))
            {
                B.Add(current[i]);
            }
        }
        // Swap buffers and recycle the old accumulator for the next round.
        var swap = A;
        A = B;
        B = swap;
        B.Clear();
    }
    return A;
}
/// <summary>
/// Applies the rule's party-count condition: when the number of surviving
/// targets fails the threshold, every target is filtered out.
/// </summary>
public void FilterEntities(BattleEntity sourceEntity, HashSet<BattleEntity> entities)
{
    // leftover targets should already be in the party, we should just count
    // them and wipe the set when the configured condition is not met
    bool conditionMet = true;
    switch (mPartyCondition)
    {
        case AISkillRule.PartyCondition.PARTY_COUNT_GT:
            conditionMet = entities.Count > mPartyCount;
            break;
        case AISkillRule.PartyCondition.PARTY_COUNT_LT:
            conditionMet = entities.Count < mPartyCount;
            break;
    }
    if (!conditionMet)
    {
        entities.Clear();
    }
}
/// <summary>Releases event subscribers and empties the scene-object cache.</summary>
public void Dispose()
{
    // Null the delegates so subscribers cannot keep this object alive.
    SubscriberAdded = null;
    SubscriberRemoved = null;
    // The cache may never have been allocated; clear it only if present.
    sceneObjects?.Clear();
}
/// <summary>
/// first construct a prime list up to 1k(using seive)
/// and check each number after 647 to see if it matches with criteria given by the question
/// 1. for each number, first find out all the prime factors, and add them to a hashset
/// 2. if prime factors added are not exactly 4, reset the hashset
/// 3 (start variable is used to make sure the numbers are continuous, if there is a gap between numbers, reset the hashset)
/// 4. if prime factors added are exactly 4, check the hashset count, if it is 16 return the answer
/// 5. if it is not 16, continues to next number
/// </summary>
/// <returns></returns>
static int brute_force_test_each_number_for_match()
{
    var max = 1000; //randomly chose, it works :P
    var bound = (int)Math.Sqrt(max);
    // Sieve of Eratosthenes; primes[n] == false means n is prime.
    bool[] primes = new bool[max];
    primes[0] = true;
    primes[1] = true;
    int s, m;
    for (s = 2; s <= bound; s++)
    {
        if (primes[s] == false)
        {
            for (m = s * s; m < max; m += s)
            {
                primes[m] = true;
            }
        }
    }
    var factor = 4;  // required distinct prime-power count per number
    var start = 0;   // previous matching number (0 = no run in progress)
    var num = 0;
    var pwr = 1;
    var set = new HashSet<int>();  // distinct prime powers across the current run
    var count = 1;   // 1-based position within the current run
    for (s = 647; s < 200000; s++)
    {
        num = s;
        // Factor s into prime powers, adding each p^k to the shared set.
        for (m = 2; m < max; m++)
        {
            if (primes[m] == false)
            {
                pwr = 1;
                while (num % m == 0)
                {
                    pwr *= m;
                    num /= m;
                }
                if (pwr != 1) set.Add(pwr);
                if (num <= 1) break;
            }
        }
        // The run continues when the set grew by exactly 'factor' new powers
        // and this number directly follows the previous match (or starts a run).
        if (set.Count == factor * count && (s == start + 1 || start == 0))
        {
            // Run of 4 consecutive numbers complete: return its first member.
            if (count == factor) return s - 3;
            start = s;
            count++;
        }
        else
        {
            // Broken run: reset all tracking state.
            set.Clear();
            count = 1;
            start = 0;
        }
    }
    return 0;
}
/// <summary>
/// Tears down the device connection: disposes any still-allocated hardware
/// resources (with a warning, since callers should have released them
/// already), then disposes the device and factory themselves.
/// </summary>
protected override void DisposeOverride()
{
    DiagnosticsWriter.WriteInformation("Disconnecting from device");
    // Snapshot the resources so disposal cannot mutate the live collection
    // while it is being iterated.
    var resources = mResources?.ToArray() ?? new HardwareResource[0];
    if (resources.Length > 0)
    {
        DiagnosticsWriter.WriteWarning("The is being disconnected from but there are still {0} associated resource(s) allocated. The resources will be disposed to avoid memory leaks", resources.Length);
        foreach (var resource in resources)
        {
            resource?.Dispose();
        }
    }
    mResources?.Clear();
    mResources = null;
    // The device must not already be disposed at this point.
    System.Diagnostics.Debug.Assert(!mDevice.Disposed);
    mDevice?.Dispose();
    mFactory?.Dispose();
    mDevice = null;
    mFactory = null;
    DiagnosticsWriter.WriteInformation("Disconnection completed.");
}
/// <summary>
/// For every generated entity class, emits a nested public struct of
/// attribute-name constants (one per property) and inserts it as the first
/// member of the class. Classes yielding no constants are left untouched.
/// </summary>
public void CustomizeCodeDom(CodeCompileUnit codeUnit, IServiceProvider services)
{
    var types = codeUnit.Namespaces[0].Types;
    // Reused per type to track which attribute names were already emitted.
    var attributes = new HashSet<string>();
    foreach (var type in types.Cast<CodeTypeDeclaration>().
        Where(type => type.IsClass && !type.IsContextType()))
    {
        attributes.Clear();
        var @struct = new CodeTypeDeclaration
        {
            Name = AttributeConstsStructName,
            IsStruct = true,
            TypeAttributes = TypeAttributes.Public
        };
        // Walk only the property members of the class.
        foreach (var member in from CodeTypeMember member in type.Members
                               let prop = member as CodeMemberProperty
                               where prop != null
                               select prop)
        {
            CreateAttributeConstForProperty(@struct, member, attributes);
        }
        // Only insert the struct when at least one const was generated.
        if (attributes.Any())
        {
            type.Members.Insert(0, GenerateTypeWithoutEmptyLines(@struct));
        }
    }
}
/// <summary>
/// Indexes the given argument instances: records which full names are
/// mandatory, and maps every alias (name, full name, and optional short
/// name) of each instance to that instance.
/// </summary>
/// <param name="instances">The argument instances to index.</param>
public IndexedArgInstances(List<ArgInstance> instances)
{
    _mandatoryFullNames = new HashSet<string>();
    _argInstancesByName = new MultiDictionary<string, ArgInstance>();
    // Reused per iteration: de-duplicates aliases that collide (for example
    // Name == FullName) so each alias is registered exactly once.
    HashSet<string> aliases = new HashSet<string>();
    foreach (ArgInstance instance in instances)
    {
        if (instance.Arg.IsMandatory)
        {
            _mandatoryFullNames.Add(instance.FullName);
        }
        aliases.Clear();
        aliases.Add(instance.Name);
        aliases.Add(instance.FullName);
        if (instance.ShortName != null)
        {
            aliases.Add(instance.ShortName);
        }
        foreach (string alias in aliases)
        {
            _argInstancesByName.Add(alias, instance);
        }
    }
}
// Verifies that a HashSet keeps its items reachable and that Remove/Clear
// release them for collection; Item.Count tracks live Item instances.
static void TestHashSet ()
{
    HashSet <Item> h= new HashSet <Item> ();
    int initial = Item.Count;
    Item itemA = new Item ();
    Item itemB = new Item ();
    Item itemC = new Item ();
    h.Add (itemA);
    h.Add (itemB);
    h.Add (itemC);
    ForceCollect ();
    // All three are referenced (locals and the set), so none may be collected.
    Assert.AreEqual (initial + 3, Item.Count, "#C1");
    initial = Item.Count;
    h.Remove (itemB);
    itemB = null;
    ForceCollect ();
    // Removed from the set AND local nulled: itemB must become collectable.
    Assert.AreEqual (initial - 1, Item.Count, "#C2");
    initial = Item.Count;
    h.Clear ();
    itemA = null;
    itemC = null;
    ForceCollect ();
    // Clear drops the set's references; the remaining two become collectable.
    Assert.AreEqual (initial - 2, Item.Count, "#C3");
}
/// <summary>
/// Recomputes, for the node at <paramref name="nodeId"/>, which other
/// segments of the same service class leave to the right, to the left, or
/// straight ahead relative to this.segmentId, filling the supplied sets and
/// counters. All outputs start empty/zero; nodeId 0 means "no node".
/// </summary>
private void recalculate(out byte numRight, ref HashSet<ushort> right, out byte numLeft, ref HashSet<ushort> left, out bool hasStraightSegment, ref HashSet<ushort> straight, ushort nodeId)
{
    numRight = 0;
    hasStraightSegment = false;
    numLeft = 0;
    right.Clear();
    left.Clear();
    straight.Clear();
    if (nodeId == 0) return;
    NetNode node = Singleton<NetManager>.instance.m_nodes.m_buffer[nodeId];
    ItemClass connectionClass = Singleton<NetManager>.instance.m_segments.m_buffer[segmentId].Info.GetConnectionClass();
    // A node references at most 8 segments.
    for (var s = 0; s < 8; s++)
    {
        var otherSegmentId = node.GetSegment(s);
        // Skip empty slots and the segment being classified itself.
        if (otherSegmentId == 0 || otherSegmentId == segmentId) continue;
        ItemClass otherConnectionClass = Singleton<NetManager>.instance.m_segments.m_buffer[otherSegmentId].Info.GetConnectionClass();
        // Only segments of the same service participate in the classification.
        if (otherConnectionClass.m_service != connectionClass.m_service) continue;
        if (TrafficPriority.IsRightSegment(segmentId, otherSegmentId, nodeId))
        {
            right.Add(otherSegmentId);
            ++numRight;
        }
        else if (TrafficPriority.IsLeftSegment(segmentId, otherSegmentId, nodeId))
        {
            left.Add(otherSegmentId);
            ++numLeft;
        }
        else
        {
            // Neither right nor left is treated as straight ahead.
            straight.Add(otherSegmentId);
            hasStraightSegment = true;
        }
    }
}
/// <summary>
/// Builds the initial maze state: optionally generates random walls while
/// keeping the maze solvable (re-running A* whenever a new wall might block
/// the best known path), spawns opponents at the goal, and shows the GUI.
/// </summary>
public override QState Initialize()
{
    WriteOutput("Dimensions: " + width + "x" + height);
    self = new Point(start.X, start.Y);
    score = 0;
    walls = new Point[] { };
    // Generate Walls Randomly
    if (wallDensity > 0)
    {
        // Cells on the most recently found start-to-goal path; a candidate
        // wall only forces a new A* search when it lands on this path.
        HashSet<Point> bestPathSoFar = new HashSet<Point>();
        WriteOutput("Generating walls randomly with density target of " + wallDensity + "...");
        for (int w = 0; w < width; w++)
        {
            for (int h = 0; h < height; h++)
            {
                double r = random.NextDouble();
                //WriteOutput("Wall Probability for " + w + "x" + h + ": " + r+" vs threshold "+walls);
                if (r < wallDensity)
                {
                    //WriteOutput("Wall created at " + w + "x" + h);
                    Point newWall = new Point(w, h);
                    // Never wall off the start or goal cells.
                    if (start == newWall || goal == newWall) continue;
                    Point[] tempWalls = walls.Concat(new Point[] { newWall }).ToArray();
                    QState tempState = new Maze() { maze = maze, self = self, goal = goal, width = width, height = height, walls = tempWalls};
                    if (!bestPathSoFar.Any() || bestPathSoFar.Contains(newWall))
                    {
                        // The wall may block the known path: re-plan, and keep
                        // the wall only if the maze remains solvable.
                        QSearchResult path = new QSearch().AStar(tempState);
                        if (path != null)
                        {
                            bestPathSoFar.Clear();
                            foreach (QState q in path.QStatesList) bestPathSoFar.Add(((Maze)q).self);
                            walls = tempWalls;
                        }
                    }
                    // Off the best path: always safe to keep without re-planning.
                    else walls = tempWalls;
                }
            }
        }
        WriteOutput("Maze generation complete.");
    }
    // All opponents start at the goal cell.
    opponent = new List<Point>();
    for (int i = 0; i < opponents; i++) opponent.Add(new Point(goal.X, goal.Y));
    if (!HideOutput)
    {
        // Build the GUI on a worker thread and block until it signals readiness.
        ManualResetEvent wait = new ManualResetEvent(false);
        ThreadPool.QueueUserWorkItem(new WaitCallback(CreateGUI), new object[] { width, height, self.X, self.Y, goal.X, goal.Y, walls, this, wait });
        wait.WaitOne();
    }
    return new Maze() { width = width, height = height, self = self, goal = goal, walls = walls, wallDensity = wallDensity, opponent = opponent, opponentDifficulty = opponentDifficulty, random = random, maze = maze, start = start, opponents = opponents, bestOpponentPath = bestOpponentPath, score = score };
}
/// <summary>Clear on a populated set must leave Count at zero.</summary>
public void Clear()
{
    // Arrange: seed the set from an array instead of a collection initializer.
    var hashSet = new HashSet<int>(new[] { 1, 2, 3 });

    // Act
    hashSet.Clear();

    // Assert
    Assert.AreEqual(0, hashSet.Count);
}
/// <summary>
/// Unregisters every registry-watcher tag this object added, then empties
/// the tag collection.
/// </summary>
protected void Dispose(bool disposing)
{
    // MaybeEnumerate yields nothing when _registryTags is null.
    foreach (var tag in _registryTags.MaybeEnumerate())
    {
        RegistryWatcher.Instance.Remove(tag);
    }
    _registryTags?.Clear();
}
/// <summary>Runs the vanilla scene-end logic, then drops level references.</summary>
public override void SceneEnd(Scene scene)
{
    // make sure references to the previous level don't leak if hot reloading inside of a trigger.
    orig_SceneEnd(scene);
    triggersInside?.Clear();
    temp?.Clear();
    level = null;
}
/// <summary>
/// Clears and releases both caches so they are rebuilt on next use.
/// </summary>
public static void Reset()
{
    // Empty each cache before dropping the reference so any outstanding
    // holders observe an empty collection rather than stale data.
    _craftableProducts?.Clear();
    _craftableProducts = null;
    _thingsOnMap?.Clear();
    _thingsOnMap = null;
}
/// <summary>
/// Weaving entry point: finds every type annotated with [Singleton] — first
/// in this module, then in referenced assemblies — and injects the advices
/// that route instantiation through the singleton machinery.
/// </summary>
public void ProvideAdvices(Weaver codeWeaver)
{
    // Gets the dictionary of custom attributes.
    AnnotationRepositoryTask customAttributeDictionary = AnnotationRepositoryTask.GetTask(this.Project);
    // Requests an enumerator of all instances of our Singleton.
    IEnumerator<IAnnotationInstance> customAttributeEnumerator = customAttributeDictionary.GetAnnotationsOfType(typeof (SingletonAttribute), true);
    ICollection<TypeDefDeclaration> singletons = new HashSet<TypeDefDeclaration>();
    // For each instance of our Singleton.
    while (customAttributeEnumerator.MoveNext())
    {
        // Gets the type to which it applies.
        TypeDefDeclaration typeDef = customAttributeEnumerator.Current.TargetElement as TypeDefDeclaration;
        if (typeDef != null && !singletons.Contains(typeDef))
        {
            singletons.Add(typeDef);
            // Module-local singletons get both the accessor advice (before the
            // static constructor) and the constructor-replacement advice.
            codeWeaver.AddTypeLevelAdvice(new SingletonAccessorAdvice(typeDef), JoinPointKinds.BeforeStaticConstructor, new Singleton<TypeDefDeclaration>(typeDef));
            codeWeaver.AddMethodLevelAdvice(new SingletonAdvice(typeDef), null, JoinPointKinds.InsteadOfNewObject, new Singleton<MetadataDeclaration>( typeDef.Methods.GetOneByName(".ctor")));
        }
    }
    // Reuse the set to collect singleton types declared in referenced
    // assemblies; the module-local types above were fully advised already.
    singletons.Clear();
    foreach (AssemblyRefDeclaration assembly in this.Project.Module.AssemblyRefs)
    {
        foreach (TypeRefDeclaration type in assembly.TypeRefs)
        {
            TypeDefDeclaration def = type.GetTypeDefinition();
            foreach (CustomAttributeDeclaration att in def.CustomAttributes)
            {
                if (Equals(att.Constructor.DeclaringType.GetSystemType(new Type[] {}, new Type[] {}), typeof (SingletonAttribute)))
                {
                    singletons.Add(def);
                }
            }
        }
    }
    // External singleton types only receive the constructor-replacement advice.
    foreach (TypeDefDeclaration type in singletons)
    {
        codeWeaver.AddMethodLevelAdvice(new SingletonAdvice(type), null, JoinPointKinds.InsteadOfNewObject, new Singleton<MetadataDeclaration>(type.Methods.GetOneByName(".ctor")));
    }
}
// Clear on a set holding one element must remove it and zero the count.
// (This exercises a custom HashSet implementation exposing Find.)
public void HashSetShouldClearElementCorrectly()
{
    var set = new HashSet<int>();
    set.Add(0);
    set.Clear();
    // Find reports membership in this HashSet implementation.
    Assert.IsFalse(set.Find(0));
    Assert.AreEqual(0, set.Count);
}
/// <summary>
/// Breadth-first flood fill over buildings cardinally adjacent to
/// <paramref name="root"/> whose CompAir sits on the given layer; returns
/// the CompAir of every building in the connected component.
/// </summary>
private static IEnumerable<CompAir> ContiguousAirBuildings( Building root, NetLayer layer )
{
    // NOTE(review): closedSet/currentSet/openSet are declared outside this
    // method and reset here, making the traversal non-reentrant — confirm
    // it is never invoked concurrently.
    closedSet.Clear();
    currentSet.Clear();
    openSet.Add( root );
    do
    {
        //Move all opened to closed
        foreach (var current in openSet)
        {
            closedSet.Add( current );
        }
        // Rotate buffers: the frontier becomes 'current', and the old
        // 'current' set is cleared and reused to collect the next frontier.
        var tempSet = currentSet;
        currentSet = openSet;
        openSet = tempSet;
        openSet.Clear();
        foreach (var things in currentSet.SelectMany( openBuilding => GenAdj.CellsAdjacentCardinal( openBuilding ) .Select( openCells => openCells.GetThingList() ) ) )
        {
            //All adjacent things
            foreach (var current in things)
            {
                var building = current as Building;
                var compAir = building?.TryGetComp< CompAir >();
                //No adjacent CompAir
                if (compAir == null) { continue; }
                //CompAir is not on the same layer
                if (!compAir.IsLayerOf( layer )) { continue; }
                //Already swept through
                if (openSet.Contains( building ) || currentSet.Contains( building ) || closedSet.Contains( building )) { continue; }
                // NOTE(review): this break stops after the first qualifying
                // thing in each cell's thing list — confirm intended.
                openSet.Add( building );
                break;
            }
        }
    } while (openSet.Count > 0);
    return from b in closedSet select b.TryGetComp< CompAir >();
}
/// <summary>Empties all move/enter/leave tracking sets on dispose.</summary>
public void Dispose()
{
    MovesSet?.Clear();
    OldMovesSet?.Clear();
    Enters?.Clear();
    Leaves?.Clear();
}
/// <summary>Empties all pointer-event tracking sets on dispose.</summary>
public void Dispose()
{
    MovesSet?.Clear();
    MoveOnlySet?.Clear();
    EntersSet?.Clear();
    LeavesSet?.Clear();
}
// Compares several string-distance metrics against tag overlap: for every
// pair of lines from "todos2", plots tag similarity (x axis) versus the
// normalized string distance (y axis) into a histogram written to
// DistanceMetricResults.dat.
public static int Main(string[] args)
{
    List<List<string>> tags = new List<List<string>>();
    List<string> lines = new List<string>();
    // Read "todos2": every line yields a tag list plus the remaining text.
    Stream s = File.Open("todos2",FileMode.Open,FileAccess.Read);
    TextReader tr = new StreamReader(s);
    string line = tr.ReadLine();
    string text;
    while(line != null)
    {
        tags.Add(ParseTags(line,out text));
        lines.Add(text);
        line = tr.ReadLine();
    }
    tr.Close();
    s.Close();
    StringDistanceMetric[] metrics = {new LevenshteinWordDistanceMetric(),new CosineDistanceMetric(),new DiceDistanceMetric(),new JacardDistanceMetric(), new EuclidDistanceMetric()};
    foreach(StringDistanceMetric metric in metrics)
    {
        // First pass: find the maximum pairwise distance for y normalization.
        double max = double.NegativeInfinity;
        for(int i = 0; i < lines.Count; i++)
        {
            for(int j = i+1; j < lines.Count; j++)
            {
                max = Math.Max((double) metric.GetDistance(lines[i],lines[j]),max);
            }
        }
        // NOTE(review): the output file is re-created for each metric, so only
        // the last metric's histogram survives — confirm intended.
        s = File.Open("DistanceMetricResults.dat",FileMode.Create,FileAccess.Write);
        TextWriter tw = new StreamWriter(s);
        // Scratch set for tag intersections between line pairs.
        HashSet<string> xdiff = new HashSet<string>();
        int ndx = 20;  // buckets on the tag-similarity axis
        int ndy = 40;  // buckets on the distance axis
        int[] vari = new int[ndx+1];        // per-column totals, for averaging
        int[,] chart = new int[ndx+1,ndy+1];
        for(int i = 0; i < lines.Count; i++)
        {
            for(int j = i+1; j < lines.Count; j++)
            {
                // Dice-style tag similarity: 2 * |A ∩ B| / (|A| + |B|).
                xdiff.Clear();
                xdiff.UnionWith(tags[i]);
                xdiff.IntersectWith(tags[j]);
                double xdistance = 2.0d*xdiff.Count/(tags[i].Count+tags[j].Count);
                double distance = (double) metric.GetDistance(lines[i],lines[j]);
                chart[(int) Math.Floor(xdistance*ndx),(int) Math.Floor(distance*ndy/max)]++;
                vari[(int) Math.Floor(xdistance*ndx)]++;
            }
        }
        for(int i = 0; i <= ndx; i++)
        {
            // Avoid division by zero for empty histogram columns.
            if(vari[i] == 0)
            {
                vari[i] = 1;
            }
            for(int j = 0; j <= ndy; j++)
            {
                tw.WriteLine("{0}\t{1}\t{2}",((double) i/ndx).ToString(nfi),((double) j*max/ndy).ToString(nfi),((double) chart[i,j]/vari[i]).ToString(nfi));
            }
            tw.WriteLine();
        }
        tw.Close();
        s.Close();
    }
    return 0;
}
/// <summary>
/// Discovers shell commands of type <typeparamref name="T"/> from assemblies
/// in the commands directory via attributed composition, initializes each
/// newly found command, and registers it with the interpreter.
/// </summary>
/// <returns>true when composition produced at least one command export.</returns>
internal async Task <bool> LoadCommandsAsync <T>() where T : IShellCommand
{
    if (!Directory.Exists(Constants.COMMANDS_PATH))
    {
        Directory.CreateDirectory(Constants.COMMANDS_PATH);
    }
    // Rebuild the assembly collection from disk on every load.
    AssemblyCollection?.Clear();
    AssemblyCollection = LoadAssemblies();
    if (AssemblyCollection == null || AssemblyCollection.Count <= 0)
    {
        Logger.Trace("No command assemblies found.");
        return(false);
    }
    // Serialize concurrent load requests through the semaphore.
    await LoadSync.WaitAsync().ConfigureAwait(false);
    try
    {
        // Export every type derived from T so the container can compose them.
        ConventionBuilder conventions = new ConventionBuilder();
        conventions.ForTypesDerivedFrom <T>().Export <T>();
        ContainerConfiguration configuration = new ContainerConfiguration().WithAssemblies(AssemblyCollection, conventions);
        using CompositionHost container = configuration.CreateContainer();
        List <T> list = container.GetExports <T>().ToList();
        if (list.Count <= 0)
        {
            return(false);
        }
        foreach (T command in list)
        {
            // Skip commands whose unique id is already registered.
            if (await IsExistingCommand <T>(command.UniqueId).ConfigureAwait(false))
            {
                Logger.Warning($"'{command.CommandName}' shell command already exists. skipping...");
                continue;
            }
            await command.InitAsync().ConfigureAwait(false);
            Interpreter.Commands.Add(command.CommandKey, command);
            Logger.Info($"Loaded external shell command -> {command.CommandName}");
        }
        return(true);
    }
    catch (Exception e)
    {
        Logger.Exception(e);
        return(false);
    }
    finally
    {
        // Always release the gate, even when composition fails.
        LoadSync.Release();
    }
}
/// <summary>
/// Simplifies a switch statement by removing a leading run of cases (empty
/// fall-through cases plus one trailing simple case) that is redundant
/// because it behaves exactly like the default case — or like a plain break
/// when there is no default case.
/// </summary>
public void FixCases(SwitchStatement theSwitch)
{
    DefaultCase defaultCase = theSwitch.Cases.LastOrDefault() as DefaultCase;
    StatementType defaultCaseStatementType = StatementType.None;
    string defaultCaseGotoLabel = null;
    // Only proceed when the default case (if present) is itself "simple".
    if (defaultCase != null && !TryGetSimpleCaseStatementType(defaultCase, out defaultCaseStatementType, out defaultCaseGotoLabel))
    {
        return;
    }
    HashSet<SwitchCase> casesToRemove = new HashSet<SwitchCase>();
    List<SwitchCase> allCases = new List<SwitchCase>(theSwitch.Cases);
    foreach (SwitchCase @case in allCases)
    {
        if (@case == defaultCase)
        {
            break;
        }
        // Bodyless cases fall through: keep accumulating them as candidates.
        if (@case.Body == null)
        {
            casesToRemove.Add(@case);
            continue;
        }
        StatementType caseStatementType;
        string caseGotoLabel;
        if (TryGetSimpleCaseStatementType(@case, out caseStatementType, out caseGotoLabel))
        {
            if (defaultCase != null)
            {
                // Matches the default exactly: the whole accumulated run
                // (empty cases + this one) is redundant — remove it and stop.
                if (defaultCaseStatementType == caseStatementType && defaultCaseGotoLabel == caseGotoLabel)
                {
                    casesToRemove.Add(@case);
                    break;
                }
            }
            else
            {
                // Without a default, only a bare break makes the run redundant.
                if (caseStatementType == StatementType.Break)
                {
                    casesToRemove.Add(@case);
                    break;
                }
            }
        }
        // Reached for a case whose body does not qualify for removal: the
        // previously accumulated empty cases fall through into it, so the
        // whole candidate run must be kept. Discard the accumulation.
        casesToRemove.Clear();
    }
    if (casesToRemove.Count > 0)
    {
        theSwitch.Cases = allCases.Where(@case => !casesToRemove.Contains(@case));
    }
}
/// <summary>Clear on a randomly sized set must leave it empty.</summary>
public void ClearTest()
{
    // Arrange: populate with a random number (1..99) of distinct values.
    var set = new HashSet<int>();
    var limit = rnd.Next(1, 100);
    for (int value = 0; value < limit; value++)
    {
        set.Add(value);
    }

    // Act
    set.Clear();

    // Assert
    Assert.AreEqual(set.Count, 0);
}
// Console walkthrough of the hash-set data structure: demonstrates Add,
// iteration, Contains, Remove, UnionWith, IntersectWith and Clear in turn,
// printing the set's contents and count after each operation.
internal static void Main()
{
    string decorationLine = new string('-', Console.WindowWidth);
    Console.Write(decorationLine);
    Console.WriteLine("***Presenting the functionality of the data structure 'Hash set'***");
    Console.Write(decorationLine);
    HashSet<int> years = new HashSet<int>();
    Console.WriteLine("---Add operation---");
    years.Add(1990);
    years.Add(1992);
    years.Add(2013);
    years.Add(2016);
    years.Add(2022);
    Console.WriteLine("Count = " + years.Count);
    Console.WriteLine();
    Console.WriteLine("---Iterator functionality---");
    PrintYears(years);
    Console.WriteLine();
    Console.WriteLine("---Contains operation---");
    Console.WriteLine("Does years set contain {0}? - {1}", 1992, years.Contains(1992));
    Console.WriteLine("Does years set contain {0}? - {1}", 2012, years.Contains(2012));
    Console.WriteLine();
    // Remove returns false for absent elements (1996) and true otherwise.
    Console.WriteLine("---Remove operation---");
    Console.WriteLine("Is {0} removed from years set? - {1}", 1996, years.Remove(1996));
    Console.WriteLine("Years set count: " + years.Count);
    Console.WriteLine("Is {0} removed from years set? - {1}", 1990, years.Remove(1990));
    Console.WriteLine("Years set count: " + years.Count);
    Console.WriteLine();
    // UnionWith ignores values already present (2016, 1992, 2013).
    Console.WriteLine("---UnionWith operation---");
    int[] yearsToUnionWith = new int[] { 2005, 2009, 2021, 2016, 1992, 2013 };
    years.UnionWith(yearsToUnionWith);
    Console.WriteLine("All years after a union with: {0}", string.Join(", ", yearsToUnionWith));
    PrintYears(years);
    Console.WriteLine("Years set count: " + years.Count);
    Console.WriteLine();
    // IntersectWith keeps only values present in both collections.
    Console.WriteLine("---IntersectWith operation---");
    int[] yearsToIntersectWith = new int[] { 2045, 2025, 2021, 2016, 1999, 2017, 2013 };
    years.IntersectWith(yearsToIntersectWith);
    Console.WriteLine("All years after an intersect with: {0}", string.Join(", ", yearsToIntersectWith));
    PrintYears(years);
    Console.WriteLine("Years set count: " + years.Count);
    Console.WriteLine();
    Console.WriteLine("---Clear operation---");
    years.Clear();
    Console.WriteLine("Years count after clearing: " + years.Count);
}
/// <summary>Resets all cached user, token, and purchase state to defaults.</summary>
static void ClearUserInfo()
{
    // Identity and token state.
    s_UserInfo = null;
    s_TokenInfo = null;
    s_AccessTokenData = null;
    s_AuthCode = null;
    // Purchase caches.
    s_Purchases?.Clear();
    purchasePackageIds?.Clear();
    s_PackagesKey = null;
    // Flags: no purchase in flight; purchases need re-checking next time.
    s_StartPurchaseRequest = false;
    s_RequestCheckPurchases = true;
}
/// <summary>
/// Handles column-collection changes. Once the grid is no longer owned
/// (<see cref="OwningGrid"/> is null) while a cached reference still exists,
/// detaches from the cached grid: clears the notifying items and removes all
/// event handlers so the grid can be garbage collected.
/// </summary>
/// <param name="sender">The collection raising the event (unused).</param>
/// <param name="e">Details of the collection change (unused).</param>
private void Columns_CollectionChanged(object sender, NotifyCollectionChangedEventArgs e)
{
    if (OwningGrid == null && _owningGrid != null)
    {
        _notifyingDataItems?.Clear();

        // Use method-group syntax consistently for all unsubscriptions (the
        // original mixed explicit delegate construction, method groups, and
        // an inconsistent "this." qualifier).
        _owningGrid.Columns.CollectionChanged -= Columns_CollectionChanged;
        _owningGrid.LoadingRow -= OwningGrid_LoadingRow;
        _owningGrid.UnloadingRow -= OwningGrid_UnloadingRow;
        _owningGrid.CellEditEnded -= OwningGrid_CellEditEnded;
        _owningGrid = null;
    }
}
/// <summary>
/// Runs the original scene-end logic, then clears cached level references —
/// unless the game is transitioning into PICO-8 or the Reflection Fall
/// cutscene, where that state must survive.
/// </summary>
public override void SceneEnd(Scene scene)
{
    orig_SceneEnd(scene);

    var next = patch_Engine.NextScene;
    bool enteringSpecialScene = next is Pico8.Emulator || next is OverworldReflectionsFall;
    if (enteringSpecialScene)
    {
        return;
    }

    // Make sure references to the previous level don't leak if hot reloading
    // inside of a trigger.
    triggersInside?.Clear();
    temp?.Clear();
    level = null;
}
/// <summary>
/// Releases the matcher and prefab state, hands the skipped prefabs over to
/// the level loader, and wipes the local skipped-prefab array.
/// </summary>
internal override void Dispose()
{
    base.Dispose();

    skipMatcher = null;
    exceptMatcher = null;

    simulationPrefabs?.Clear();
    simulationPrefabs = null;

    // Publish first, then wipe the local copy.
    // NOTE(review): SetSkippedPrefabs receives the very array instance that is
    // cleared immediately below — confirm it copies rather than stores it.
    LevelLoader.instance.SetSkippedPrefabs(skippedPrefabs);
    Array.Clear(skippedPrefabs, 0, skippedPrefabs.Length);
}
/// <summary>
/// Disposes every cached assembly and empties the lookup collections, then
/// chains to the base implementation.
/// </summary>
/// <param name="disposing">
/// true when invoked from an explicit Dispose call; false when invoked from a
/// finalizer, in which case managed members must not be touched.
/// </param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        // Only reach into managed state on the explicit-dispose path. The
        // original cleared these unconditionally, which is unsafe if this
        // type (or a derived one) ever runs a finalizer.
        foreach (var ass in cache)
        {
            ass.Value.Dispose();
        }
        cache.Clear();
        assemblyData.Clear();
        references.Clear();
        existingWindowsKitsReferenceAssemblies?.Clear();
    }
    base.Dispose(disposing);
}
/// <summary>
/// Returns the accumulated tagging results and resets the accumulators.
/// All pending jobs and partial results must already be drained.
/// </summary>
public TagsResult[] GetResult()
{
    Debug.Assert(jobs.Count == 0);
    Debug.Assert(currentResult.Count == 0);
    Debug.Assert(snapshotHash is null || snapshotHash.Count == 0);

    // Snapshot before clearing so the caller keeps a stable copy.
    TagsResult[] snapshot = tagsResultList.ToArray();

    tagsResultList.Clear();
    currentResult.Clear();
    snapshotHash?.Clear();

    return snapshot;
}
/// <summary>
/// Tears down this session: disables processing, drops both generator
/// modules, empties every tracking collection and releases the singleton.
/// </summary>
protected override void UnloadData()
{
    Enabled = false;

    // Generator modules.
    asteroidModule = null;
    planetModule = null;

    // Tracking collections (each may be null if never populated).
    m_existingObjectSeeds?.Clear();
    m_toTrackedEntities?.Clear();
    m_trackedEntities?.Clear();

    Static = null;
}
/// <summary>
/// Stops the capture service: cancels the heartbeat and process-name
/// subscriptions, clears the tracked PresentMon process list under the list
/// lock, then kills any running PresentMon instance.
/// </summary>
/// <returns>true when shutdown completed; false if any step threw.</returns>
public bool StopCaptureService()
{
    _hearBeatDisposable?.Dispose();
    _processNameDisposable?.Dispose();

    try
    {
        lock (_listLock)
        {
            _presentMonProcesses?.Clear();
        }

        TryKillPresentMon();
        return true;
    }
    catch
    {
        // Best effort: signal failure through the return value instead of
        // letting the exception escape.
        return false;
    }
}
/// <summary>
/// Closes one resolution step previously opened (tracked via _depth).
/// Records the resolved instance, collects resolution callbacks, and — when
/// the outermost step finishes — commits deferred registrations and fires
/// <c>ITestHelperResolvedCallback</c> notifications.
/// </summary>
/// <param name="t">The type that was being resolved; must not be null.</param>
/// <param name="result">The resolved instance, or null when resolution produced nothing.</param>
/// <param name="mappingWithResolvedTarget">
/// When true, <paramref name="result"/> is the initially requested target:
/// it is not added to the mapping here and is instead remembered as the
/// overall result at the outermost level.
/// </param>
/// <returns>
/// When this call closes the outermost step (_depth reaches 0), the
/// remembered initial result; otherwise <paramref name="result"/> unchanged.
/// </returns>
public object Stop(Type t, object result, bool mappingWithResolvedTarget)
{
    Debug.Assert(t != null);
    Debug.Assert(_depth >= 1);
    if (result != null)
    {
        // Intermediate resolutions are mapped for reuse; the initially
        // requested target is handled separately at the end of the method.
        if (!mappingWithResolvedTarget)
        {
            AddMapping(t, result);
        }
        // Instances from dynamic assemblies are excluded from callback
        // collection — presumably generated proxies/mocks; TODO confirm.
        if (!result.GetType().Assembly.IsDynamic && result is ITestHelperResolvedCallback cb)
        {
            // Lazily created: most resolutions have no callbacks.
            if (_created == null)
            {
                _created = new List<ITestHelperResolvedCallback>();
            }
            _created.Add(cb);
        }
        if (_depth == 1)
        {
            // Outermost step: first commit any deferred "from type"
            // registrations against the final result...
            if (_fromTypes != null)
            {
                foreach (var o in _fromTypes)
                {
                    _container.Add(o, result);
                }
            }
            // ...then notify every callback collected during the whole graph
            // resolution with that same result.
            if (_created != null && _created.Count > 0)
            {
                foreach (var c in _created)
                {
                    c.OnTestHelperGraphResolved(result);
                }
            }
            if (mappingWithResolvedTarget)
            {
                _initialRequestedTypeResult = result;
            }
        }
    }
    // Closing the outermost step resets per-resolution scratch state and
    // returns the initially requested result rather than this step's result.
    if (--_depth == 0)
    {
        _fromTypes?.Clear();
        _created?.Clear();
        return(_initialRequestedTypeResult);
    }
    return(result);
}
/// <summary>
/// Restores this load request to its pristine pooled state so the instance
/// can be reused for a new asset load.
/// </summary>
public void Reset()
{
    // Core request state.
    id = 0;
    status = 0;
    result = null;
    loader = null;
    path = null;
    mode = AssetLoadMode.Async;
    onCallback = null;

    // Dependency links (collections may not have been allocated).
    m_parents?.Clear();
    m_children?.Clear();

    // Callback bookkeeping.
    m_callbackCount = 0;
    m_callbackCall = null;
}
/// <summary>
/// Handles removal of a smart card: disposes the active card handle and
/// forgets the per-card bad-sector and key caches. Failures are logged and
/// rethrown.
/// </summary>
private static void Reader_CardRemoved(object sender, CardRemovedEventArgs ev)
{
    WriteToLog("Reader_CardRemoved");
    try
    {
        if (_card != null)
        {
            _card.Dispose();
        }
        _card = null;

        _cardBadSectors?.Clear();
        _keys?.Clear();
    }
    catch (Exception e)
    {
        WriteToLog($"Reader_CardRemoved ERROR!!!\r\n {e}");
        throw; // bare rethrow preserves the original stack trace
    }
}
/// <summary>
/// Unloads the procedural generator: logs the teardown, disables generation,
/// releases both generator modules, empties every tracking set and clears
/// the singleton reference.
/// </summary>
protected override void UnloadData()
{
    PluginLog.Log("unloading procedural generator data");

    Enabled = false;

    asteroidModule = null;
    planetModule = null;

    // Null-conditional: these sets may never have been created.
    m_existingObjectSeeds?.Clear();
    m_toTrackedEntities?.Clear();
    m_trackedEntities?.Clear();

    Static = null;
}
/// <summary>
/// Resets the wizard to its initial state: clears all user-supplied paths
/// and crypto settings, rebuilds every step page, and navigates back to the
/// first step with the previous-button hidden.
/// </summary>
public void Reset()
{
    // Forget user-supplied inputs.
    cryptoPath = default;
    mainFilePath = default;
    outputlocation = default;

    // Fresh configuration and crypto state.
    cfg = new CC();
    crypto = default;
    encryptionType = default;

    localFiles?.Clear();
    localFiles = default;

    // Recreate the wizard pages from scratch.
    s1 = new Step1(this);
    s2 = new Step2(this);
    s3 = new Step3(this);
    s4 = new Step4(this);
    s5 = new Step5(this);

    // Back to the first page; previous-button hidden at step 0.
    step = 0;
    Viewer.Content = GetPage(step);
    btP.Visibility = Visibility.Hidden;
}
/// <summary>
/// Clears the query state back to its defaults. Only the members listed here
/// are reset; per the original note, other fields are intentionally left
/// untouched because they are ignored downstream.
/// </summary>
public virtual void Reset()
{
    Fields.Clear();
    cities?.Clear();
    likes?.Clear();
    fnamesIndexes?.Clear();

    // Flags and filter state.
    WillYieldZeroResults = false;
    _filters = FilterTypes.empty;
    _sexStatus = SexStatus.None;

    // Numeric cursors/ranges.
    cityIndex = 0;
    countryIndex = 0;
    joinedTo = 0;
    joinedFrom = 0;
    birth = 0;

    // Set-operation accumulators (always allocated, cleared unconditionally).
    _union.Clear();
    _intersect.Clear();
}
/// <summary>
/// Prepares the patcher for a fresh run: lazily allocates the debug sinks
/// when debugging is enabled, empties the include list, clears the
/// put/delete flag and resets the script engine's statement and timeout
/// counters.
/// </summary>
private void Reset()
{
    if (DebugMode)
    {
        // Allocate debug sinks on first use only; keep existing instances
        // (and their accumulated contents) across resets.
        if (DebugOutput == null)
        {
            DebugOutput = new List<string>();
        }
        if (DebugActions == null)
        {
            DebugActions = new PatchDebugActions();
        }
    }

    Includes?.Clear();
    PutOrDeleteCalled = false;

    ScriptEngine.ResetStatementsCount();
    ScriptEngine.ResetTimeoutTicks();
}
/// <summary>
/// Rebuilds <paramref name="myAppList"/> from <paramref name="appList"/> and,
/// when <paramref name="globs"/> is provided, compiles a case-insensitive
/// glob for every entry containing a wildcard ('*' or '?').
/// </summary>
/// <param name="myAppList">Destination set; cleared before repopulating.</param>
/// <param name="appList">Source application names/paths.</param>
/// <param name="globs">Destination for compiled globs; may be null to skip glob handling.</param>
private void loadAppList(HashSet <string> myAppList, HashSet <string> appList, HashSet <Glob> globs)
{
    myAppList.Clear();
    globs?.Clear();

    foreach (var app in appList)
    {
        myAppList.Add(app);

        if (globs == null || (!app.Contains('*') && !app.Contains('?')))
        {
            continue;
        }

        string globString = app;
        try
        {
            // A leading "**" is required so the glob can match partial paths.
            // The original indexed globString[1] directly, which threw an
            // IndexOutOfRangeException (silently swallowed and mislogged as
            // an invalid glob) for single-character patterns such as "*".
            if (!globString.StartsWith("**", StringComparison.Ordinal))
            {
                globString = Path.Combine("**", globString);
            }

            var glob = Glob.Parse(globString, new GlobOptions() { Evaluation = new EvaluationOptions() { CaseInsensitive = true } });
            if (glob != null)
            {
                globs.Add(glob);
            }
        }
        catch (Exception)
        {
            m_logger.Warn("Invalid glob '{0}'. Not adding.", app);
        }
    }
}
/// <summary>
/// Computes the size of <paramref name="obj"/>. A null reference counts as a
/// single pointer. The object-lookup cache is cleared afterwards in all
/// cases so repeated calls start from a clean state.
/// </summary>
public long SizeOf <T>(T obj)
{
    if (obj is null)
    {
        return IntPtr.Size;
    }

    try
    {
        // Sealed types take the cheaper, non-polymorphic path.
        return Type <T> .IsSealed
            ? GetSizeOfSealedInternal(obj)
            : GetSizeOfInternal(obj);
    }
    finally
    {
        _objectLookup?.Clear();
    }
}
/// <summary>
/// Tears down the drawer when it is disabled: notifies every class-level
/// attribute drawer, then drops all cached group and member state.
/// </summary>
public void OnDisable()
{
    // Short-circuit keeps TryGetAttributes from running on a null member.
    if (m_classMember != null && m_classMember.TryGetAttributes(out BaseClassAttribute[] attributes))
    {
        foreach (var attribute in attributes)
        {
            ((IClassDrawer)attribute.Drawer).OnDisableClass(m_classMember, attribute);
        }
    }

    m_groupedMembersByGroup?.Clear();
    m_drawnGroups?.Clear();
    m_foldableGroupedMembersByGroup?.Clear();
    m_drawnFoldableGroups?.Clear();
    m_membersCache?.ClearCache();
}
/// <summary>
/// Resets the editor: empties the cached tables, forgets the current
/// class/grade selection, recreates the pending TCP request placeholders and
/// returns every control to its initial state.
/// </summary>
public void Clear()
{
    // Cached table data.
    mTable?.Clear();
    mOriginal?.Clear();
    mEdited?.Clear();

    // Current selection.
    mCurrentClass = null;
    mCurrentGrade = null;

    // Fresh request placeholders.
    RecentAttReq = new TcpDataSetRequirement();
    RecentStuListReq = new TcpDataSetRequirement();

    // UI controls back to their defaults.
    DateControl.SelectedDate = DateTime.Now;
    ToExcel.IsEnabled = false;
    Set1.IsEnabled = false;
    Set2.IsEnabled = false;
    Set3.IsEnabled = false;
    Set4.IsEnabled = false;
    Set5.IsEnabled = false;
    Set6.IsEnabled = false;
    Save.IsEnabled = false;
    Cancel.IsEnabled = false;
    TopCheckBox.IsChecked = false;
}