// Verifies that ForceSingleThreadedMode makes the ParallelizationProvider run all
// work on the master thread only, across many iterations of toggling the flag.
public void TestForceSingleThreaded() {
    LosgapSystem.InvokeOnMaster(() => { // Must invoke on master because the PP has assurances that the master thread is invoking operations
        const int NUM_ITERATIONS = 10000;
        HashSet<Thread> invokedThreads = new HashSet<Thread>();
        ParallelizationProvider pp = new ParallelizationProvider();
        for (int i = 0; i < NUM_ITERATIONS; i++) {
            pp.ForceSingleThreadedMode = true;
            invokedThreads.Clear();
            // While single-threaded mode is forced, InvokeOnAll must only ever execute on the master thread.
            pp.InvokeOnAll(() => { lock (invokedThreads) { invokedThreads.Add(Thread.CurrentThread); } }, true);
            Assert.AreEqual(LosgapSystem.MasterThread, invokedThreads.Single());
            invokedThreads.Clear();
            // Likewise, Execute must not fan work out to worker threads.
            pp.Execute(100, 1, atomic => { lock (invokedThreads) { invokedThreads.Add(Thread.CurrentThread); } });
            Assert.AreEqual(LosgapSystem.MasterThread, invokedThreads.Single());
            pp.ForceSingleThreadedMode = false;
        }
    });
}
// Verifies that the file watcher raises a change event for each of several writes,
// including a re-write of an already-watched file.
public void MultipleTriggers(bool usePolling) {
    UsingTempDirectory(dir => {
        using (var changedEv = new AutoResetEvent(false))
        using (var watcher = FileWatcherFactory.CreateWatcher(dir, usePolling)) {
            var filesChanged = new HashSet<string>();
            watcher.OnFileChange += (_, f) => { filesChanged.Add(f); changedEv.Set(); };
            watcher.EnableRaisingEvents = true;

            // Writes the file, waits for the change event, and asserts it was the only change.
            // (Extracted: this sequence was copy-pasted four times in the original.)
            void WriteAndExpectSingleChange(string fullPath) {
                // On Unix the file write time is in 1s increments;
                // if we don't wait, there's a chance that the polling
                // watcher will not detect the change
                Thread.Sleep(1000);
                File.WriteAllText(fullPath, string.Empty);
                Assert.True(changedEv.WaitOne(DefaultTimeout));
                Assert.Equal(fullPath, filesChanged.Single());
                filesChanged.Clear();
            }

            WriteAndExpectSingleChange(Path.Combine(dir, "foo1"));
            WriteAndExpectSingleChange(Path.Combine(dir, "foo2"));
            var testFileFullPath = Path.Combine(dir, "foo3");
            WriteAndExpectSingleChange(testFileFullPath);
            // Re-writing an existing file must also raise a change event.
            WriteAndExpectSingleChange(testFileFullPath);
        }
    });
}
/// <summary>
/// Indicates whether this is a range condition
/// </summary>
/// <returns>True when the group contains a Between or NotBetween operator value.</returns>
public bool IsRangeCondition() {
    // SingleOrDefault instead of Single: the original threw InvalidOperationException
    // whenever the operator set did not contain a Between/NotBetween entry at all,
    // instead of simply answering "not a range condition".
    var between = _operators.SingleOrDefault(i => i.Type == OperatorType.Between);
    var notBetween = _operators.SingleOrDefault(i => i.Type == OperatorType.NotBetween);
    return (between != null && _group.Value.Contains(between.Value))
        || (notBetween != null && _group.Value.Contains(notBetween.Value));
}
// Returns the cached GameAccount wrapping the given DB record, creating and caching
// one on first request. A single SingleOrDefault scan replaces the original
// Any + Single double enumeration of LoadedGameAccounts.
public static GameAccount GetGameAccountByDBGameAccount(DBGameAccount dbGameAccount) {
    var account = LoadedGameAccounts.SingleOrDefault(acc => acc.DBGameAccount.Id == dbGameAccount.Id);
    if (account == null) {
        account = new GameAccount(dbGameAccount);
        LoadedGameAccounts.Add(account);
    }
    return account;
}
// Parses a CSV file of "origin,destination,price" rows into a connected set of
// airports. Returns whatever was accumulated before any read/parse failure
// (errors are reported to the console, matching the original best-effort behavior).
private IEnumerable<Airport> GetAllBy(string file) {
    var airports = new HashSet<Airport>();
    try {
        using (var sr = new StreamReader(file)) {
            string currentLine;
            while ((currentLine = sr.ReadLine()) != null) {
                if (string.IsNullOrEmpty(currentLine)) {
                    throw new InvalidCastException("Impossible cast lineFile to Airport class.");
                }
                var data = currentLine.Split(',');
                var origin = new Airport(data[0]);
                var destination = new Airport(data[1]);
                // NOTE(review): culture-sensitive parse — assumes the file's decimal
                // separator matches the current culture; confirm against the data format.
                var price = decimal.Parse(data[2]);
                // Reuse the already-registered instance (if any) so connections keep
                // accumulating on one object — one scan instead of Any() + Single().
                origin = airports.SingleOrDefault(a => a.Name.Equals(origin.Name)) ?? origin;
                destination = airports.SingleOrDefault(a => a.Name.Equals(destination.Name)) ?? destination;
                origin.ConnectTo(destination, price);
                airports.Add(origin);
                airports.Add(destination);
            }
        }
    } catch (System.Exception e) {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("The File could not be read:");
        Console.WriteLine(e.Message);
        Console.ResetColor();
    }
    return airports;
}
// Builds the program tree: one node per declaration, parent/child links wired up,
// and the root identified as the single name never claimed as anyone's child.
public CircusProgram(IEnumerable<ProgramNodeDeclaration> programNodes) {
    var nodesByName = new FlexibleDictionary<string, ProgramTreeNode>();

    // Register all nodes
    foreach (var declaration in programNodes) {
        nodesByName[declaration.Name] = new(new(declaration));
    }

    // Declare the relationships; every name referenced as a child is no longer
    // a root candidate.
    var rootCandidates = new HashSet<string>(nodesByName.Keys);
    foreach (var declaration in programNodes) {
        if (declaration.IsLeafNode) {
            continue;
        }
        foreach (var childName in declaration.ChildrenNodes) {
            nodesByName[declaration.Name].AddChild(nodesByName[childName]);
            rootCandidates.Remove(childName);
        }
    }

    // Identify the tree root — exactly one candidate must remain.
    programTree = new(nodesByName[rootCandidates.Single()]);
}
public void DataIntegrity_NoDuplicateWhereOnlyDifferenceIsNormalVsNonNormalDelivery() {
    // PG-152/PG-1272: Glyssen now handles duplicates where the only difference is
    // between normal (blank) delivery and a specified delivery, but if the same
    // character speaks twice in the same verse with two different deliveries, it's
    // still genrally best to make them both explicit. We allow for the exception
    // of an Alternate (where the main speaker is allowed to speak the quotation
    // rather than having it spoken by the character being quoted), because in this
    // case the delivery is as much an informational note to the scripter as it is
    // a delivery for the recording team to attend to.
    IEqualityComparer<CharacterVerse> comparer = new BcvCharacterEqualityComparer();
    ISet<CharacterVerse> uniqueCharacterVerses = new HashSet<CharacterVerse>(comparer);
    IList<CharacterVerse> duplicateCharacterVerses = new List<CharacterVerse>();
    // Ordering by IsNullOrEmpty(Delivery) puts entries WITH a delivery first, so a
    // blank-delivery entry always arrives after the explicit-delivery duplicate it
    // may collide with (Add then fails and we inspect the stored entry).
    foreach (CharacterVerse cv in ControlCharacterVerseData.Singleton.GetAllQuoteInfo()
             .OrderBy(cv => cv.BcvRef).ThenBy(cv => string.IsNullOrEmpty(cv.Delivery))) {
        if (!uniqueCharacterVerses.Add(cv) && string.IsNullOrEmpty(cv.Delivery)) {
            // Only a collision with a non-Alternate quote counts as a problem duplicate.
            if (uniqueCharacterVerses.Single(c => comparer.Equals(c, cv)).QuoteType != QuoteType.Alternate) {
                duplicateCharacterVerses.Add(cv);
            }
        }
    }
    Assert.False(duplicateCharacterVerses.Any(),
        "Duplicate Character-Verse data:" + Environment.NewLine +
        duplicateCharacterVerses.Select(cv => cv.BcvRef + ", " + cv.Character).OnePerLineWithIndent());
}
// Returns the tracked proxy for `entity`, creating and caching one when the entity
// is seen for the first time, and recursively routes every referenced entity
// through its own related DbSet so it gets proxied/tracked too.
internal TEntity Add(TEntity entity, EntityState state) {
    // Already tracked? Return the existing proxy generated from this entity.
    var existingProxy = _cachedEntities.SingleOrDefault(e => ((IEntityProxy)e).GeneratedFrom.Equals(entity));
    if (existingProxy != null) {
        return existingProxy;
    }
    // Wrap in a dynamic proxy unless the instance already is one.
    var proxy = entity as IEntityProxy ?? (IEntityProxy)DynamicProxy.Instance.Wrap(entity);
    entity = (TEntity)proxy;
    _cachedEntities.Add(entity);
    proxy.State = state;
    // For each reference (FK) column, find the related DbSet<T> for the referenced
    // entity's runtime type and invoke its single-argument Add via reflection, then
    // store the resulting proxy back on the column.
    foreach (var column in Columns.Where(c => c.ReferenceTable != null)) {
        var reference = column.GetValue(entity);
        if (reference == null) {
            continue;
        }
        var referenceEntityType = reference.GetType();
        var referenceDbSetType = typeof(DbSet<>).MakeGenericType(referenceEntityType);
        var referenceDbSet = _relatedDbSets.Single(d => d.GetType() == referenceDbSetType);
        var add = referenceDbSetType.GetMethod("Add", new[] { referenceEntityType });
        var referenceProxy = add.Invoke(referenceDbSet, new[] { reference });
        column.SetValue(entity, referenceProxy);
    }
    return entity;
}
// Verifies that creating a new file raises exactly one change event for its path.
public async Task NewFile(bool usePolling) {
    // Skip on MacOS https://github.com/dotnet/aspnetcore/issues/29141
    if (!usePolling && OperatingSystem.IsMacOS()) {
        return;
    }

    var directory = _testAssetManager.CreateTestDirectory(identifier: usePolling.ToString()).Path;
    using var watcher = FileWatcherFactory.CreateWatcher(directory, usePolling);

    var changeSignal = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously);
    var observedChanges = new HashSet<string>();
    watcher.OnFileChange += (_, changedPath) => {
        observedChanges.Add(changedPath);
        changeSignal.TrySetResult();
    };
    watcher.EnableRaisingEvents = true;

    var newFilePath = Path.Combine(directory, "foo");
    File.WriteAllText(newFilePath, string.Empty);

    await changeSignal.Task.TimeoutAfter(DefaultTimeout);
    Assert.Equal(newFilePath, observedChanges.Single());
}
/// <summary>Given an entity type, returns the corresponding entity set if it's unique.</summary>
/// <param name="entityType">Given entity type.</param>
/// <returns>Corresponding entity set if it's unique, null otherwise.</returns>
private EntitySetBase GetUniqueEntitySetForType(EntityType entityType) {
    // The comparer-backed set deduplicates equal entity sets appearing in different
    // containers; only genuinely distinct matches make Count exceed 1.
    HashSet<EntitySetBase> entitySets = new HashSet<EntitySetBase>(EqualityComparerEntitySet.Default);
    foreach (EntityContainer container in this.EdmItemCollection.GetItems<EntityContainer>()) {
        bool alreadyAdded = false;
        foreach (EntitySetBase es in container.BaseEntitySets
                 .Where(x => x.BuiltInTypeKind == BuiltInTypeKind.EntitySet && x.ElementType == entityType)) {
            // Two matching sets within the SAME container can never be unique — bail early.
            if (alreadyAdded == true) {
                return (null);
            }
            alreadyAdded = true;
            entitySets.Add(es);
        }
    }
    // Across containers: unique only when exactly one distinct set survived.
    if (entitySets.Count == 1) {
        return (entitySets.Single());
    } else {
        return (null);
    }
}
// Runs the program from its first line up to its single terminal END statement.
public void MakeProgramEnd() {
    FindTerminalLocations();
    // Exactly one END is expected; Single() enforces that invariant.
    var endLine = ends.Single();
    ExecuteUntilLoopOrEnd(0, endLine);
}
// Depth-first search for number cycles: extends `numbersUsed` with numbers whose
// front two digits match the tail's back two digits, consuming one base per step.
// When one base remains, the chain closes only if it links back to the first number.
public void CycleSearch(List<long> numbersUsed, HashSet<int> basesRemaining) {
    var lastNumber = numbersUsed.Last();
    var backTwo = BackTwo(lastNumber);
    if (!frontTwoToBaseToNumber.ContainsKey(backTwo)) {
        return;
    }
    if (basesRemaining.Count == 1) {
        var baseNumber = basesRemaining.Single();
        if (!frontTwoToBaseToNumber[backTwo].ContainsKey(baseNumber)) {
            return;
        }
        // Hoisted: the closing number (tail's back two digits + head's front two)
        // was built and parsed twice in the original.
        var closingNumber = long.Parse(backTwo + FrontTwo(numbersUsed.First()));
        if (frontTwoToBaseToNumber[backTwo][baseNumber].Contains(closingNumber)) {
            numbersUsed.Add(closingNumber);
            _cycles.Add(new List<long>(numbersUsed));
        }
        return;
    } else {
        // Recurse with each viable next number, removing its base from the pool.
        foreach (var remainingBase in basesRemaining.Where(b => frontTwoToBaseToNumber[backTwo].ContainsKey(b))) {
            foreach (var number in frontTwoToBaseToNumber[backTwo][remainingBase]) {
                CycleSearch(new List<long>(numbersUsed.Concat(new[] { number })),
                            new HashSet<int>(basesRemaining.Except(new[] { remainingBase })));
            }
        }
    }
}
// Preemptively replaces the live websocket with a freshly connected one, migrating
// subscriptions and event handlers before disposing the old connection.
private void PreemptiveWebsocketReplacingTimer_Elapsed(object sender, ElapsedEventArgs e) {
    logger.LogDebug("PreemptiveWebsocketReplacingTimer_Elapsed() started");
    preemptiveWebsocketReplacingTimer.Stop();
    WebSocket newWebSocket = new WebSocket(UrlManager.WebSocketUrl);
    newWebSocket.Log.Output = LogWebsocketOutput;
    // Fix: attach message/close/error handlers BEFORE connecting; the original
    // subscribed them only after Connect() plus the chunk re-registration, so any
    // message or failure in that window was silently lost.
    newWebSocket.OnMessage += WebSocket_OnMessage;
    newWebSocket.OnClose += WebSocket_OnClose;
    newWebSocket.OnError += WebSocket_OnError;
    newWebSocket.Connect();
    logger.LogDebug("PreemptiveWebsocketReplacingTimer_Elapsed(): new websocket connected");
    preemptiveWebsocketReplacingTimer.Start();
    WebSocket oldWebSocket = webSocket;
    webSocket = newWebSocket;
    SubscribeToCanvas();
    if (trackedChunks.Count == 1) {
        RegisterChunk(trackedChunks.Single());
    } else {
        RegisterMultipleChunks(trackedChunks);
    }
    logger.LogDebug("PreemptiveWebsocketReplacingTimer_Elapsed(): event resubscribing");
    // OnOpen is deliberately attached only now, matching the original flow: the open
    // event for this connection already fired, and the manual SubscribeToCanvas /
    // RegisterChunk calls above performed the work the handler would normally do.
    newWebSocket.OnOpen += WebSocket_OnOpen;
    oldWebSocket.OnOpen -= WebSocket_OnOpen;
    oldWebSocket.OnMessage -= WebSocket_OnMessage;
    oldWebSocket.OnClose -= WebSocket_OnClose;
    oldWebSocket.OnError -= WebSocket_OnError;
    (oldWebSocket as IDisposable).Dispose();
}
// Moves the dataset element identified by `selectedItem` (filename) into the given
// train/test partition, updating both the main dataset and the image dataset.
// Returns false when the item cannot be found or updated.
public static Boolean setTrainTestElements(String selectedItem, HashSet<DatasetElements> mainDataset, HashSet<DatasetImageElements> trainTestDataset, DatasetElements.TypeDataset typeset) {
    //Trying to access at element and updating HashSets
    try {
        var itemBase = mainDataset.Single(x => x.GetFilename().Equals(selectedItem));
        DatasetImageElements item = new DatasetImageElements(itemBase);
        item.SetTypeset(typeset);
        //setting charater
        item.SetCharater(DatasetImageElements.Charater.unset);
        //setting Image vector of pixel code
        //Updating information in imageDataset HashSet: remove BEFORE mutating so the
        //element is located under its old hash, then re-add with the new typeset.
        mainDataset.Remove(itemBase);
        itemBase.SetTypeset(typeset);
        mainDataset.Add(itemBase);
        //Adding element in train dataset of images
        trainTestDataset.Add(item);
    } catch (Exception) {
        // Was `catch (Exception e)` with an unused variable; any failure (e.g. Single()
        // finding zero or multiple matches) is reported to the caller as false.
        return false;
    }
    return true;
}
// Recursively indexes a directory into the media library: creates/updates the
// directory's entry, adds each .mp3 file, and — when every track in the directory
// reports the same album — renames the directory entry after that album.
private void Scan(int parentId, ref int numFilesAdded, DirectoryInfo info) {
    MediaLibraryEntry directoryEntry = GetOrCreateDirectoryEntry(parentId, info);
    IEnumerable<FileSystemInfo> fileSystemEntries = info.EnumerateFileSystemInfos("*", SearchOption.TopDirectoryOnly);
    // Collected by AddOrUpdateFileEntry for the album-name consensus check below.
    HashSet<string> albumNames = new HashSet<string>();
    foreach (FileSystemInfo info2 in fileSystemEntries) {
        if (info2 is DirectoryInfo directoryInfo) {
            // Skip macOS resource-fork folders.
            if (info2.Name != "__MACOSX") {
                Scan(directoryEntry.Id, ref numFilesAdded, directoryInfo);
            }
        } else if (info2.Name.EndsWith(".mp3")) {
            bool added = AddOrUpdateFileEntry(directoryEntry.Id, (FileInfo)info2, albumNames);
            if (added) {
                numFilesAdded++;
            }
        }
    }
    // All files agreed on exactly one album name -> adopt it as the directory name.
    if (albumNames.Count == 1) {
        directoryEntry.Name = albumNames.Single();
        Library.UpdateEntry(directoryEntry);
    }
}
// Verifies that UpdateLicenseLimitsCommand is not re-issued on every raft leader
// change: the per-node command count recorded before ten forced elections must be
// unchanged afterwards.
public async Task DoNotCallUpdateLicenseLimitsCommandOnEveryLeaderChange() {
    var (servers, leader) = await CreateRaftCluster(3);
    await WaitForRaftIndexToBeAppliedInCluster(9, TimeSpan.FromSeconds(15));
    // Baseline: all nodes must report the SAME count (a single distinct value).
    var expected = new HashSet<long>();
    foreach (var server in servers) {
        expected.Add(CountOfRaftCommandByType(server, nameof(UpdateLicenseLimitsCommand)));
    }
    Assert.Single(expected);
    // Force ten leader changes by repeatedly stepping the current leader down.
    for (int i = 0; i < 10; i++) {
        await ActionWithLeader(l => {
            l.ServerStore.Engine.CurrentLeader.StepDown();
            return (l.ServerStore.Engine.WaitForLeaderChange(l.ServerStore.ServerShutdown));
        });
    }
    // The count must not have grown on any node.
    foreach (var server in servers) {
        Assert.Equal(expected.Single(), CountOfRaftCommandByType(server, nameof(UpdateLicenseLimitsCommand)));
    }
}
/// <summary>
/// Returns an absolute path if every combination of baseDirs + relative return only one existing file.
/// </summary>
/// <param name="relPath">
/// The reference one uses to import another lua script. Relative in terms of script path, lua
/// path or it is an absolute path itself.
/// </param>
/// <returns>The file path if unique or null, if multiple files are accessible via the relPath param.</returns>
public string getPath(string relPath) {
    //return path, only if a single match exists:
    //base directories are unique, but constructed directories could reference multiple files e.g.:
    //1. opening ...workspace\ultrascript.lua makes rel_path = "ultrascript.lua"
    //2. lua_path = "C:\"
    //==> reference could either point to workspace\ultrascript.lua or C:\ultrascript.lua
    try {
        //building paths
        HashSet<string> dirs = new HashSet<string>();
        foreach (var baseDir in _baseDirs) {
            dirs.Add(buildPath(baseDir, relPath));
        }
        //check uniqueness | added as function for future enrichment.
        //if path would point to a dir: hasOneMatch = e => Directory.Exists(e);
        Func<string, bool> hasOneMatch = e => File.Exists(e);
        return dirs.Single(hasOneMatch);
    }
    //Single() throws exception if zero or multiple matches exist
    catch (InvalidOperationException ex) {
        // Fix: the message was previously printed twice in DEBUG builds — once inside
        // an #if DEBUG block and once unconditionally. A single write suffices.
        Console.WriteLine(ex.Message);
        return null;
    }
}
// Registers one regular (per-value) property filter for each allowed value, then
// adopts the filters' common no-match behavior when they all agree — otherwise the
// caller-supplied behavior wins.
public CimInstanceRegularFilter(string propertyName, IEnumerable allowedPropertyValues, bool wildcardsEnabled, BehaviorOnNoMatch behaviorOnNoMatch) {
    var observedBehaviors = new HashSet<BehaviorOnNoMatch>();
    foreach (object allowedPropertyValue in allowedPropertyValues) {
        var valueFilter = new PropertyValueRegularFilter(
            propertyName,
            allowedPropertyValue,
            wildcardsEnabled,
            behaviorOnNoMatch);
        this.AddPropertyValueFilter(valueFilter);
        observedBehaviors.Add(valueFilter.BehaviorOnNoMatch);
    }

    this.BehaviorOnNoMatch = observedBehaviors.Count == 1
        ? observedBehaviors.Single()
        : behaviorOnNoMatch;
}
// Removes the registry entries matching the given ISINs.
// Throws ArgumentNullException when `isins` is null; a no-op (with a log entry)
// when it is empty. Note: Single() still throws if an ISIN has no matching entry.
public void RemoveRange(IEnumerable<string> isins) {
    if (isins is null) {
        throw new ArgumentNullException(nameof(isins));
    }

    // Materialize once: the original enumerated `isins` three times
    // (Any(), ToList(), Count()), which is wasteful or even incorrect for
    // one-shot sequences.
    var isinList = isins.ToList();

    if (isinList.Count == 0) {
        _logger.Info("No entry to remove.");
        return;
    }

    if (!_fileContentLoaded) {
        Load();
    }

    isinList.ForEach(isin => _entities.Remove(_entities.Single(e => Equals(e.Isin, isin))));

    _fileContentSaved = false;
    _logger.Info($"{isinList.Count} registry item removed.");
}
// Among the six-segment patterns (candidates for 0, 6, 9), identifies 6 and 0 and
// records the C and D segment wires. Relies on Digits[1], Digits[8] and Digits[9]
// having been resolved already.
public void Find6and0() {
    var hs1 = new HashSet<char>(Digits[1].ToCharArray());
    var coll = UniqueValues.Where(uv => uv.Key.Length == 6).ToList();
    foreach (var uv in coll) {
        // The single segment of digit 8 that this six-segment pattern is missing.
        var hs = new HashSet<char>(Digits[8].ToCharArray());
        hs.ExceptWith(uv.Key.ToCharArray());
        char ch = hs.Single();
        if (hs1.Contains(ch)) {
            // Missing segment is one of digit 1's two segments -> pattern is 6,
            // and the missing wire is segment C.
            Digits.Add(6, uv.Key);
            C = ch;
        } else if (Digits[9] == uv.Key) {
            // 9 already found
            ;
        } else {
            // Remaining pattern is 0; its missing wire is segment D.
            D = ch;
            Digits.Add(0, uv.Key);
        }
    }
}
// Deletes the commander behind the current grid selection, after confirmation.
// Deletion is only offered when the selected cells span exactly one row.
private void btnDeleteCommander_Click(object sender, EventArgs e) {
    var cells = dataGridViewCommanders.SelectedCells;

    // HashSet.Add already ignores duplicates — the original's Contains pre-check
    // was a redundant second lookup.
    HashSet<int> rowindexes = new HashSet<int>();
    foreach (var cell in cells.OfType<DataGridViewCell>()) {
        rowindexes.Add(cell.RowIndex);
    }

    if (rowindexes.Count == 1) {
        var row = dataGridViewCommanders.Rows[rowindexes.Single()].DataBoundItem as EDCommander;
        var result = MessageBox.Show("Do you wish to delete commander " + row.Name + "?", "Delete commander", MessageBoxButtons.YesNo, MessageBoxIcon.Information);
        if (result == DialogResult.Yes) {
            EDDConfig.Instance.DeleteCommander(row);
            _discoveryForm.TravelControl.LoadCommandersListBox();
            UpdateCommandersListBox();
            _discoveryForm.RefreshHistoryAsync();       // will do a new parse on commander list adding/removing scanners
        }
    }
}
// Returns accounts matching `searchString` either by a project-membership role or
// by the user's full name.
public List<Account> GetUserList(string searchString) {
    Require.NotEmpty(searchString, nameof(searchString));

    var allProjectMemberships = _projectMembershipRepostiory.GetAllProjectMemberships().ToList();
    var allUsers = new HashSet<Account>(_userRepository.GetAllAccounts());

    // Index users by id once: the original ran allUsers.Single(...) per membership,
    // an O(users * memberships) scan.
    var usersById = allUsers.ToDictionary(account => account.UserId);

    var allUsersToSearchByRole = new HashSet<Account>(
        allProjectMemberships.Select(membership => usersById[membership.DeveloperId]));
    // Per user: the roles of all memberships belonging to that user.
    var userRolesDictionary = allUsersToSearchByRole.ToDictionary(
        user => user,
        user => allProjectMemberships.Where(membership => membership.DeveloperId == user.UserId)
                                     .Select(that => that.Role));

    // Role matches union name matches.
    return userRolesDictionary.Where(
               pair => pair.Value.Any(role => Extensions.Contains(role, searchString)))
           .Select(pair => pair.Key)
           .Union(
               allUsers.Where(
                   account => Extensions.Contains($"{account.Firstname} {account.Lastname}", searchString))).ToList();
}
// Inserts `pattern` into the chain structure: when an existing head chain starts
// with the same character, the new pattern is merged into it; otherwise the freshly
// built chain becomes a new head.
public void Add(string pattern) {
    if (pattern == string.Empty) {
        return;
    }
    var chainFactory = new ChainFactory();
    var newChainHead = chainFactory.Create(pattern);
    if (_heads.Contains(newChainHead)) {
        // A head for this first character exists: walk the existing chain along the
        // new pattern's path.
        var head = _heads.Single(x => x.Data == pattern.First());
        var traversePathHead = newChainHead.SubNodes.Single();
        var enumerator = new EnumeratorTraversingSpecifiedPath(head, traversePathHead);
        // Advance until the paths diverge or the new pattern is exhausted.
        while (enumerator.MoveNext()) {
        }
        // Pattern not fully matched and its remainder not already branching off the
        // last matched node -> graft the remainder there.
        if (!enumerator.IsDestinationReached && !enumerator.LastTraversedNode.SubNodes.Contains(enumerator.TraverseRemainder)) {
            enumerator.LastTraversedNode.AppendSub(enumerator.TraverseRemainder);
        }
    } else {
        _heads.Add(newChainHead);
    }
}
// Single() on an empty set has nothing to return and must throw.
public void HashSetExtensions_Single_ThrowsExceptionIfHashSetIsEmpty() {
    var emptySet = new HashSet<Int32>();

    Assert.That(() => emptySet.Single(), Throws.TypeOf<InvalidOperationException>());
}
// Resolves the full dependency closure of `identity` across all configured NuGet
// sources, preferring the lowest satisfying versions, and drops explicitly ignored
// dependency ids.
public virtual async Task<IEnumerable<SourcePackageDependencyInfo>> GetDependenciesAsync(PackageIdentity identity, SourceCacheContext cacheContext) {
    var packageSourceProvider = new PackageSourceProvider(m_NugetSettings);
    var sourceRepositoryProvider = new SourceRepositoryProvider(packageSourceProvider, m_Providers);
    var sourceRepositories = sourceRepositoryProvider.GetRepositories();
    // Comparer-backed set: one entry per distinct package identity.
    var availablePackages = new HashSet<SourcePackageDependencyInfo>(PackageIdentityComparer.Default);
    await GetPackageDependenciesAsync(identity, cacheContext, sourceRepositories, availablePackages);
    var resolverContext = new PackageResolverContext(
        DependencyBehavior.Lowest,              // pick the lowest version satisfying each dependency
        new[] { identity.Id },
        Enumerable.Empty<string>(),
        Enumerable.Empty<PackageReference>(),
        Enumerable.Empty<PackageIdentity>(),
        availablePackages,
        sourceRepositoryProvider.GetRepositories().Select(s => s.PackageSource),
        Logger);
    // Map each resolved identity back to its full dependency info, then filter out
    // ignored ids.
    return (m_PackageResolver.Resolve(resolverContext, CancellationToken.None)
            .Select(p => availablePackages.Single(x => PackageIdentityComparer.Default.Equals(x, p)))
            .Where(d => !m_IgnoredDependendencies.Contains(d.Id)));
}
/// <summary>
/// Returns the available templates.
/// </summary>
/// <returns>The response from the GetTemplates API method, as returned by RSign.</returns>
public IEnumerable<Template> GetTemplates() {
    if (!IsAuthenticated) {
        Authenticate();
    }
    // The "Template" envelope type id scopes the consumable-list request below.
    var envelopeType = _envelopeTypes.Single(x => x.Description.Equals("Template", StringComparison.InvariantCultureIgnoreCase));
    var response = _httpClient.Get(string.Format("Template/GetConsumableListForEnvelope/{0}", envelopeType.EnvelopeTypeId));
    // NOTE(review): .Result blocks on the async read. Acceptable in this synchronous
    // API, but a deadlock risk if this is ever called on a synchronization-context
    // (UI) thread — confirm the hosting environment.
    return (JsonConvert
            .DeserializeObject<TemplateList>(response.Content.ReadAsStringAsync().Result)
            .Templates
            .ToList());
}
// Computes the "life support rating": iteratively filters the candidate bit strings
// on the most common bit per position (oxygen, ties keep '1') and the least common
// bit (scrubber, ties keep '0'), then multiplies the two surviving values.
public static long Solve2(string[] lines) {
    // Count of '1' characters at each string position. Declared locally (shadowing
    // the file-level helper the original delegated to) so this block is
    // self-contained; accepts any sequence so it serves both call sites below.
    static int[] BuildOnesCounts(IEnumerable<string> values) {
        int[] counts = null;
        foreach (var value in values) {
            counts ??= new int[value.Length];
            for (int j = 0; j < value.Length; j++) {
                if (value[j] == '1') {
                    counts[j]++;
                }
            }
        }
        return counts ?? Array.Empty<int>();
    }

    // Repeatedly removes candidates whose bit at the current position matches the
    // character chosen by `calcRemoveChar(onesCount, candidateCount)`.
    int Calc(Func<int, int, char> calcRemoveChar) {
        int[] onesCounts = BuildOnesCounts(lines);
        HashSet<string> set = lines.ToHashSet();
        int i = 0;
        while (set.Count > 1) {
            int count = onesCounts[i];
            char removeCh = calcRemoveChar(count, set.Count);
            set.RemoveWhere(x => x[i] == removeCh);
            onesCounts = BuildOnesCounts(set);
            i++;
        }
        return Convert.ToInt32(set.Single(), 2);
    }

    int oxygen = Calc((count, setCount) => count * 2 >= setCount ? '0' : '1');
    int scrubber = Calc((count, setCount) => count * 2 >= setCount ? '1' : '0');
    // Fix: widen BEFORE multiplying — the original evaluated `oxygen * scrubber`
    // in 32-bit arithmetic and only then converted to the declared long return type,
    // so wide inputs could silently overflow.
    return (long)oxygen * scrubber;
}
// Converts a plugins-layer Plugin into the deploy model: duplicate steps (per
// StepEqualityComparer) are merged into the first occurrence, then every surviving
// step is converted to a deploy step.
public static Plugin Convert(global::Plugins.Plugin p) {
    var plugin = new Plugin(p.GetType().FullName);
    var stepComparer = new StepEqualityComparer();
    // A dictionary keyed by the same comparer retrieves the existing duplicate in
    // O(1), replacing the original HashSet Contains + linear Single() scan.
    var stepMap = new Dictionary<global::Plugins.Step, global::Plugins.Step>(stepComparer);
    foreach (var s in p.Steps) {
        if (stepMap.TryGetValue(s, out var existingStep)) {
            existingStep.Merge(s);
            continue;
        }
        stepMap.Add(s, s);
    }
    foreach (var s in stepMap.Values) {
        plugin.Steps.Add(StepConverter.ConvertToDeployStep(s));
    }
    return plugin;
}
// Builds the most specific "unsupported compiled-query parameter" exception for the
// given expression. (Decompiled EF internals; logic intentionally untouched.)
private static NotSupportedException InvalidCompiledQueryParameterException(
    Expression expression) {
    ParameterExpression parameterExpression;
    if (expression.NodeType == ExpressionType.Parameter) {
        parameterExpression = (ParameterExpression)expression;
    } else {
        // Collect every ParameterExpression referenced within the expression tree.
        HashSet<ParameterExpression> parameters = new HashSet<ParameterExpression>();
        EntityExpressionVisitor.Visit(expression, (Func<Expression, Func<Expression, Expression>, Expression>)((exp, baseVisit) => {
            if (exp != null && exp.NodeType == ExpressionType.Parameter) {
                parameters.Add((ParameterExpression)exp);
            }
            return (baseVisit(exp));
        }));
        // More than one distinct parameter -> the parameter types themselves are unsupported.
        if (parameters.Count != 1) {
            return (new NotSupportedException(Strings.CompiledELinq_UnsupportedParameterTypes((object)expression.Type.FullName)));
        }
        parameterExpression = parameters.Single<ParameterExpression>();
    }
    // Same type -> the parameter's type is unsupported; otherwise its use AS the
    // expression's type is what is unsupported.
    if (parameterExpression.Type.Equals(expression.Type)) {
        return (new NotSupportedException(Strings.CompiledELinq_UnsupportedNamedParameterType((object)parameterExpression.Name, (object)parameterExpression.Type.FullName)));
    }
    return (new NotSupportedException(Strings.CompiledELinq_UnsupportedNamedParameterUseAsType((object)parameterExpression.Name, (object)expression.Type.FullName)));
}
/// <summary>
/// Get node collection from dataGridView
/// </summary>
/// <returns>HashSet of nodes</returns>
public HashSet<Node> GetNodes() {
    // Fix: the original stored nodes in a HashSet and then accessed them positionally
    // with ElementAt(row). HashSet enumeration order is unspecified, so row N of the
    // table did not reliably map to element N of the set. A list preserves row order;
    // a dictionary replaces the per-dependency linear Single() scan.
    var nodeList = new List<Node>();
    var nodesById = new Dictionary<string, Node>();

    // Enlist all nodes
    for (int row = 0; row < dataTable.Rows.Count; row++) {
        var node = new Node((string)dataTable.Rows[row][0], (int)dataTable.Rows[row][1]);
        nodeList.Add(node);
        nodesById[node.ID] = node;   // NOTE(review): last-wins on duplicate IDs — confirm IDs are unique
    }

    // Create dependencies
    for (int row = 0; row < dataTable.Rows.Count; row++) {
        if (dataTable.Rows[row][2] != null) {
            foreach (string dependency in dataTable.Rows[row][2].ToString().Split(separators, StringSplitOptions.RemoveEmptyEntries)) {
                nodeList[row].Dependencies.Add(nodesById[dependency]);
            }
        }
    }

    return new HashSet<Node>(nodeList);
}
// Pushes the (single) repository behind the current selection, optionally prompting
// for branch/tag parameters, and reports any git failure as a warning.
public override void OnExecute(CommandEventArgs e) {
    GitPushArgs args = new GitPushArgs();
    string repositoryRoot;

    // Collect distinct repository roots of all selected project roots.
    // HashSet.Add already ignores duplicates — the Contains pre-check was redundant.
    var repositoryRoots = new HashSet<string>(FileSystemUtil.StringComparer);
    foreach (var projectRoot in GetAllRoots(e)) {
        if (GitTools.TryGetRepositoryRoot(projectRoot.FullPath, out repositoryRoot)) {
            repositoryRoots.Add(repositoryRoot);
        }
    }

    if (repositoryRoots.Count > 1) {
        throw new InvalidOperationException("Pushing of multiple repository roots is not supported");
    }
    repositoryRoot = repositoryRoots.Single();

    // Branch/tag pushes need user-supplied parameters before we can run.
    switch (e.Command) {
    case VisualGitCommand.PendingChangesPushSpecificBranch:
    case VisualGitCommand.PendingChangesPushSpecificTag:
        if (!QueryParameters(e, repositoryRoot, args))
            return;
        break;
    }

    ProgressRunnerArgs pa = new ProgressRunnerArgs();
    pa.CreateLog = true;
    pa.TransportClientArgs = args;

    // Run the push inside the modal progress runner; the exception is captured so it
    // can be surfaced as a warning after the runner returns.
    GitException exception = null;
    e.GetService<IProgressRunner>().RunModal(CommandStrings.PushingSolution, pa,
        delegate(object sender, ProgressWorkerArgs a) {
            using (var client = e.GetService<IGitClientPool>().GetNoUIClient()) {
                try {
                    client.Push(repositoryRoot, args);
                } catch (GitException ex) {
                    exception = ex;
                }
            }
        });

    if (exception != null) {
        e.GetService<IVisualGitErrorHandler>().OnWarning(exception);
    }
}
/// <summary>
/// Get node collection from dataGridView
/// </summary>
/// <returns>HashSet of nodes</returns>
public HashSet<Node> GetNodes() {
    // Fix: nodes were stored in a HashSet and read back positionally via
    // ElementAt(row); HashSet enumeration order is unspecified, so the row->node
    // mapping was unreliable. A list keeps row order and a dictionary replaces the
    // linear Single() lookup per dependency.
    var orderedNodes = new List<Node>();
    var nodesById = new Dictionary<string, Node>();

    // Enlist all nodes
    for (int row = 0; row < dataTable.Rows.Count; row++) {
        var node = new Node((string)dataTable.Rows[row][0], (int)dataTable.Rows[row][1]);
        orderedNodes.Add(node);
        nodesById[node.ID] = node;   // NOTE(review): last-wins on duplicate IDs — confirm IDs are unique
    }

    // Create dependencies
    for (int row = 0; row < dataTable.Rows.Count; row++) {
        if (dataTable.Rows[row][2] != null) {
            foreach (string dependency in dataTable.Rows[row][2].ToString().Split(separators, StringSplitOptions.RemoveEmptyEntries)) {
                orderedNodes[row].Dependencies.Add(nodesById[dependency]);
            }
        }
    }

    return new HashSet<Node>(orderedNodes);
}
/// <summary>
/// Take a Dictionary of pins and write it to an XML file
/// </summary>
/// <param name="pinMap">A Dictionary of frame X and Y rotations to a list of pins on that frame</param>
/// <param name="outfile">The file to write the formatted XML to</param>
public void Pack(Dictionary<Tuple<int, int>, List<Pin>> pinMap, string outfile) {
    List<FrameData> frames = new List<FrameData>();
    // Pins are deduplicated across frames; each distinct pin gets a sequential id.
    HashSet<DissectionPinData> pins = new HashSet<DissectionPinData>();
    int nextId = 0;
    foreach (var pair in pinMap) {
        List<Pin> framePins = pair.Value;       // All the pins on this frame
        Tuple<int, int> frame = pair.Key;       // The 2D rotation of this frame
        FrameData frameData = new FrameData();  // Data for the frame to be represented in XML
        frameData.Rotation = new Vector3(frame.Item1, frame.Item2, 0.0f);
        List<FramePinData> framePinDatas = new List<FramePinData>();    // The links between this frame and its pins
        foreach (Pin pin in framePins) {
            DissectionPinData data = ConvertToData(pin);
            if (pins.Add(data)) {
                // This pin doesn't already exist, so give it an id
                data.Id = nextId++;
            } else {
                // This pin is in the HashSet so get the original instance
                // (NOTE(review): linear scan per duplicate — HashSet<T>.TryGetValue
                // would avoid it on frameworks where it is available)
                data = pins.Single((DissectionPinData x) => { return x.Equals(data); });
            }
            framePinDatas.Add(new FramePinData(new Vector2(pin.X, pin.Y), data.Id));
        }
        frameData.Pins = framePinDatas.ToArray();
        frames.Add(frameData);
    }
    PinContainer container = new PinContainer();
    container.Frames = frames.ToArray();
    container.Pins = pins.ToArray();
    WriteXml(container, outfile);
}
// Evaluates a reverse-polish token stack: numeric tokens and named parameters are
// pushed; operator tokens pop their arguments and push the result. Throws
// CalculatorException on arity errors or leftover values.
private double CalculateRPNValue(Stack<string> RPNStack, IEnumerable<SolvedCalculatorParameter> parameters) {
    var resultStack = new Stack<double>();
    var parameterSet = new HashSet<SolvedCalculatorParameter>(parameters);
    while (RPNStack.Count > 0) {
        var val = RPNStack.Pop();
        if (IsNumber(val)) {
            // Fix: the token was previously parsed with the CURRENT culture after
            // replacing '.' with ',' — on dot-decimal cultures (e.g. en-US) "3.14"
            // became "3,14" and parsed as 314 (thousands separator). Parse the
            // '.'-formatted token with the invariant culture instead.
            resultStack.Push(double.Parse(val, System.Globalization.CultureInfo.InvariantCulture));
        } else {
            // Single lookup replaces the original Any() + Single() double scan.
            var parameter = parameterSet.FirstOrDefault(x => x.Name == val);
            if (parameter != null) {
                resultStack.Push(parameter.Value);
            } else {
                var op = this.OperatorFactory.GetOperatorByCode(val);
                if (resultStack.Count < op.ParameterCount) {
                    throw new CalculatorException(string.Format("Not enough parameters for function {0}", op.Name));
                }
                // Pop the operands, restoring their original (left-to-right) order.
                var operatorParams = new List<double>();
                for (int i = 0; i < op.ParameterCount; i++) {
                    operatorParams.Add(resultStack.Pop());
                }
                operatorParams.Reverse();
                var operationResult = op.DoOperation(operatorParams);
                resultStack.Push(operationResult);
            }
        }
    }
    if (resultStack.Count > 1) {
        throw new CalculatorException("The equation has too many values");
    }
    return resultStack.Pop();
}
public void HashSetExtensions_Single_ThrowsExceptionIfHashSetIsEmpty() {
    // Arrange: a set with no elements.
    var sut = new HashSet<Int32>();

    // Act & Assert: Single() must fail — there is no element to return.
    Assert.That(() => sut.Single(), Throws.TypeOf<InvalidOperationException>());
}
// Builds the most specific NotSupportedException for a compiled-query parameter
// expression that cannot be handled, distinguishing "the parameter's type is
// unsupported" from "using the parameter as this expression's type is unsupported".
private static NotSupportedException InvalidCompiledQueryParameterException(Expression expression) {
    ParameterExpression parameterExp;
    if (expression.NodeType == ExpressionType.Parameter) {
        parameterExp = (ParameterExpression)expression;
    } else {
        // If this is a simple query parameter (involving a single delegate parameter) report the
        // type of that parameter. Otherwise, report the type of the part of the parameter.
        HashSet<ParameterExpression> parameters = new HashSet<ParameterExpression>();
        EntityExpressionVisitor.Visit(expression, (exp, baseVisit) => {
            if (null != exp && exp.NodeType == ExpressionType.Parameter) {
                parameters.Add((ParameterExpression)exp);
            }
            return baseVisit(exp);
        });
        // More than one distinct parameter referenced -> parameter types unsupported.
        if (parameters.Count != 1) {
            return EntityUtil.NotSupported(Strings.CompiledELinq_UnsupportedParameterTypes(expression.Type.FullName));
        }
        parameterExp = parameters.Single();
    }
    if (parameterExp.Type.Equals(expression.Type)) {
        // If the expression type is the same as the parameter type, indicate that the parameter type is not valid.
        return EntityUtil.NotSupported(Strings.CompiledELinq_UnsupportedNamedParameterType(parameterExp.Name, parameterExp.Type.FullName));
    } else {
        // Otherwise, indicate that using the specified parameter to produce a value of the expression's type is not supported in compiled query
        return EntityUtil.NotSupported(Strings.CompiledELinq_UnsupportedNamedParameterUseAsType(parameterExp.Name, expression.Type.FullName));
    }
}
// Synchronizes the severity context-menu check marks with the rule-set severities of
// the currently selected diagnostic items: when all selected items share exactly one
// effective severity, the matching menu item is checked; otherwise none are.
private void UpdateSeverityMenuItemsChecked() {
    _setSeverityErrorMenuItem.Checked = false;
    _setSeverityWarningMenuItem.Checked = false;
    _setSeverityInfoMenuItem.Checked = false;
    _setSeverityHiddenMenuItem.Checked = false;
    _setSeverityNoneMenuItem.Checked = false;

    var workspace = TryGetWorkspace() as VisualStudioWorkspaceImpl;
    if (workspace == null) {
        return;
    }

    // Gather the effective ReportDiagnostic of every selected item, per project
    // (each project may have its own rule set).
    HashSet<ReportDiagnostic> selectedItemSeverities = new HashSet<ReportDiagnostic>();
    var groups = _tracker.SelectedDiagnosticItems.GroupBy(item => item.AnalyzerItem.AnalyzersFolder.ProjectId);
    foreach (var group in groups) {
        var project = (AbstractProject)workspace.GetHostProject(group.Key);
        IRuleSetFile ruleSet = project.RuleSetFile;
        if (ruleSet != null) {
            var specificOptions = ruleSet.GetSpecificDiagnosticOptions();
            foreach (var diagnosticItem in group) {
                ReportDiagnostic ruleSetSeverity;
                if (specificOptions.TryGetValue(diagnosticItem.Descriptor.Id, out ruleSetSeverity)) {
                    selectedItemSeverities.Add(ruleSetSeverity);
                } else {
                    // The rule has no setting.
                    selectedItemSeverities.Add(ReportDiagnostic.Default);
                }
            }
        }
    }

    // Mixed severities (or no items) -> leave everything unchecked.
    if (selectedItemSeverities.Count != 1) {
        return;
    }

    switch (selectedItemSeverities.Single()) {
    case ReportDiagnostic.Default:
        break;
    case ReportDiagnostic.Error:
        _setSeverityErrorMenuItem.Checked = true;
        break;
    case ReportDiagnostic.Warn:
        _setSeverityWarningMenuItem.Checked = true;
        break;
    case ReportDiagnostic.Info:
        _setSeverityInfoMenuItem.Checked = true;
        break;
    case ReportDiagnostic.Hidden:
        _setSeverityHiddenMenuItem.Checked = true;
        break;
    case ReportDiagnostic.Suppress:
        _setSeverityNoneMenuItem.Checked = true;
        break;
    default:
        break;
    }
}
// Registers a public static method of `typ` with the Lua state under `path`.
// Returns the number of functions registered: 0 when none found, 1 for a direct
// registration, or the overload count when the whole array is exposed to Lua.
private int lua_RequestStatic(string path, Type typ, string methodname) {
    MethodInfo[] allmethods = typ.GetMethods(BindingFlags.Static | BindingFlags.Public);
    HashSet<MethodInfo> candidates = new HashSet<MethodInfo>();
    foreach (MethodInfo method in allmethods) {
        if (method.Name == methodname)
            candidates.Add(method);
    }
    if (candidates.Count == 0) {
        Logger.Error(string.Format("Failed to locate static method {0} on type {1}!", methodname, typ));
        return 0;
    } else if (candidates.Count == 1) {
        // Exactly one overload -> register it directly.
        lua.RegisterFunction(path, null, candidates.Single());
        return 1;
    } else {
        // Lets filter out any methods that use generic, in or out params
        var filtered = candidates.Where((x) => {
            if (x.IsGenericMethod || x.ContainsGenericParameters)
                return false;
            foreach (ParameterInfo pinfo in x.GetParameters()) {
                if (!pinfo.IsRetval) {
                    if (pinfo.IsOut)
                        return false;
                    if (pinfo.IsIn)
                        return false;
                }
            }
            return true;
        }).ToArray();
        if (filtered.Length == 1) {
            lua.RegisterFunction(path, null, filtered[0]);
            return 1;
        } else {
            //LogError(string.Format("Failed to locate suitable static method {0} on type {1}! (there were {2} candidates)", methodname, typ, filtered.Length));
            /*for (int i = 0; i < filtered.Length; i++) lua[path + "_Overload" + i.ToString()] = filtered[i];*/
            // Multiple viable overloads remain: expose the whole array and let the
            // Lua bridge dispatch among them.
            lua[path] = filtered;
            return filtered.Length;
        }
    }
}
/// <summary>
/// Computes a bulk suppression (or remove-suppression) fix for the diagnostics
/// tracked by the suppression state service and applies it to the workspace.
/// </summary>
/// <param name="shouldFixInProject">Filter deciding which projects participate in the fix.</param>
/// <param name="selectedEntriesOnly">If true, only the entries selected in the error list are fixed.</param>
/// <param name="isAddSuppression">True to add suppressions; false to remove existing ones.</param>
/// <param name="isSuppressionInSource">True for in-source (pragma) suppression; false for the global suppression file.</param>
/// <param name="onlyCompilerDiagnostics">If true, restrict the fix to compiler diagnostics.</param>
/// <param name="showPreviewChangesDialog">If true, show a preview-changes dialog before applying.</param>
/// <returns>
/// false when the user cancels at any stage or a fixer fails; true when the fix
/// was applied (or there was nothing to fix).
/// </returns>
private bool ApplySuppressionFix(Func<Project, bool> shouldFixInProject, bool selectedEntriesOnly, bool isAddSuppression, bool isSuppressionInSource, bool onlyCompilerDiagnostics, bool showPreviewChangesDialog)
{
    ImmutableDictionary<Document, ImmutableArray<Diagnostic>> documentDiagnosticsToFixMap = null;
    ImmutableDictionary<Project, ImmutableArray<Diagnostic>> projectDiagnosticsToFixMap = null;

    var title = isAddSuppression ? ServicesVSResources.SuppressMultipleOccurrences : ServicesVSResources.RemoveSuppressMultipleOccurrences;
    var waitDialogMessage = isAddSuppression ? ServicesVSResources.ComputingSuppressionFix : ServicesVSResources.ComputingRemoveSuppressionFix;

    // Get the diagnostics to fix from the suppression state service.
    // Runs under the wait dialog below; populates the two maps captured above.
    Action<CancellationToken> computeDiagnosticsToFix = cancellationToken =>
    {
        var diagnosticsToFix = _suppressionStateService.GetItemsAsync(
                    selectedEntriesOnly,
                    isAddSuppression,
                    isSuppressionInSource,
                    onlyCompilerDiagnostics,
                    cancellationToken)
            .WaitAndGetResult(cancellationToken);

        if (diagnosticsToFix.IsEmpty)
        {
            return;
        }

        cancellationToken.ThrowIfCancellationRequested();
        documentDiagnosticsToFixMap = GetDocumentDiagnosticsToFixAsync(diagnosticsToFix, shouldFixInProject, cancellationToken).WaitAndGetResult(cancellationToken);

        cancellationToken.ThrowIfCancellationRequested();
        // In-source suppression only targets document diagnostics, so the
        // project-level map stays empty in that mode.
        projectDiagnosticsToFixMap = isSuppressionInSource ?
            ImmutableDictionary<Project, ImmutableArray<Diagnostic>>.Empty :
            GetProjectDiagnosticsToFixAsync(diagnosticsToFix, shouldFixInProject, cancellationToken).WaitAndGetResult(cancellationToken);
    };

    var result = InvokeWithWaitDialog(computeDiagnosticsToFix, title, waitDialogMessage);

    // Bail out if the user cancelled.
    if (result == WaitIndicatorResult.Canceled ||
        documentDiagnosticsToFixMap == null ||
        projectDiagnosticsToFixMap == null)
    {
        return false;
    }

    if (documentDiagnosticsToFixMap.IsEmpty && projectDiagnosticsToFixMap.IsEmpty)
    {
        // Nothing to fix.
        return true;
    }

    // Equivalence key determines what fix will be applied.
    // Make sure we don't include any specific diagnostic ID, as we want all of the given diagnostics (which can have varied ID) to be fixed.
    var equivalenceKey = isAddSuppression ?
        (isSuppressionInSource ? FeaturesResources.SuppressWithPragma : FeaturesResources.SuppressWithGlobalSuppressMessage) :
        FeaturesResources.RemoveSuppressionEquivalenceKeyPrefix;

    // We have different suppression fixers for every language.
    // So we need to group diagnostics by the containing project language and apply fixes separately.
    var languages = new HashSet<string>(projectDiagnosticsToFixMap.Keys.Select(p => p.Language).Concat(documentDiagnosticsToFixMap.Select(kvp => kvp.Key.Project.Language)));
    var newSolution = _workspace.CurrentSolution;

    foreach (var language in languages)
    {
        // Use the Fix multiple occurrences service to compute a bulk suppression fix for the specified document and project diagnostics,
        // show a preview changes dialog and then apply the fix to the workspace.

        var documentDiagnosticsPerLanguage = GetDocumentDiagnosticsMappedToNewSolution(documentDiagnosticsToFixMap, newSolution, language);
        if (!documentDiagnosticsPerLanguage.IsEmpty)
        {
            var suppressionFixer = GetSuppressionFixer(documentDiagnosticsPerLanguage.SelectMany(kvp => kvp.Value), language, _codeFixService);
            if (suppressionFixer != null)
            {
                var suppressionFixAllProvider = suppressionFixer.GetFixAllProvider();
                newSolution = _fixMultipleOccurencesService.GetFix(
                    documentDiagnosticsPerLanguage,
                    _workspace,
                    suppressionFixer,
                    suppressionFixAllProvider,
                    equivalenceKey,
                    title,
                    waitDialogMessage,
                    cancellationToken: CancellationToken.None);
                if (newSolution == null)
                {
                    // User cancelled or fixer threw an exception, so we just bail out.
                    return false;
                }
            }
        }

        var projectDiagnosticsPerLanguage = GetProjectDiagnosticsMappedToNewSolution(projectDiagnosticsToFixMap, newSolution, language);
        if (!projectDiagnosticsPerLanguage.IsEmpty)
        {
            var suppressionFixer = GetSuppressionFixer(projectDiagnosticsPerLanguage.SelectMany(kvp => kvp.Value), language, _codeFixService);
            if (suppressionFixer != null)
            {
                var suppressionFixAllProvider = suppressionFixer.GetFixAllProvider();
                newSolution = _fixMultipleOccurencesService.GetFix(
                    projectDiagnosticsPerLanguage,
                    _workspace,
                    suppressionFixer,
                    suppressionFixAllProvider,
                    equivalenceKey,
                    title,
                    waitDialogMessage,
                    CancellationToken.None);
                if (newSolution == null)
                {
                    // User cancelled or fixer threw an exception, so we just bail out.
                    return false;
                }
            }
        }
    }

    if (showPreviewChangesDialog)
    {
        newSolution = FixAllGetFixesService.PreviewChanges(
            _workspace.CurrentSolution,
            newSolution,
            fixAllPreviewChangesTitle: title,
            fixAllTopLevelHeader: title,
            languageOpt: languages.Count == 1 ? languages.Single() : null,
            workspace: _workspace);
        if (newSolution == null)
        {
            return false;
        }
    }

    waitDialogMessage = isAddSuppression ? ServicesVSResources.ApplyingSuppressionFix : ServicesVSResources.ApplyingRemoveSuppressionFix;
    // Apply the computed solution changes under a second wait dialog.
    Action<CancellationToken> applyFix = cancellationToken =>
    {
        var operations = SpecializedCollections.SingletonEnumerable<CodeActionOperation>(new ApplyChangesOperation(newSolution));
        _editHandlerService.Apply(
            _workspace,
            fromDocument: null,
            operations: operations,
            title: title,
            cancellationToken: cancellationToken);
    };

    result = InvokeWithWaitDialog(applyFix, title, waitDialogMessage);
    return result == WaitIndicatorResult.Completed;
}
// Deletes the single selected commander from the grid after user confirmation.
// Does nothing when zero or multiple rows are selected.
private void btnDeleteCommander_Click(object sender, EventArgs e)
{
    // Collect the distinct row indexes covered by the cell selection
    // (several selected cells in one row must count that row only once).
    var cells = dataGridViewCommanders.SelectedCells;
    HashSet<int> rowindexes = new HashSet<int>();
    foreach (var cell in cells.OfType<DataGridViewCell>())
    {
        rowindexes.Add(cell.RowIndex);      // HashSet.Add is a no-op for duplicates
    }

    // Deletion is only offered for exactly one selected commander row.
    if (rowindexes.Count == 1)
    {
        var row = dataGridViewCommanders.Rows[rowindexes.Single()].DataBoundItem as EDCommander;
        if (row == null)
        {
            // Row is not bound to a commander (e.g. the new-row placeholder);
            // previously this would have thrown a NullReferenceException below.
            return;
        }

        var result = MessageBox.Show("Do you wish to delete commander " + row.Name + "?", "Delete commander", MessageBoxButtons.YesNo, MessageBoxIcon.Information);
        if (result == DialogResult.Yes)
        {
            EDDConfig.Instance.DeleteCommander(row);
            // Rebinding via DataSource = null breaks column sizing, so just
            // point the grid at the updated list and repaint.
            dataGridViewCommanders.DataSource = EDDConfig.Instance.listCommanders;
            dataGridViewCommanders.Update();
            _discoveryForm.TravelControl.LoadCommandersListBox();
        }
    }
}
// Builds a per-value regular filter for each allowed property value and derives
// the composite no-match behavior: if every per-value filter reports the same
// behavior, adopt it; otherwise (none, or a mix) use the caller-supplied one.
public CimInstanceRegularFilter(string propertyName, IEnumerable allowedPropertyValues, bool wildcardsEnabled, BehaviorOnNoMatch behaviorOnNoMatch)
{
    // Track every distinct no-match behavior produced by the per-value filters.
    var observedBehaviors = new HashSet<BehaviorOnNoMatch>();
    foreach (object value in allowedPropertyValues)
    {
        var valueFilter = new ClientSideQuery.PropertyValueRegularFilter(propertyName, value, wildcardsEnabled, behaviorOnNoMatch);
        base.AddPropertyValueFilter(valueFilter);
        observedBehaviors.Add(valueFilter.BehaviorOnNoMatch);
    }

    base.BehaviorOnNoMatch = observedBehaviors.Count == 1
        ? observedBehaviors.Single<BehaviorOnNoMatch>()
        : behaviorOnNoMatch;
}
// Deletes the single selected commander after user confirmation, then refreshes
// the commander lists and reparses history. No-op for zero/multi-row selections.
private void btnDeleteCommander_Click(object sender, EventArgs e)
{
    // Reduce the cell selection to the distinct set of row indexes.
    var cells = dataGridViewCommanders.SelectedCells;
    HashSet<int> rowindexes = new HashSet<int>();
    foreach (var cell in cells.OfType<DataGridViewCell>())
    {
        rowindexes.Add(cell.RowIndex);      // HashSet.Add is a no-op for duplicates
    }

    // Deletion is only offered for exactly one selected commander row.
    if (rowindexes.Count == 1)
    {
        var row = dataGridViewCommanders.Rows[rowindexes.Single()].DataBoundItem as EDCommander;
        if (row == null)
        {
            // Row is not bound to a commander (e.g. the new-row placeholder);
            // previously this would have thrown a NullReferenceException below.
            return;
        }

        var result = MessageBox.Show("Do you wish to delete commander " + row.Name + "?", "Delete commander", MessageBoxButtons.YesNo, MessageBoxIcon.Information);
        if (result == DialogResult.Yes)
        {
            EDDConfig.Instance.DeleteCommander(row);
            _discoveryForm.TravelControl.LoadCommandersListBox();
            UpdateCommandersListBox();
            _discoveryForm.RefreshHistoryAsync(); // will do a new parse on commander list adding/removing scanners
        }
    }
}
/// <summary>
/// Determines whether the given query view matches the discriminator map pattern:
/// a Project over a Filter, whose projection is a Case expression keyed on a single
/// discriminator property, each branch constructing one entity type.
/// </summary>
/// <param name="entitySet">Entity set the view maps; stored in the resulting map.</param>
/// <param name="queryView">Query view expression to inspect.</param>
/// <param name="discriminatorMap">On success, the recovered discriminator map; otherwise null.</param>
/// <returns>true when the pattern matched and a map was produced; false otherwise.</returns>
internal static bool TryCreateDiscriminatorMap(EntitySet entitySet, DbExpression queryView, out DiscriminatorMap discriminatorMap)
{
    discriminatorMap = null;

    // Pattern requires: Project(Filter(...)) with a Case projection producing an entity type.
    if (queryView.ExpressionKind != DbExpressionKind.Project)
    {
        return false;
    }
    var project = (DbProjectExpression)queryView;

    if (project.Projection.ExpressionKind != DbExpressionKind.Case)
    {
        return false;
    }
    var caseExpression = (DbCaseExpression)project.Projection;

    if (project.Projection.ResultType.EdmType.BuiltInTypeKind != BuiltInTypeKind.EntityType)
    {
        return false;
    }

    // determine value domain by walking filter
    if (project.Input.Expression.ExpressionKind != DbExpressionKind.Filter)
    {
        return false;
    }
    var filterExpression = (DbFilterExpression)project.Input.Expression;
    HashSet<object> discriminatorDomain = new HashSet<object>();
    if (!ViewSimplifier.TryMatchDiscriminatorPredicate(filterExpression, (equalsExp, discriminatorValue) => discriminatorDomain.Add(discriminatorValue)))
    {
        return false;
    }

    var typeMap = new List<KeyValuePair<object, EntityType>>();
    var propertyMap = new Dictionary<EdmProperty, DbExpression>();
    var relPropertyMap = new Dictionary<Query.InternalTrees.RelProperty, DbExpression>();
    var typeToRelPropertyMap = new Dictionary<EntityType, List<Query.InternalTrees.RelProperty>>();
    DbPropertyExpression discriminator = null;
    EdmProperty discriminatorProperty = null;

    // Walk each When/Then pair of the Case: every When must be a comparison of
    // the same discriminator property against a value, every Then an entity
    // type constructor.
    for (int i = 0; i < caseExpression.When.Count; i++)
    {
        var when = caseExpression.When[i];
        var then = caseExpression.Then[i];
        var projectionVariableName = project.Input.VariableName;

        DbPropertyExpression currentDiscriminator;
        object discriminatorValue;
        if (!ViewSimplifier.TryMatchPropertyEqualsValue(when, projectionVariableName, out currentDiscriminator, out discriminatorValue))
        {
            return false;
        }

        // must be the same discriminator in every case
        if (null == discriminatorProperty)
        {
            discriminatorProperty = (EdmProperty)currentDiscriminator.Property;
        }
        else if (discriminatorProperty != currentDiscriminator.Property)
        {
            return false;
        }
        discriminator = currentDiscriminator;

        // right hand side must be entity type constructor
        EntityType currentType;
        if (!TryMatchEntityTypeConstructor(then, propertyMap, relPropertyMap, typeToRelPropertyMap, out currentType))
        {
            return false;
        }

        // remember type + discriminator value
        typeMap.Add(new KeyValuePair<object, EntityType>(discriminatorValue, currentType));

        // remove discriminator value from domain
        discriminatorDomain.Remove(discriminatorValue);
    }

    // make sure only one member of discriminator domain remains...
    // (that leftover value is the one handled by the Else branch below)
    if (1 != discriminatorDomain.Count)
    {
        return false;
    }

    // check default case
    EntityType elseType;
    if (null == caseExpression.Else ||
        !TryMatchEntityTypeConstructor(caseExpression.Else, propertyMap, relPropertyMap, typeToRelPropertyMap, out elseType))
    {
        return false;
    }
    typeMap.Add(new KeyValuePair<object, EntityType>(discriminatorDomain.Single(), elseType));

    // Account for cases where some type in the hierarchy specifies a rel-property, but another
    // type in the hierarchy does not
    if (!CheckForMissingRelProperties(relPropertyMap, typeToRelPropertyMap))
    {
        return false;
    }

    // since the store may right-pad strings, ensure discriminator values are unique in their trimmed
    // form
    var discriminatorValues = typeMap.Select(map => map.Key);
    int uniqueValueCount = discriminatorValues.Distinct(TrailingSpaceComparer.Instance).Count();
    int valueCount = typeMap.Count;
    if (uniqueValueCount != valueCount)
    {
        return false;
    }

    discriminatorMap = new DiscriminatorMap(discriminator, typeMap, propertyMap, relPropertyMap, entitySet);
    return true;
}
// Repopulates the compiler-version combo box from the projects of the selected
// list entries, preselecting their common current version (or a "do not change"
// entry when the selection spans different versions).
// NOTE(review): newCompilerSelectionChangingByCode guards the ItemsSource /
// SelectedValue writes — presumably so a selection-changed handler elsewhere can
// tell user edits from programmatic ones; confirm against that handler.
void UpdateCompilerComboBox()
{
    if (listView.SelectedItems.Count > 0)
    {
        // Fetch list of available compiler versions across all selected entries,
        // while also recording each entry's current version.
        HashSet<CompilerVersion> availableVersionsSet = new HashSet<CompilerVersion>();
        HashSet<CompilerVersion> currentVersions = new HashSet<CompilerVersion>();
        foreach (Entry entry in listView.SelectedItems)
        {
            if (entry.CompilerVersion != null)
                currentVersions.Add(entry.CompilerVersion);
            availableVersionsSet.AddRange(entry.Project.GetAvailableCompilerVersions());
        }
        List<CompilerVersion> availableVersions = availableVersionsSet.OrderBy(n => n.MSBuildVersion).ThenBy(n => n.DisplayName).ToList();
        if (currentVersions.Count != 1)
        {
            // Selected entries disagree (or have no version): offer "do not change" first.
            availableVersions.Insert(0, new UnchangedCompilerVersion());
        }
        // Assign available versions to newVersionComboBox
        // Unless the user has already chosen a version, automatically set the selection to the
        // current version of the chosen projects, or to 'do not change' if there are different
        // current versions.
        newCompilerSelectionChangingByCode = true;
        newVersionComboBox.ItemsSource = availableVersions;
        CompilerVersion oldSelectedVersion = newVersionComboBox.SelectedValue as CompilerVersion;
        if (!newCompilerSelectionSetByUser || oldSelectedVersion == null)
        {
            newCompilerSelectionSetByUser = false;
            if (currentVersions.Count == 1)
                newVersionComboBox.SelectedValue = currentVersions.Single();
            else
                newVersionComboBox.SelectedValue = new UnchangedCompilerVersion();
        }
        newCompilerSelectionChangingByCode = false;
        UpdateTargetFrameworkComboBox();
    }
}
// Registers one regular filter per allowed property value and then settles the
// composite no-match behavior: the per-value filters' shared behavior when they
// all agree, otherwise the behavior passed in by the caller.
public CimInstanceRegularFilter(string propertyName, IEnumerable allowedPropertyValues, bool wildcardsEnabled, BehaviorOnNoMatch behaviorOnNoMatch)
{
    // Record the no-match behavior each per-value filter ends up with.
    HashSet<BehaviorOnNoMatch> distinctBehaviors = new HashSet<BehaviorOnNoMatch>();
    foreach (object candidateValue in allowedPropertyValues)
    {
        PropertyValueFilter perValueFilter = new PropertyValueRegularFilter(
            propertyName,
            candidateValue,
            wildcardsEnabled,
            behaviorOnNoMatch);
        this.AddPropertyValueFilter(perValueFilter);
        distinctBehaviors.Add(perValueFilter.BehaviorOnNoMatch);
    }

    if (distinctBehaviors.Count != 1)
    {
        // No values at all, or the filters disagree: use the caller's default.
        this.BehaviorOnNoMatch = behaviorOnNoMatch;
    }
    else
    {
        this.BehaviorOnNoMatch = distinctBehaviors.Single();
    }
}
public void HashSetExtensions_Single_ReturnsSingleItemInHashSet()
{
    // Arrange: a set containing exactly one element.
    var singleton = new HashSet<Int32> { 4 };

    // Act: Single() should yield that sole element.
    var value = singleton.Single();

    // Assert.
    TheResultingValue(value).ShouldBe(4);
}
public void HashSetExtensions_Single_ThrowsExceptionIfHashSetHasMultipleItems()
{
    // Single() must reject a set holding more than one element.
    var multiItemSet = new HashSet<Int32> { 1, 2 };

    Assert.That(() => multiItemSet.Single(), Throws.TypeOf<InvalidOperationException>());
}
// Single() on an empty set is expected to throw.
// NOTE(review): there is no in-body assertion here — this test presumably relies
// on an [ExpectedException]-style attribute outside this view to observe the
// InvalidOperationException; confirm, or assert the throw explicitly as the
// multiple-items sibling test does.
public void HashSetExtensions_Single_ThrowsExceptionIfHashSetIsEmpty()
{
    var set = new HashSet<Int32>();
    set.Single();
}
// Returns all accounts whose project role or full name contains searchString.
// Throws (via Require.NotEmpty) when searchString is null or empty.
public List<Account> GetUserList(string searchString)
{
    Require.NotEmpty(searchString, nameof(searchString));

    var allProjectMemberships = _projectMembershipRepostiory.GetAllProjectMemberships().ToList();
    var allUsers = new HashSet<Account>(_userRepository.GetAllAccounts());

    // Index accounts by id once instead of scanning allUsers with Single() for
    // every membership — the original was O(users * memberships). A missing or
    // duplicated DeveloperId still throws, as it did before (exception type
    // differs: KeyNotFoundException/ArgumentException vs InvalidOperationException).
    var usersById = allUsers.ToDictionary(account => account.UserId);

    // Users that have at least one project membership.
    var allUsersToSearchByRole = new HashSet<Account>(
        allProjectMemberships.Select(membership => usersById[membership.DeveloperId]));

    // Map each such user to the roles they hold across all projects.
    var userRolesDictionary = allUsersToSearchByRole.ToDictionary(
        user => user,
        user => allProjectMemberships
            .Where(membership => membership.DeveloperId == user.UserId)
            .Select(that => that.Role));

    // Users matched by role, unioned with users matched by full name.
    return userRolesDictionary
        .Where(pair => pair.Value.Any(role => Extensions.Contains(role, searchString)))
        .Select(pair => pair.Key)
        .Union(allUsers.Where(
            account => Extensions.Contains($"{account.Firstname} {account.Lastname}", searchString)))
        .ToList();
}
// Pulls the (single) git repository containing the solution's project roots,
// saving and locking open documents around the operation.
public override void OnExecute(CommandEventArgs e)
{
    GitPullArgs args = new GitPullArgs();
    string repositoryRoot;

    // Resolve each project root to its repository root, deduplicated.
    var repositoryRoots = new HashSet<string>(FileSystemUtil.StringComparer);
    foreach (var projectRoot in GetAllRoots(e))
    {
        if (GitTools.TryGetRepositoryRoot(projectRoot.FullPath, out repositoryRoot))
            repositoryRoots.Add(repositoryRoot);    // HashSet.Add already ignores duplicates
    }

    if (repositoryRoots.Count > 1)
    {
        throw new InvalidOperationException("Pulling of multiple repository roots is not supported");
    }

    if (repositoryRoots.Count == 0)
    {
        // No project root lies inside a git repository; nothing to pull.
        // (Previously Single() would have thrown InvalidOperationException here.)
        return;
    }

    repositoryRoot = repositoryRoots.Single();

    if (e.Command == VisualGitCommand.PendingChangesPullEx)
    {
        // Extended pull: let the user configure the pull; abort if they cancel.
        if (!QueryParameters(e, repositoryRoot, args))
            return;
    }
    else
    {
        args.MergeStrategy = GitMergeStrategy.DefaultForBranch;
    }

    GitPullResult result = null;
    ProgressRunnerArgs pa = new ProgressRunnerArgs();
    pa.CreateLog = true;
    pa.TransportClientArgs = args;

    // Get a list of all documents below the specified paths that are open in editors inside VS
    HybridCollection<string> lockPaths = new HybridCollection<string>(StringComparer.OrdinalIgnoreCase);
    IVisualGitOpenDocumentTracker documentTracker = e.GetService<IVisualGitOpenDocumentTracker>();

    foreach (string file in documentTracker.GetDocumentsBelow(repositoryRoot))
    {
        if (!lockPaths.Contains(file))
            lockPaths.Add(file);
    }

    documentTracker.SaveDocuments(lockPaths); // Make sure all files are saved before merging!

    using (DocumentLock lck = documentTracker.LockDocuments(lockPaths, DocumentLockType.NoReload))
    using (lck.MonitorChangesForReload())
    {
        GitException exception = null;

        // Run the pull on the progress runner; capture the GitException (if any)
        // so it can be surfaced as a warning after the locks are released.
        e.GetService<IProgressRunner>().RunModal(CommandStrings.PullingSolution, pa,
            delegate(object sender, ProgressWorkerArgs a)
            {
                e.GetService<IConflictHandler>().RegisterConflictHandler(args, a.Synchronizer);
                try
                {
                    a.Client.Pull(repositoryRoot, args, out result);
                }
                catch (GitException ex)
                {
                    exception = ex;
                }
            });

        if (exception != null)
        {
            e.GetService<IVisualGitErrorHandler>().OnWarning(exception);
        }
    }
}
// Single() on a set with more than one element is expected to throw.
// NOTE(review): no in-body assertion — this version presumably relies on an
// [ExpectedException]-style attribute outside this view to observe the
// InvalidOperationException; confirm, or use the explicit
// Assert.That(..., Throws.TypeOf<InvalidOperationException>()) form used by the
// other copy of this test in this file.
public void HashSetExtensions_Single_ThrowsExceptionIfHashSetHasMultipleItems()
{
    var set = new HashSet<Int32>() { 1, 2 };
    set.Single();
}