/// <summary>
/// Starts an XDCC list download for the bot named in the event args,
/// unless a download for that bot is already in progress.
/// </summary>
void DownloadXdccList(object aSender, EventArgs <Server, string, Int64, IPAddress, int> aEventArgs)
{
    var existing = _xdccListDownloads.SingleOrDefault(d => d.Bot == aEventArgs.Value2);
    if (existing != null)
    {
        // uhh - that should not happen
        _log.Error("DownloadXdccList(" + aEventArgs.Value2 + ") is already downloading");
        return;
    }

    var download = new Download
    {
        Server = aEventArgs.Value1,
        Bot = aEventArgs.Value2,
        Size = aEventArgs.Value3,
        IP = aEventArgs.Value4,
        Port = aEventArgs.Value5,
        FileName = CalculateXdccListFileName(aEventArgs.Value1, aEventArgs.Value2)
    };
    download.OnDisconnected += DownloadXdccDisconnected;
    download.OnReady += DownloadXdccReady;
    _xdccListDownloads.Add(download);
    download.Start(aEventArgs.Value4 + ":" + aEventArgs.Value5);
}
/// <summary>
/// Opens (or creates) the config file at the derived path and returns an accessor
/// bound to the given section. A file opened earlier under the same path is re-used.
/// </summary>
public static ConfigAccessor CreateConfig (string configFileName, string relativeDirPath, Encoding encoding, ConfigAccess access, string section)
{
    var configPath = Path.Combine(BaseDirectory, relativeDirPath, CONFIG_NAME_TEMPLATE.Format(configFileName as object));

    var config = _openedFiles.SingleOrDefault(c => c.FilePath == configPath);
    if (config == null)
    {
        FileStream configFile = IOUtils.TryCreateFileIfNotExistOrOpenOrNull(configPath);
        if (configFile == null)
        {
            throw new Exception("Не удается получить доступ к файлу.");
        }

        config = new ConfigStorageProxy(configFile);
        _openedFiles.Add(config);
    }

    var stream = config.GetNewStream(access, section);
    return new ConfigAccessor(
        new ConfigReaderWriter(stream, encoding, section),
        new ConfigSourceInfo(true, configPath));
}
/// <summary>
/// Moves a group from the "doing" set back into the to-do queue, but only when the
/// database no longer records a flag for it.
/// </summary>
public void RequeueToDo(Group group)
{
    lock (_locker)
    {
        var member = _setDoing.SingleOrDefault(t => t.group.GroupId == group.GroupId);
        if (member == null)
        {
            return;
        }

        _setDoing.Remove(member);

        FlagEnum? flagtype = null;
        try
        {
            RunWithDbContext(_context =>
            {
                // Null-conditional access: a group missing from the database simply
                // leaves flagtype null instead of throwing NullReferenceException
                // (the old code relied on the catch below for that control flow).
                flagtype = _context.Groups.SingleOrDefault(g => g.GroupId == group.GroupId)?.FlagType;
            });
        }
        catch (Exception e)
        {
            // Genuine DB failures are still logged; flagtype stays null so the
            // group is requeued rather than lost.
            _logger.LogError(e.Message);
            _logger.LogError(e.StackTrace);
        }

        if (flagtype == null)
        {
            _logger.LogInformation($"Manually Requeuing {member.group.GroupId}");
            _queueToDo.Enqueue(new QueueMember(member.group));
        }
    }
}
/// <summary>
/// Opens an IRC connection to the given server if it is enabled and not already connected.
/// </summary>
void ServerConnect(Server aServer)
{
    if (!aServer.Enabled)
    {
        _log.Error("ServerConnect(" + aServer + ") is not enabled");
        return;
    }

    IrcConnection connection = _connections.SingleOrDefault(c => c.Server == aServer);
    if (connection != null)
    {
        _log.Error("ServerConnect(" + aServer + ") is already in the list");
        return;
    }

    _log.Info("ServerConnect(" + aServer + ")");
    connection = new IrcConnection
    {
        Server = aServer,
        Parser = _parser,
        Scheduler = Scheduler
    };
    _connections.Add(connection);
    connection.OnDisconnected += ServerDisconnected;
    connection.OnNotificationAdded += AddNotification;
    connection.Start(aServer.ToString());
}
/// <summary>
/// Removes the channel with the given seed. Returns false when no such channel exists
/// (Remove(null) is a no-op returning false, so a miss is handled implicitly).
/// </summary>
public bool RemoveChannel(string seed)
{
    lock (_lockObject)
    {
        var match = _channels.SingleOrDefault(c => c.Seed == seed);
        return _channels.Remove(match);
    }
}
/// <summary>
/// Evicts the cache entry with the given name, disposing its data first.
/// No-op when the entry is not cached.
/// </summary>
public void Evict(string filename)
{
    CacheEntry found = Cache.SingleOrDefault(p => p.EntryName == filename);
    if (found != null)
    {
        found.Data.Dispose();
        // FIX: Remove moved inside the null check — the old code called
        // Cache.Remove(null) on a cache miss.
        Cache.Remove(found);
    }
}
/// <summary>
/// Returns the participants of the activity with the given id.
/// </summary>
/// <exception cref="InvalidOperationException">The activity is unknown at this location.</exception>
public IEnumerable <string> GetAllCharactersInActivity(string activityId)
{
    var match = _activities.SingleOrDefault(a => a.ActivityId == activityId);
    if (match == null)
    {
        throw new InvalidOperationException("Activity doesn't exist on this location.");
    }

    return match.GetParticipants();
}
/// <summary>
/// Marks the given group's review as done by dropping it from the reviewing set.
/// Silently ignores groups that are not currently under review.
/// </summary>
public void DoneReview(Group group)
{
    lock (_locker)
    {
        var entry = _setReviewing.SingleOrDefault(m => m.group.GroupId == group.GroupId);
        if (entry != null)
        {
            _setReviewing.Remove(entry);
        }
    }
}
/// <summary>
/// Copies each relationship whose parent and referenced tables are both present in
/// the table set (and distinct) into the parent table's Relationships collection.
/// </summary>
private static void FillRelationships(HashSet <Table> tables, HashSet <Relationship> relationships)
{
    foreach (var relationship in relationships)
    {
        var parent = tables.SingleOrDefault(t => t == relationship.ParentTable);
        var referenced = tables.SingleOrDefault(t => t == relationship.ReferencedTable);

        if (parent == null || referenced == null || parent == referenced)
        {
            continue;
        }

        parent.Relationships.Add(new Relationship(parent, referenced, relationship.Name));
    }
}
/// <summary>
/// Assigns a permission to this role and raises a PermissionAssignedEvent.
/// </summary>
/// <exception cref="HouseholdException">The permission is already assigned.</exception>
public void AssignPermission(int permissionId)
{
    var existing = _rolePermissions.SingleOrDefault(e => e.PermissionId == permissionId);
    if (existing != null)
    {
        throw new HouseholdException($"Permission {permissionId} is already assigned to role {this.Id}");
    }

    _rolePermissions.Add(new RolePermission { PermissionId = permissionId });
    this.ApplyEvent(new PermissionAssignedEvent(this.Identity, this.Id, permissionId));
}
/// <summary>
/// Looks up the address mapped to the given application type.
/// Returns false (with Address = 0) when no mapping exists.
/// </summary>
bool TryGetApplicationAddress(CBusProtcol.ApplicationTypes ApplicationType, out byte Address)
{
    var entry = AddressMap.SingleOrDefault(m => m.ApplicationType == ApplicationType);
    if (entry == null)
    {
        Address = 0;
        return false;
    }

    Address = entry.Address;
    return true;
}
/// <summary>
/// Finds a language by its two-letter ISO639-1 or three-letter ISO639-2T code.
/// Returns null for unknown codes, codes of any other length, or a null input.
/// </summary>
public static IsoLanguage Find(string isoCode)
{
    if (isoCode == null)
    {
        // FIX: the old code dereferenced isoCode.Length and threw
        // NullReferenceException for a null input.
        return null;
    }

    switch (isoCode.Length)
    {
        case 2:
            //Lookup ISO639-1 code
            return All.SingleOrDefault(l => l.TwoLetterCode == isoCode);
        case 3:
            //Lookup ISO639-2T code
            return All.SingleOrDefault(l => l.ThreeLetterCode == isoCode);
        default:
            return null;
    }
}
/// <summary>
/// Unloads a COM reference: finds the priority map tracking the referenced project,
/// removes this project's entry from it and, when the map becomes empty, drops the
/// map itself and the reference's built-in declarations from state.
/// </summary>
private void UnloadComReference(IReference reference, IReadOnlyList <IVBProject> projects)
{
    var referencedProjectId = GetReferenceProjectId(reference, projects);

    ReferencePriorityMap map = null;
    try
    {
        map = _projectReferences.SingleOrDefault(item => item.ReferencedProjectId == referencedProjectId);
    }
    catch (InvalidOperationException exception)
    {
        //There are multiple maps with the same referencedProjectId. That should not happen. (ghost?).
        Logger.Error(exception, "Failed To unload com reference with referencedProjectID {0} because RD stores multiple instances of it.", referencedProjectId);
        return;
    }

    if (map == null || !map.IsLoaded)
    {
        // we're removing a reference we weren't tracking? ...this shouldn't happen.
        return;
    }

    // NOTE(review): map.Remove takes the referencedProjectId and map.Count reflects the
    // remaining entries — confirm ReferencePriorityMap semantics before changing this.
    map.Remove(referencedProjectId);
    if (map.Count == 0)
    {
        // Last consumer gone: stop tracking the map and drop the reference's declarations.
        _projectReferences.Remove(map);
        State.RemoveBuiltInDeclarations(reference);
    }
}
/// <summary>
/// Reads buyers from the console (4 tokens = Citizen, 3 tokens = Rebel), processes
/// "buy food" commands by name until "End", then prints the total food bought.
/// </summary>
static void Main()
{
    var buyers = new HashSet <IBuyer>();
    int countOfPeople = int.Parse(Console.ReadLine());

    for (int i = 0; i < countOfPeople; i++)
    {
        var tokens = Console.ReadLine().Split();
        switch (tokens.Length)
        {
            case 4:
                buyers.Add(new Citizen(tokens[0], int.Parse(tokens[1]), tokens[2], tokens[3]));
                break;
            case 3:
                buyers.Add(new Rebel(tokens[0], int.Parse(tokens[1]), tokens[2]));
                break;
        }
    }

    string command;
    while ((command = Console.ReadLine()) != "End")
    {
        var buyer = buyers.SingleOrDefault(b => b.Name == command);
        if (buyer != null)
        {
            buyer.BuyFood();
        }
    }

    Console.WriteLine(buyers.Sum(b => b.Food));
}
/// <summary>
/// Adds an entity to this cached set, wrapping it in a tracking proxy, and recursively
/// adds any referenced entities to their corresponding DbSets via reflection.
/// </summary>
/// <returns>The proxy already cached for this entity, or the newly created one.</returns>
internal TEntity Add(TEntity entity, EntityState state)
{
    // Re-use an existing proxy that was generated from this same source entity.
    var existingProxy = _cachedEntities.SingleOrDefault(e => ((IEntityProxy)e).GeneratedFrom.Equals(entity));
    if (existingProxy != null)
    {
        return (existingProxy);
    }

    // Wrap the entity in a dynamic proxy unless it already is one, then track it.
    var proxy = entity as IEntityProxy ?? (IEntityProxy)DynamicProxy.Instance.Wrap(entity);
    entity = (TEntity)proxy;
    _cachedEntities.Add(entity);
    proxy.State = state;

    // Cascade: for every column referencing another table, add the referenced entity
    // to its own DbSet (located by reflection over DbSet<T>) and point the column at
    // the proxy returned by that Add call.
    foreach (var column in Columns.Where(c => c.ReferenceTable != null))
    {
        var reference = column.GetValue(entity);
        if (reference == null)
        {
            continue;
        }

        var referenceEntityType = reference.GetType();
        var referenceDbSetType = typeof(DbSet <>).MakeGenericType(referenceEntityType);
        // Single (not SingleOrDefault): a missing related DbSet is a programming error.
        var referenceDbSet = _relatedDbSets.Single(d => d.GetType() == referenceDbSetType);
        var add = referenceDbSetType.GetMethod("Add", new[] { referenceEntityType });
        var referenceProxy = add.Invoke(referenceDbSet, new[] { reference });
        column.SetValue(entity, referenceProxy);
    }

    return (entity);
}
/// <summary>
/// Maps every property of the dynamic item to a column, skipping names listed in
/// ignorePropertyExpressions (compared case-insensitively).
/// </summary>
internal void AutoMapDynamicTypeColumnsAction(params string[] ignorePropertyExpressions)
{
    VerifyAutoMapAlreadyCalled();

    var properties = (IDictionary <string, object>)_data.Item;

    var ignorePropertyNames = new HashSet <string>();
    if (ignorePropertyExpressions != null)
    {
        foreach (var ignorePropertyExpression in ignorePropertyExpressions)
        {
            ignorePropertyNames.Add(ignorePropertyExpression);
        }
    }

    foreach (var property in properties)
    {
        // FIX: Any() instead of SingleOrDefault() — the old code threw
        // InvalidOperationException when two ignore names differed only by case.
        var ignored = ignorePropertyNames.Any(x => x.Equals(property.Key, StringComparison.CurrentCultureIgnoreCase));
        if (!ignored)
        {
            ColumnAction(property.Key, property.Value, typeof(object), DataTypes.Object, 0);
        }
    }
}
/// <summary>
/// Verifies that every project reference inside the solution's projects points at a
/// project that is part of the solution; returns one message per broken reference.
/// </summary>
/// <exception cref="ArgumentException">solutionPath is a directory, not a file.</exception>
public IEnumerable <string> CheckProjectReferencesExistenceInSolution(string solutionPath, string projectFileExtension)
{
    if (File.GetAttributes(solutionPath).HasFlag(FileAttributes.Directory))
    {
        throw new ArgumentException($"You passed a folder path {solutionPath} to the {nameof(solutionPath)} parameter. Please pass the solution file path.");
    }

    var messages = new List <string>();
    var absoluteProjectPaths = new HashSet <string>(
        _solutionParser.ExtractSolutionProjects(solutionPath, projectFileExtension).Select(sp => Path.GetFullPath(sp.ProjectPath)));

    foreach (var projectPath in absoluteProjectPaths)
    {
        var projectDir = Path.GetDirectoryName(projectPath);
        var absoluteReferencePaths = _referencesExtractor.GetProjectReferencePaths(projectPath)
            .Select(p => Path.GetFullPath(Path.Combine(projectDir, p)));

        foreach (var referencePath in absoluteReferencePaths)
        {
            if (absoluteProjectPaths.Contains(referencePath))
            {
                continue;
            }

            // FIX: FirstOrDefault instead of SingleOrDefault — two solution projects
            // sharing a file name made the old code throw InvalidOperationException
            // instead of reporting the broken reference.
            var shouldBeTowards = absoluteProjectPaths.FirstOrDefault(pp => pp.EndsWith(Path.GetFileName(referencePath)));

            var message = $"Project {Path.GetFileName(projectPath)} has a broken reference to {referencePath}.";
            message += string.IsNullOrEmpty(shouldBeTowards)
                ? $" This reference is completely missing from the solution {Path.GetFileName(solutionPath)}"
                : $" The reference should be towards {shouldBeTowards}";
            messages.Add(message);
        }
    }

    return messages;
}
/// <summary>
/// Runs the ball-distribution simulation: breadth-first over branches down to Depth,
/// distributing balls left/right, records which containers ended up with a ball, and
/// returns the name of the single container left without one.
/// </summary>
public string Run()
{
    Queue <Branch> toBeProcessed = new Queue <Branch>();
    this.CurrentDepth = 0;
    toBeProcessed.Enqueue(this);

    //Process balls
    // Branches at CurrentDepth == Depth stay queued for the collection phase below.
    while (toBeProcessed.Count > 0 && toBeProcessed.Peek().CurrentDepth < Depth)
    {
        Branch parent = toBeProcessed.Dequeue();
        ProcessBalls(parent);
        if (parent.Left.Balls.Count > 0)
        {
            toBeProcessed.Enqueue(parent.Left);
        }
        if (parent.Right.Balls.Count > 0)
        {
            toBeProcessed.Enqueue(parent.Right);
        }
    }

    //Store balls in the container
    // NOTE(review): each remaining leaf is assumed to hold at least one ball
    // (Balls[0]) — confirm ProcessBalls guarantees this.
    ContainersWithBall = new Dictionary <string, int>();
    while (toBeProcessed.Count > 0)
    {
        Branch temp = toBeProcessed.Dequeue();
        ContainersWithBall[temp.gateName] = temp.Balls[0];
        Containers.Remove(temp.gateName);
    }

    // return the container name without any ball
    // (SingleOrDefault throws if more than one container remains empty.)
    return (Containers.SingleOrDefault());
}
///// <summary> ///// Shows the OpenFileDialog. ///// </summary> ///// <param name="ownerViewModel">A ViewModel that represents the owner window of the ///// dialog</param> ///// <param name="openFileDialog">The interface of a open file dialog.</param> ///// <returns>DialogResult.OK if successful; otherwise DialogResult.Cancel.</returns> //public DialogResult ShowOpenFileDialog(object ownerViewModel, IOpenFileDialog openFileDialog) //{ // Contract.Requires(ownerViewModel != null); // Contract.Requires(openFileDialog != null); // // Create OpenFileDialog with specified ViewModel // OpenFileDialog dialog = new OpenFileDialog(openFileDialog); // // Show dialog // return dialog.ShowDialog(new WindowWrapper(FindOwnerWindow(ownerViewModel))); //} ///// <summary> ///// Shows the FolderBrowserDialog. ///// </summary> ///// <param name="ownerViewModel">A ViewModel that represents the owner window of the dialog. ///// </param> ///// <param name="folderBrowserDialog">The interface of a folder browser dialog.</param> ///// <returns>The DialogResult.OK if successful; otherwise DialogResult.Cancel.</returns> //public DialogResult ShowFolderBrowserDialog(object ownerViewModel, FolderBrowserDialogViewModel viewModel) //{ // Contract.Requires(ownerViewModel != null); // Contract.Requires(viewModel != null); // // Create/Show FolderBrowserDialog with specified ViewModel // FolderBrowserDialog dialog = new FolderBrowserDialog(viewModel); // return dialog.ShowDialog(new WindowWrapper(FindOwnerWindow(ownerViewModel))); //} #endregion /// <summary> /// Finds window corresponding to specified ViewModel. 
/// </summary>
private Window FindOwnerWindow(object owner)
{
    if (owner == null)
    {
        return null;
    }

    // Support direct use of owner window
    if (owner is Window directWindow)
    {
        return directWindow;
    }

    FrameworkElement view = _views.SingleOrDefault(v => ReferenceEquals(v.DataContext, owner));
    if (view == null)
    {
        // throw new ArgumentException(/*MSG0*/"Viewmodel is not referenced by any registered View.");
        return null;
    }

    // Get owner window: the view itself when it is a Window, otherwise its containing Window.
    return view as Window ?? Window.GetWindow(view);
}
/// <summary>
/// Resolves a property accessor by walking the dotted path one segment at a time.
/// Returns null when the first segment is unknown or the final accessor has a
/// different entity/property type.
/// </summary>
PropertyAccessor <TEntity, TProperty> IRootAccessor.GetByPath <TEntity, TProperty>(List <string> pathParts)
{
    var head = pathParts.First();

    //TODO get by Type and PropertyName to account for multiple inherited classes with same PropertyName
    var accessor = Properties.SingleOrDefault(p => p.PropertyName == head);
    if (accessor == null)
    {
        return null;
    }

    var remainder = pathParts.Skip(1).ToList();
    return accessor.FindAccessor(remainder) as PropertyAccessor <TEntity, TProperty>;
}
// NOTE(review): the return type (and modifiers) of this method are declared on a
// preceding line outside this chunk — presumably List<(Account, Person)>, matching `ret`.
// For each requested email, pairs every account using that email with the single
// person owning it.
MatchPersonToAccount(
    IEnumerable <Group> groups,
    IEnumerable <Account> accounts,
    IEnumerable <string> emails)
{
    HashSet <Person> hpeople = new HashSet <Person>();
    HashSet <Account> haccounts = new HashSet <Account>(accounts);
    // NOTE(review): hemails is built but never read afterwards — the loop below
    // iterates the raw emails parameter instead.
    HashSet <string> hemails = new HashSet <string>(emails);
    //groups.ToList().ForEach(x => hpeople.A(x.));

    // Flatten all people from all groups into one set.
    List <Person> list = new List <Person>();
    foreach (var item in groups)
    {
        list.AddRange(item.People);
    }
    hpeople = new HashSet <Person>(list);

    List <(Account, Person)> ret = new List <(Account, Person)>();
    foreach (var email in emails)
    {
        // SingleOrDefault throws InvalidOperationException if two people share the email.
        var pers = hpeople.SingleOrDefault(x => x.Emails.Any(y => y.Email == email));
        var acc = haccounts.Where(x => x.EmailAddress.Email == email).ToList();
        acc.ToList().ForEach(x => ret.Add((x, pers)));
    }
    return (ret);
}
/// <summary>
/// Finds window corresponding to specified ViewModel.
/// </summary>
/// <exception cref="ArgumentException">No registered view references the viewmodel.</exception>
/// <exception cref="InvalidOperationException">The view is not hosted inside a Window.</exception>
private Window FindOwnerWindow(object viewModel)
{
    var view = views.SingleOrDefault(v => ReferenceEquals(v.DataContext, viewModel));
    if (view == null)
    {
        throw new ArgumentException("Viewmodel is not referenced by any registered View.");
    }

    // Get owner window: the view itself when it is a Window, otherwise its containing Window.
    Window owner = view as Window ?? Window.GetWindow(view);

    // Make sure owner window was found
    if (owner == null)
    {
        throw new InvalidOperationException("View is not contained within a Window.");
    }

    return owner;
}
/// <summary>
/// Maps every reflected property of the item to a column, skipping properties named
/// by the ignore expressions (compared case-insensitively).
/// </summary>
internal void AutoMapColumnsAction <T>(params Expression <Func <T, object> >[] ignorePropertyExpressions)
{
    VerifyAutoMapAlreadyCalled();

    var properties = ReflectionHelper.GetProperties(_data.Item.GetType());

    var ignorePropertyNames = new HashSet <string>();
    if (ignorePropertyExpressions != null)
    {
        foreach (var ignorePropertyExpression in ignorePropertyExpressions)
        {
            var ignorePropertyName = new PropertyExpressionParser <T>(_data.Item, ignorePropertyExpression).Name;
            ignorePropertyNames.Add(ignorePropertyName);
        }
    }

    foreach (var property in properties)
    {
        // FIX: Any() instead of SingleOrDefault() — the old code threw
        // InvalidOperationException when two ignore names differed only by case.
        if (ignorePropertyNames.Any(x => x.Equals(property.Value.Name, StringComparison.CurrentCultureIgnoreCase)))
        {
            continue;
        }

        var propertyType = ReflectionHelper.GetPropertyType(property.Value);
        var propertyValue = ReflectionHelper.GetPropertyValue(_data.Item, property.Value);
        ColumnAction(property.Value.Name, propertyValue, propertyType, DataTypes.Object, 0);
    }
}
/// <summary>
/// Registers the given formulas on this session and picks today's training day from
/// the formulas' training days, falling back to the first available day.
/// </summary>
/// <exception cref="InvalidOperationException">No training day exists at all (as before).</exception>
public void PutFormulas(IEnumerable <Formula> formulas)
{
    ICollection <TrainingDay> trainingDaySet = new HashSet <TrainingDay>();
    foreach (Formula formula in formulas)
    {
        // FIX: check formula BEFORE dereferencing it — the old code tested
        // `formula != null` only after the Select call, which would already have
        // thrown; the `trainingDays != null` check was dead (ToList never returns null).
        if (formula == null)
        {
            continue;
        }

        foreach (TrainingDay day in formula.FormulaTrainingDays.Select(td => td.TrainingDay))
        {
            if (day != null && !trainingDaySet.Contains(day))
            {
                trainingDaySet.Add(day);
            }
        }
    }

    foreach (Formula f in formulas)
    {
        SessionFormulas.Add(new SessionFormula(SessionId, this, f.FormulaId, f));
    }

    // Prefer the day matching today; otherwise take the first non-null day.
    TrainingDay prefDay = trainingDaySet.SingleOrDefault(t => t.DayOfWeek.Equals(DateTime.Today.DayOfWeek));
    TrainingDay = prefDay ?? trainingDaySet.First(t => t != null);
}
/// <summary>
/// Creates a session for the given formulas, teacher and members; picks today's
/// training day from the formulas' training days, falling back to the first available.
/// </summary>
/// <exception cref="InvalidOperationException">No training day exists at all (as before).</exception>
public Session(IEnumerable <Formula> formulas, Teacher teacher, IEnumerable <Member> members)
{
    MembersPresent = new List <Member>();
    SessionFormulas = new List <SessionFormula>();
    Members = members;

    ICollection <TrainingDay> trainingDaySet = new HashSet <TrainingDay>();
    foreach (Formula formula in formulas)
    {
        // FIX: check formula BEFORE dereferencing it — the old code tested
        // `formula != null` only after the Select call, which would already have
        // thrown; the `trainingDays != null` check was dead (ToList never returns null).
        if (formula == null)
        {
            continue;
        }

        foreach (TrainingDay day in formula.FormulaTrainingDays.Select(td => td.TrainingDay))
        {
            if (day != null && !trainingDaySet.Contains(day))
            {
                trainingDaySet.Add(day);
            }
        }
    }

    // Prefer the day matching today; otherwise take the first non-null day.
    TrainingDay prefDay = trainingDaySet.SingleOrDefault(t => t.DayOfWeek.Equals(DateTime.Today.DayOfWeek));
    TrainingDay = prefDay ?? trainingDaySet.First(t => t != null);

    Date = DateTime.Now;
    Teacher = teacher;
    SessionMembers = new List <SessionMember>();
    NonMembers = new List <NonMember>();

    foreach (Formula f in formulas)
    {
        SessionFormulas.Add(new SessionFormula(SessionId, this, f.FormulaId, f));
    }
}
/// <summary>
/// Determines whether every branch of this if/else-if/else chain performs the same
/// constant jump; returns that jump (re-wrapped for this node) or null.
/// </summary>
internal LSLConstantJumpDescription GetConstantJump()
{
    // With no else-if/else branches the chain can fall through, so no jump is
    // common to all paths.
    if (!HasElseIfStatements && !HasElseStatement)
    {
        return (null);
    }

    var cmp = new JumpCmp();

    // Start from the jumps of the initial if, then intersect with each branch:
    // only jumps present on EVERY path survive.
    var i = new HashSet <LSLConstantJumpDescription>(IfStatement.ConstantJumps, cmp);

    if (HasElseIfStatements)
    {
        foreach (var node in ElseIfStatements)
        {
            var ie = new HashSet <LSLConstantJumpDescription>(node.ConstantJumps, cmp);
            i.IntersectWith(ie);
        }
    }

    if (HasElseStatement)
    {
        var e = new HashSet <LSLConstantJumpDescription>(ElseStatement.ConstantJumps, cmp);
        i.IntersectWith(e);
    }

    // SingleOrDefault throws when more than one common jump remains — presumably the
    // intersection can hold at most one element; confirm JumpCmp equality semantics.
    var x = i.SingleOrDefault();
    if (x != null)
    {
        // Re-wrap so the jump is attributed to this control structure.
        x = new LSLConstantJumpDescription(x, this);
    }
    return (x);
}
/// <summary>
/// Returns the already-built resource object in the included set that matches the
/// resource's public type name and string id, or null when none was built yet.
/// </summary>
private ResourceObject TryGetBuiltResourceObjectFor(IIdentifiable resource)
{
    ResourceContext resourceContext = ResourceContextProvider.GetResourceContext(resource.GetType());

    return _included.SingleOrDefault(included =>
        included.Type == resourceContext.PublicName && included.Id == resource.StringId);
}
/// <summary>
/// Searches the chain structure for paths related to the pattern: collects every
/// branch path reachable from the head matching the pattern's first character, and
/// appends the pattern itself when an exact traversal reaches its destination node.
/// </summary>
public IEnumerable <string> Find(string searchPattern)
{
    var factory = new ChainFactory();
    var targetNode = factory.Create(searchPattern);

    // Fast exit when no head chain corresponds to the pattern at all.
    if (!_heads.Contains(targetNode))
    {
        return (Enumerable.Empty <string>());
    }

    var searchHead = _heads.SingleOrDefault(x => x.Data.Equals(searchPattern.First()));

    // Walk all branches from the head, accumulating complete paths.
    var accumulator = new PathAccumulator();
    var enumerator = new EnumeratorAccumulatingBranches(searchHead, accumulator);
    while (enumerator.MoveNext())
    {
    }

    // Separately traverse the exact pattern path to check it is fully present.
    var pathEnumerator = new EnumeratorTraversingSpecifiedPath(searchHead, targetNode.SubNodes.LastOrDefault());
    while (pathEnumerator.MoveNext())
    {
    }

    IEnumerable <string> result = accumulator.Paths;
    if (pathEnumerator.IsDestinationReached)
    {
        result = result.Concat(new string[] { searchPattern });
    }
    return (result);
}
/// <summary>
/// Merges the "most used pitch" stats from gather and recruiting games: recruiting
/// entries get their location resolved from the repository, then are either merged
/// into an existing entry (summing TimesUsed) or added as new.
/// </summary>
private IEnumerable <MostUsedPitchServiceModel> GetMostUsedPitchFromAllGames(ICollection <MostUsedPitchServiceModel> mostUsedInGather, ICollection <MostUsedPitchServiceModel> mostUsedInRecruiting)
{
    var mostUsedPitches = new HashSet <MostUsedPitchServiceModel>(mostUsedInGather);
    foreach (var pitch in mostUsedInRecruiting)
    {
        var fieldFromDb = this.pitchRepository.All()
            .Include(x => x.Address)
            .ThenInclude(x => x.Town)
            .ThenInclude(x => x.Province)
            .ThenInclude(x => x.Country)
            .SingleOrDefault(x => x.Id == pitch.Id);
        if (fieldFromDb != null)
        {
            var fieldLocation = string.Concat(fieldFromDb.Address.Town.Name, ", ", fieldFromDb.Address.Town.Province.Country.Name);
            pitch.Location = fieldLocation;
        }

        // FIX: single FirstOrDefault lookup instead of Any() followed by
        // SingleOrDefault() — the old code scanned the set twice per pitch.
        var existing = mostUsedPitches.FirstOrDefault(x => x.Id == pitch.Id);
        if (existing != null)
        {
            existing.TimesUsed += pitch.TimesUsed;
            existing.Location = pitch.Location;
            continue;
        }

        mostUsedPitches.Add(pitch);
    }

    return mostUsedPitches;
}
/// <summary>
/// Removes a review from this book — via the loaded Reviews collection when available,
/// otherwise by looking it up through the supplied context — and raises a
/// BookReviewRemovedEvent so cached review values get recalculated.
/// </summary>
/// <exception cref="InvalidOperationException">Review not found or not linked to this book.</exception>
/// <exception cref="ArgumentNullException">Reviews not loaded and no context supplied.</exception>
public void RemoveReview(int reviewId, DbContext context = null)
{
    ReviewWithEvents review;
    if (_reviews != null)
    {
        //This is there to handle the add/remove of reviews when first created (or someone uses an .Include(p => p.Reviews)
        review = _reviews.SingleOrDefault(x => x.ReviewId == reviewId);
        if (review == null)
        {
            throw new InvalidOperationException("The review with that key was not found in the book's Reviews.");
        }
        _reviews.Remove(review);
    }
    else if (context == null)
    {
        throw new ArgumentNullException(nameof(context), "You must provide a context if the Reviews collection isn't valid.");
    }
    else
    {
        review = context.Find <ReviewWithEvents>(reviewId);
        if (review == null || review.BookId != BookId)
        {
            // This ensures that the review is a) linked to the book you defined, and b) the review has a valid primary key
            throw new InvalidOperationException("The review either wasn't found or was not linked to this Book.");
        }
        context.Remove(review);
    }
    // Raised on every successful path so dependent cached values stay in sync.
    AddEvent(new BookReviewRemovedEvent(review, this, UpdateReviewCachedValues));
}
/// <summary>
/// SingleOrDefault must throw when the set holds more than one element.
/// (The expected exception is presumably declared via a test attribute outside this view.)
/// </summary>
public void HashSetExtensions_SingleOrDefault_ThrowsExceptionIfHashSetHasMultipleItems()
{
    var numbers = new HashSet<Int32> { 1, 2 };

    numbers.SingleOrDefault();
}
/// <summary>
/// Computes and applies a bulk add/remove-suppression fix: gathers the diagnostics to
/// fix (under a wait dialog), groups them by project language, runs the per-language
/// suppression fixer over document and project diagnostics, optionally shows a preview
/// dialog, and finally applies the resulting solution to the workspace.
/// Returns false when the user cancels or a fixer fails; true otherwise.
/// </summary>
private bool ApplySuppressionFix(Func<Project, bool> shouldFixInProject, bool selectedEntriesOnly, bool isAddSuppression, bool isSuppressionInSource, bool onlyCompilerDiagnostics, bool showPreviewChangesDialog)
{
    ImmutableDictionary<Document, ImmutableArray<Diagnostic>> documentDiagnosticsToFixMap = null;
    ImmutableDictionary<Project, ImmutableArray<Diagnostic>> projectDiagnosticsToFixMap = null;

    var title = isAddSuppression ? ServicesVSResources.SuppressMultipleOccurrences : ServicesVSResources.RemoveSuppressMultipleOccurrences;
    var waitDialogMessage = isAddSuppression ? ServicesVSResources.ComputingSuppressionFix : ServicesVSResources.ComputingRemoveSuppressionFix;

    // Get the diagnostics to fix from the suppression state service.
    Action<CancellationToken> computeDiagnosticsToFix = cancellationToken =>
    {
        var diagnosticsToFix = _suppressionStateService.GetItemsAsync(
            selectedEntriesOnly, isAddSuppression, isSuppressionInSource, onlyCompilerDiagnostics, cancellationToken)
            .WaitAndGetResult(cancellationToken);

        if (diagnosticsToFix.IsEmpty)
        {
            return;
        }

        cancellationToken.ThrowIfCancellationRequested();
        documentDiagnosticsToFixMap = GetDocumentDiagnosticsToFixAsync(diagnosticsToFix, shouldFixInProject, cancellationToken).WaitAndGetResult(cancellationToken);

        cancellationToken.ThrowIfCancellationRequested();
        // In-source suppressions have no project-level component.
        projectDiagnosticsToFixMap = isSuppressionInSource ?
            ImmutableDictionary<Project, ImmutableArray<Diagnostic>>.Empty :
            GetProjectDiagnosticsToFixAsync(diagnosticsToFix, shouldFixInProject, cancellationToken).WaitAndGetResult(cancellationToken);
    };

    var result = InvokeWithWaitDialog(computeDiagnosticsToFix, title, waitDialogMessage);

    // Bail out if the user cancelled.
    if (result == WaitIndicatorResult.Canceled || documentDiagnosticsToFixMap == null || projectDiagnosticsToFixMap == null)
    {
        return false;
    }

    if (documentDiagnosticsToFixMap.IsEmpty && projectDiagnosticsToFixMap.IsEmpty)
    {
        // Nothing to fix.
        return true;
    }

    // Equivalence key determines what fix will be applied.
    // Make sure we don't include any specific diagnostic ID, as we want all of the given diagnostics (which can have varied ID) to be fixed.
    var equivalenceKey = isAddSuppression ?
        (isSuppressionInSource ? FeaturesResources.SuppressWithPragma : FeaturesResources.SuppressWithGlobalSuppressMessage) :
        FeaturesResources.RemoveSuppressionEquivalenceKeyPrefix;

    // We have different suppression fixers for every language.
    // So we need to group diagnostics by the containing project language and apply fixes separately.
    var languages = new HashSet<string>(projectDiagnosticsToFixMap.Keys.Select(p => p.Language).Concat(documentDiagnosticsToFixMap.Select(kvp => kvp.Key.Project.Language)));
    var newSolution = _workspace.CurrentSolution;

    foreach (var language in languages)
    {
        // Use the Fix multiple occurrences service to compute a bulk suppression fix for the specified document and project diagnostics,
        // show a preview changes dialog and then apply the fix to the workspace.
        var documentDiagnosticsPerLanguage = GetDocumentDiagnosticsMappedToNewSolution(documentDiagnosticsToFixMap, newSolution, language);
        if (!documentDiagnosticsPerLanguage.IsEmpty)
        {
            var suppressionFixer = GetSuppressionFixer(documentDiagnosticsPerLanguage.SelectMany(kvp => kvp.Value), language, _codeFixService);
            if (suppressionFixer != null)
            {
                var suppressionFixAllProvider = suppressionFixer.GetFixAllProvider();
                newSolution = _fixMultipleOccurencesService.GetFix(
                    documentDiagnosticsPerLanguage,
                    _workspace,
                    suppressionFixer,
                    suppressionFixAllProvider,
                    equivalenceKey,
                    title,
                    waitDialogMessage,
                    cancellationToken: CancellationToken.None);
                if (newSolution == null)
                {
                    // User cancelled or fixer threw an exception, so we just bail out.
                    return false;
                }
            }
        }

        var projectDiagnosticsPerLanguage = GetProjectDiagnosticsMappedToNewSolution(projectDiagnosticsToFixMap, newSolution, language);
        if (!projectDiagnosticsPerLanguage.IsEmpty)
        {
            var suppressionFixer = GetSuppressionFixer(projectDiagnosticsPerLanguage.SelectMany(kvp => kvp.Value), language, _codeFixService);
            if (suppressionFixer != null)
            {
                var suppressionFixAllProvider = suppressionFixer.GetFixAllProvider();
                newSolution = _fixMultipleOccurencesService.GetFix(
                    projectDiagnosticsPerLanguage,
                    _workspace,
                    suppressionFixer,
                    suppressionFixAllProvider,
                    equivalenceKey,
                    title,
                    waitDialogMessage,
                    CancellationToken.None);
                if (newSolution == null)
                {
                    // User cancelled or fixer threw an exception, so we just bail out.
                    return false;
                }
            }
        }
    }

    if (showPreviewChangesDialog)
    {
        // NOTE(review): SingleOrDefault throws when diagnostics span multiple
        // languages — presumably the preview path is only reached single-language;
        // confirm before relying on it.
        newSolution = FixAllGetFixesService.PreviewChanges(
            _workspace.CurrentSolution,
            newSolution,
            fixAllPreviewChangesTitle: title,
            fixAllTopLevelHeader: title,
            languageOpt: languages.SingleOrDefault(),
            workspace: _workspace);
        if (newSolution == null)
        {
            return false;
        }
    }

    waitDialogMessage = isAddSuppression ? ServicesVSResources.ApplyingSuppressionFix : ServicesVSResources.ApplyingRemoveSuppressionFix;
    Action<CancellationToken> applyFix = cancellationToken =>
    {
        var operations = SpecializedCollections.SingletonEnumerable<CodeActionOperation>(new ApplyChangesOperation(newSolution));
        _editHandlerService.Apply(
            _workspace,
            fromDocument: null,
            operations: operations,
            title: title,
            cancellationToken: cancellationToken);
    };

    result = InvokeWithWaitDialog(applyFix, title, waitDialogMessage);
    return result == WaitIndicatorResult.Completed;
}
/// <summary>
/// SingleOrDefault on an empty set yields default(Int32).
/// </summary>
public void HashSetExtensions_SingleOrDefault_ReturnsDefaultValueIfHashSetIsEmpty()
{
    var emptySet = new HashSet<Int32>();

    var result = emptySet.SingleOrDefault();

    TheResultingValue(result).ShouldBe(default(Int32));
}
/// <summary>
/// A type list registered for PropertyEditor is found by its exact type and resolution
/// kind, while a lookup for a different type yields nothing.
/// </summary>
public void TypeList_Resolves_Explicit_Types()
{
    var registeredLists = new HashSet<PluginManager.TypeList>();

    var propEditors = new PluginManager.TypeList<PropertyEditor>(PluginManager.TypeResolutionKind.FindAllTypes);
    propEditors.AddType(typeof(LabelPropertyEditor));
    registeredLists.Add(propEditors);

    var match = registeredLists.SingleOrDefault(x => x.IsTypeList<PropertyEditor>(PluginManager.TypeResolutionKind.FindAllTypes));
    Assert.IsNotNull(match);

    //This should not find a type list of this type
    var mismatch = registeredLists.SingleOrDefault(x => x.IsTypeList<IParameterEditor>(PluginManager.TypeResolutionKind.FindAllTypes));
    Assert.IsNull(mismatch);
}
/// <summary>
/// SingleOrDefault must throw InvalidOperationException when the set holds more than one element.
/// </summary>
public void HashSetExtensions_SingleOrDefault_ThrowsExceptionIfHashSetHasMultipleItems()
{
    var numbers = new HashSet<Int32> { 1, 2 };

    Assert.That(() => numbers.SingleOrDefault(), Throws.TypeOf<InvalidOperationException>());
}
/// <summary>
/// SingleOrDefault on a one-element set yields that element.
/// </summary>
public void HashSetExtensions_SingleOrDefault_ReturnsSingleItemInHashSet()
{
    var singleItemSet = new HashSet<Int32> { 4 };

    var result = singleItemSet.SingleOrDefault();

    TheResultingValue(result).ShouldBe(4);
}
/// <summary>
/// Updating the repo:
/// 1. load saved share meta data (meta data will be as current as last time online user was online)
/// 2. load current state of share folder
/// 3. compare the two sets for adds/mods/deletes
/// 4. push changes to the server, updating meta data on success
///    a. if there is a conflicted file, copy it locally using name server gives
/// 5. ask server for current file list
/// 6. compare this list against local share
/// 7. download out-of-date and new files, updating meta data as we go
/// 8. write out meta data
/// </summary>
public void SyncWithTheCloud()
{
    // 1. load saved share meta data (meta data will be as current as last time online user was online)
    ShareMetaData = this.loadMetaData();

    // 2. load current state of share folder
    LocalShare = this.GetFiles();

    // 3. compare the two sets for adds/mods/deletes
    HashSet<File> changeSet = this.GetChangeSet(this.LocalShare, this.ShareMetaData, new StartupComparer());

    // 4. push changes to the server, updating meta data on success
    foreach (File f in changeSet)
    {
        // differentiate deletes from adds
        if (f.content == null) // delete
        {
            File deleteResult = OrangeCloudServer.DeleteFile(f);

            // if the file is null, the server didnt respond
            // we'll do nothing now and try to push again later
            if (deleteResult == null)
            {
                continue;
            }
            // if server returns -1, we have a conflict
            else if (deleteResult.version == -1)
            {
                // do something
            }
            // operation was successful with server
            else
            {
                // update file meta data
                // FIX: FirstOrDefault instead of Where(...).First() — the old code threw
                // InvalidOperationException on a miss, making the null check dead code.
                File metaFile = ShareMetaData.FirstOrDefault(file => file.fullPath == f.fullPath);
                if (metaFile != null)
                {
                    ShareMetaData.Remove(metaFile);
                }
            }
        }
        else // add
        {
            File addResult = OrangeCloudServer.Add(f);

            // if the file is null, the server didnt respond
            // we'll do nothing now and try to push again later
            if (addResult == null)
            {
                continue;
            }
            // if server returns -1, we have a conflict
            else if (addResult.version == -1)
            {
                // do something
            }
            // operation was successful with server
            else
            {
                // update file meta data
                // FIX: look the tracked entry up by path and null-check it instead of
                // relying on HashSet.Contains(f) (whose equality may not be path-based)
                // followed by an unguarded SingleOrDefault(...).version assignment.
                File tracked = ShareMetaData.SingleOrDefault(file => file.fullPath == f.fullPath);
                if (tracked != null)
                {
                    // if the file is in the meta data already, update the version
                    tracked.version = addResult.version;
                }
                else
                {
                    // add it to the meta data
                    f.version = addResult.version;
                    ShareMetaData.Add(f);
                }
            }
        }
    }
    changeSet.RemoveWhere(files => files.content == null);

    // 5. ask server for current file list
    HashSet<File> ServerList = OrangeCloudServer.GetFiles();

    // 6. compare this list against local share
    HashSet<File> newFiles = this.GetChangeSet(ServerList, LocalShare, new DefaultComparer());

    // 7. download out-of-date and new files, updating meta data as we go
    this.DownloadNewFiles(newFiles);

    // 8. write out meta data
    System.IO.File.WriteAllText(Environment.CurrentDirectory + "\\metadata.json", JsonConvert.SerializeObject(ShareMetaData));
}
/// <summary>
/// Validate And Import Products With Variants
/// </summary>
/// <param name="spreadsheet">Excel package expected to contain an "Items" worksheet.</param>
/// <param name="parseErrors">Per-handle error lists; entries without errors are stripped before returning.</param>
/// <returns>The set of products (with their variants) parsed from the worksheet.</returns>
public HashSet<ProductImportDataTransferObject> ValidateAndImportProductsWithVariants(ExcelPackage spreadsheet, ref Dictionary<string, List<string>> parseErrors)
{
    var productsToImport = new HashSet<ProductImportDataTransferObject>();
    if (!parseErrors.Any())
    {
        if (spreadsheet != null && spreadsheet.Workbook != null)
        {
            var worksheet = spreadsheet.Workbook.Worksheets.SingleOrDefault(x => x.Name == "Items");
            if (worksheet != null)
            {
                var totalRows = worksheet.Dimension.End.Row;
                // Row 1 is the header; data starts at row 2.
                for (var rowId = 2; rowId <= totalRows; rowId++)
                {
                    // Skip rows with no data in the first three columns.
                    if (!worksheet.GetValue<string>(rowId, 1).HasValue() &&
                        !worksheet.GetValue<string>(rowId, 2).HasValue() &&
                        !worksheet.GetValue<string>(rowId, 3).HasValue())
                        continue;

                    var product = new ProductImportDataTransferObject();

                    //Prepare handle name for storing and grouping errors
                    string url = worksheet.GetValue<string>(rowId, 1),
                           name = worksheet.GetValue<string>(rowId, 2);
                    var handle = url.HasValue() ? url : SeoHelper.TidyUrl(name);

                    if (!productsToImport.Any(x => x.Name == name || x.UrlSegment == url))
                    {
                        if (parseErrors.All(x => x.Key != handle))
                            parseErrors.Add(handle, new List<string>());

                        product.UrlSegment = worksheet.GetValue<string>(rowId, 1).HasValue()
                            ? worksheet.GetValue<string>(rowId, 1)
                            : _urlService.Suggest(null, new SuggestParams { PageName = name, DocumentType = typeof (Product).FullName });

                        //skip duplicate url
                        if (productsToImport.Any(x => x.UrlSegment == product.UrlSegment))
                            continue;

                        GetBasicData(parseErrors, worksheet, rowId, product, handle);
                        GetCategories(parseErrors, worksheet, rowId, product, handle);
                        GetSpecifications(parseErrors, worksheet, rowId, handle, product);
                        GetImages(worksheet, rowId, product);
                        GetUrlHistory(parseErrors, worksheet, rowId, product, handle);

                        productsToImport.Add(product);
                    }
                    else
                    {
                        // Row belongs to an already-imported product: resolve the existing
                        // entry so the variant below attaches to it.
                        product = !string.IsNullOrWhiteSpace(url)
                            ? productsToImport.SingleOrDefault(x => x.Name == name && x.UrlSegment == url)
                            : productsToImport.SingleOrDefault(x => x.Name == name);
                    }

                    //Variants
                    if (product != null)
                    {
                        var productVariant = GetProductVariant(parseErrors, worksheet, rowId, handle);
                        if (productVariant != null)
                        {
                            //Options
                            GetProductVariantOptions(worksheet, rowId, productVariant);

                            //Price Breaks
                            GetPriceBreaks(parseErrors, worksheet, rowId, handle, productVariant);

                            product.ProductVariants.Add(productVariant);
                        }
                    }
                }
            }
        }

        //Remove handles with no errors
        parseErrors = parseErrors.Where(x => x.Value.Any()).ToDictionary(pair => pair.Key, pair => pair.Value);
    }

    // FIX: removed the unused debugging local
    // `var i = productsToImport.Where(x => x.ProductVariants.Count == 0).ToList();`
    return productsToImport;
}